index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
64,380 | kaiergin/Quadcopter_simulator | refs/heads/master | /quad_sim.py | import quadcopter,gui,controller,q_controller
import signal
import sys
import argparse
import random
# Constants -- simulation timing configuration (may be overridden by CLI flags in __main__)
TIME_SCALING = 0.0 # Any positive number(Smaller is faster). 1.0->Real Time, 0.0->Run as fast as possible
QUAD_DYNAMICS_UPDATE = 0.002 # seconds
CONTROLLER_DYNAMICS_UPDATE = 0.005 # seconds
# NOTE(review): 'global' at module level is a no-op; 'run' is the shared stop
# flag assigned in __main__ and read/written by Single_AI and signal_handler.
global run
def Single_AI(gui_on, get_motor_speeds):
    """Run one simulation episode with a single AI-controlled quadcopter.

    gui_on           -- when True, render the quadcopter in the GUI each loop
    get_motor_speeds -- AI policy callback handed to Controller_AI
    """
    # Set goals to go to (currently unused; the target updates are commented out below)
    GOALS = [(1,1,5)]
    YAWS = [0]
    # Small random initial attitude perturbation, one value per axis
    rand0 = (random.random()-.5)/2
    rand1 = (random.random()-.5)/2
    rand2 = (random.random()-.5)/2
    # Define the quadcopters
    QUADCOPTER={'q1':{'position':[1,0,4],'orientation':[rand0,rand1,rand2],'L':0.3,'r':0.1,'prop_size':[10,4.5],'weight':1.2}}
    # Controller parameters. NOTE(review): the PID tables appear unused by
    # Controller_AI (which delegates to the AI policy) but are kept for
    # interface compatibility -- confirm against controller.py.
    CONTROLLER_PARAMETERS = {'Motor_limits':[4000,9000],
                             'Tilt_limits':[-10,10],
                             'Yaw_Control_Limits':[-900,900],
                             'Z_XY_offset':500,
                             'Linear_PID':{'P':[300,300,7000],'I':[0.04,0.04,4.5],'D':[450,450,5000]},
                             'Linear_To_Angular_Scaler':[1,1,0],
                             'Yaw_Rate_Scaler':0.18,
                             'Angular_PID':{'P':[22000,22000,1500],'I':[0,0,1.2],'D':[12000,12000,0]},
                             }
    # Catch Ctrl+C to stop threads
    signal.signal(signal.SIGINT, signal_handler)
    # Make objects for quadcopter, gui and controller
    quad = quadcopter.Quadcopter(QUADCOPTER)
    if gui_on:
        gui_object = gui.GUI(quads=QUADCOPTER)
    ctrl = controller.Controller_AI(quad.get_state,quad.get_time,quad.set_motor_speeds,params=CONTROLLER_PARAMETERS,quad_identifier='q1',get_motor_speeds=get_motor_speeds)
    # Start the threads
    quad.start_thread(dt=QUAD_DYNAMICS_UPDATE,time_scaling=TIME_SCALING)
    ctrl.start_thread(update_rate=CONTROLLER_DYNAMICS_UPDATE,time_scaling=TIME_SCALING)
    # Update the GUI while switching between destination positions
    print(quad.get_state('q1')[6:])
    global run
    while(run==True):
        #ctrl.update_target(GOALS[0])
        #ctrl.update_yaw_target(YAWS[0])
        if gui_on:
            gui_object.quads['q1']['position'] = quad.get_position('q1')
            gui_object.quads['q1']['orientation'] = quad.get_orientation('q1')
            gui_object.update()
        # Once the quad drops below height 1, end the episode
        if quad.get_state('q1')[2] < 1:
            print(quad.get_state('q1')[6:])
            run = False
    quad.stop_thread()
    ctrl.stop_thread()
def parse_args(argv=None):
    """Parse command-line options for the simulator.

    argv -- optional list of argument strings; when None (the default, and
            the behavior existing callers rely on) sys.argv[1:] is parsed.
            Added so the parser can be exercised without touching sys.argv.

    Returns an argparse.Namespace with sim, time_scale, quad_update_time
    and controller_update_time attributes.
    """
    parser = argparse.ArgumentParser(description="Quadcopter Simulator")
    parser.add_argument("--sim", help='currently only single_ai', default='single_ai')
    # Negative default means "not supplied"; __main__ only applies it when >= 0
    parser.add_argument("--time_scale", type=float, default=-1.0, help='Time scaling factor. 0.0:fastest,1.0:realtime,>1:slow, ex: --time_scale 0.1')
    parser.add_argument("--quad_update_time", type=float, default=0.0, help='delta time for quadcopter dynamics update(seconds), ex: --quad_update_time 0.002')
    parser.add_argument("--controller_update_time", type=float, default=0.0, help='delta time for controller update(seconds), ex: --controller_update_time 0.005')
    return parser.parse_args(argv)
def signal_handler(sig, frame):
    """Ctrl+C handler: clear the global run flag and exit the process.

    Parameters renamed from (signal, frame) to (sig, frame) so the first
    parameter no longer shadows the imported ``signal`` module; the signal
    module invokes handlers positionally, so this is caller-compatible.
    """
    global run
    run = False
    print('Stopping')
    sys.exit(0)
if __name__ == "__main__":
    args = parse_args()
    # Apply CLI overrides only when the user actually supplied a value
    if args.time_scale>=0: TIME_SCALING = args.time_scale
    if args.quad_update_time>0: QUAD_DYNAMICS_UPDATE = args.quad_update_time
    if args.controller_update_time>0: CONTROLLER_DYNAMICS_UPDATE = args.controller_update_time
    # One Q-learning agent shared across every episode so experience accumulates
    AI = q_controller.Q_Controller()
    gui_on = False
    if args.sim == 'single_ai':
        # NOTE(review): 'global' at module scope is a no-op; 'run' is simply
        # the module-level stop flag shared with Single_AI / signal_handler.
        global run
        for x in range(1000):
            run = True
            print("starting simulation: ", x)
            # Only render the very last episode
            if x == 999:
                gui_on = True
            Single_AI(gui_on, AI.get_motor_speeds)
            print("simulation finished, starting training")
            # Train on the replay buffer after every episode
            AI.replay()
| {"/quad_sim.py": ["/controller.py", "/q_controller.py"]} |
64,381 | kaiergin/Quadcopter_simulator | refs/heads/master | /q_controller.py | from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
import numpy as np
import random
from collections import deque
STATE_SIZE = 12
ACTION_SIZE = 4
LEARNING_RATE = 0.001
alpha = 0.01
class Q_Controller:
    """Deep Q-learning style motor controller.

    Maps a 12-value quadcopter state to 4 motor-speed fractions in [0, 1]
    (sigmoid output layer), explores epsilon-greedily, and trains from an
    experience-replay buffer in replay().
    """
    def __init__(self):
        # Previous action/state, kept so the next call can record a full
        # (state, action, reward, next_state) transition
        self.prev_action = [0 for x in range(ACTION_SIZE)]
        self.prev_action = self.prev_action  # no-op guard removed? -- see note
        self.prev_state = [0 for x in range(STATE_SIZE)]
        # Bounded replay buffer of transitions
        self.memory = deque(maxlen=5000)
        self.model = self.create_model()
        # bug in tensorflow with asynchronous events makes this predict call necessary
        # (warms the model up so later predict() calls from other threads work)
        self.model.predict(np.zeros(12).reshape(1,12))
        self.gamma = 0.95            # discount factor for future reward
        self.epsilon = 1.0           # exploration rate, decayed in replay()
        self.epsilon_decay = 0.995
    def create_model(self):
        """Build the 12 -> 24 -> 24 -> 4 MLP used as the Q-function approximator."""
        model = Sequential()
        model.add(Dense(24, input_dim=STATE_SIZE, activation='relu'))
        model.add(Dense(24, activation='relu'))
        # sigmoid keeps every motor output in [0, 1]; the caller rescales to RPM
        model.add(Dense(ACTION_SIZE, activation='sigmoid'))
        model.compile(loss='mse', optimizer=Adam(lr=LEARNING_RATE))
        return model
    def remember(self, state, action, reward, next_state):
        # Append one transition to the replay buffer (oldest entries drop off)
        self.memory.append((state, action, reward, next_state))
    def get_motor_speeds(self, reward, data):
        """Record the previous transition, then return the next action:
        uniformly random with probability epsilon, otherwise the model's
        prediction for the current state ``data``."""
        self.remember(self.prev_state, self.prev_action, reward, data)
        self.prev_state = data
        if np.random.rand() <= self.epsilon:
            action = [np.random.rand() for x in range(ACTION_SIZE)]
            self.prev_action = action
            return action
        action = self.model.predict(np.array(data).reshape(1,12))[0]
        self.prev_action = action
        return action
    def replay(self):
        """Fit the model on a random 10% sample of the buffer, then decay epsilon."""
        minibatch = random.sample(self.memory, len(self.memory) // 10)
        for state, action, reward, next_state in minibatch:
            # Scalar reward broadcasts over the 4-output prediction, so the
            # target is a 4-vector blended into the old prediction at rate alpha
            target = reward + self.gamma * self.model.predict(np.array(next_state).reshape(1,12))[0]
            target_f = self.model.predict(np.array(state).reshape(1,12))
            target_f[0] = target_f[0]*(1-alpha) + alpha*target
            self.model.fit(np.array(state).reshape(1,12), np.array(target_f).reshape(1,4), epochs=1, verbose=0)
        self.epsilon *= self.epsilon_decay
        if self.epsilon < 0.01:
            self.epsilon = 0.01
| {"/quad_sim.py": ["/controller.py", "/q_controller.py"]} |
64,382 | kaiergin/Quadcopter_simulator | refs/heads/master | /controller.py | import numpy as np
import math
import time
import threading
class Controller_AI():
    """Controller that delegates motor-speed selection to an external AI policy
    (get_motor_speeds) and runs the control loop on its own daemon thread."""
    def __init__(self, get_state, get_time, actuate_motors, params, quad_identifier, get_motor_speeds):
        # Callbacks into the simulation and the AI policy
        self.get_motor_speeds = get_motor_speeds
        self.quad_identifier = quad_identifier
        self.actuate_motors = actuate_motors
        self.get_state = get_state
        self.get_time = get_time
        self.MOTOR_LIMITS = params['Motor_limits']
        # Degrees -> radians (pi approximated as 3.14)
        self.TILT_LIMITS = [(params['Tilt_limits'][0]/180.0)*3.14,(params['Tilt_limits'][1]/180.0)*3.14]
        self.YAW_CONTROL_LIMITS = params['Yaw_Control_Limits']
        self.Z_LIMITS = [self.MOTOR_LIMITS[0]+params['Z_XY_offset'],self.MOTOR_LIMITS[1]-params['Z_XY_offset']]
        self.LINEAR_TO_ANGULAR_SCALER = params['Linear_To_Angular_Scaler']
        self.YAW_RATE_SCALER = params['Yaw_Rate_Scaler']
        self.thread_object = None
        self.target = [0,0,0]
        self.yaw_target = 0.0
        self.run = True
    def wrap_angle(self,val):
        # Wrap any angle into [-pi, pi)
        return( ( val + np.pi) % (2 * np.pi ) - np.pi )
    def update(self):
        """One control step: build a reward from attitude and angular rates,
        ask the AI policy for motor fractions, rescale to RPM and actuate."""
        [dest_x,dest_y,dest_z] = self.target
        # State layout: position, linear velocity, Euler angles, angular rates
        data = [x,y,z,x_dot,y_dot,z_dot,theta,phi,gamma,theta_dot,phi_dot,gamma_dot] = self.get_state(self.quad_identifier)
        # Penalize any attitude deviation and any angular velocity (always <= 0)
        reward = -(theta)**2 + -(phi)**2 + -(gamma)**2 + -(theta_dot)**2 + -(phi_dot)**2 + -(gamma_dot)**2
        #print(reward)
        action = self.get_motor_speeds(reward, data)
        # Map policy outputs (assumed in [0, 1] -- see sigmoid output in
        # q_controller; confirm for other policies) onto the motor RPM range
        range_motors = self.MOTOR_LIMITS[1] - self.MOTOR_LIMITS[0]
        action = np.array(action)
        action *= range_motors #/ 4
        action += self.MOTOR_LIMITS[0]
        [m1, m2, m3, m4] = action
        M = np.clip([m1,m2,m3,m4],self.MOTOR_LIMITS[0],self.MOTOR_LIMITS[1])
        self.actuate_motors(self.quad_identifier,M)
    def update_target(self,target):
        # Set a new (x, y, z) destination (not used by the AI reward above)
        self.target = target
    def update_yaw_target(self,target):
        self.yaw_target = self.wrap_angle(target)
    def thread_run(self,update_rate,time_scaling):
        # Scale the control interval; time_scaling == 0 -> update as fast as possible
        update_rate = update_rate*time_scaling
        last_update = self.get_time()
        while(self.run==True):
            time.sleep(0)  # yield the GIL to the dynamics thread
            self.time = self.get_time()
            # get_time() is assumed to return datetime-like values (the delta
            # supports total_seconds()) -- confirm against quadcopter.py
            if (self.time - last_update).total_seconds() > update_rate:
                self.update()
                last_update = self.time
    def start_thread(self,update_rate=0.005,time_scaling=1):
        # Daemon thread so the controller cannot keep the process alive on exit
        self.thread_object = threading.Thread(target=self.thread_run,args=(update_rate,time_scaling),daemon=True)
        self.thread_object.start()
    def stop_thread(self):
        # Signal thread_run's loop to terminate
        self.run = False
| {"/quad_sim.py": ["/controller.py", "/q_controller.py"]} |
64,391 | Andy-O-Hung/zork | refs/heads/master | /Neighborhood.py | from House import *
from Observer import Observer
from random import *
# Neighborhood class, which extends the Observer class
class Neighborhood(Observer):
    """A randomly sized grid of House objects.

    The neighborhood observes every house and keeps a running total of the
    monsters inside them; each observer update means one monster was killed.
    """

    def __init__(self):
        Observer.__init__(self)
        self.gridHeight = randint(2, 3)
        self.gridWidth = randint(2, 3)
        self.numMonsters = 0
        self.grid = self.populateGrid()

    # Height of the grid in houses
    def getHeight(self):
        return self.gridHeight

    # Width of the grid in houses
    def getWidth(self):
        return self.gridWidth

    # Total number of monsters remaining across the whole neighborhood
    def getMonsterCount(self):
        return self.numMonsters

    # The 2D list of House objects
    def getGrid(self):
        return self.grid

    # Build the 2D grid of houses, accumulating each house's monster count
    # and registering this neighborhood as the house's observer.
    def populateGrid(self):
        rows = []
        for _ in range(self.gridHeight):
            currentRow = []
            for _ in range(self.gridWidth):
                newHouse = House()
                self.numMonsters += newHouse.getNumMonsters()
                newHouse.add_observer(self)
                currentRow.append(newHouse)
            rows.append(currentRow)
        return rows

    # A house reported a monster kill: decrement the running total.
    def update(self):
        self.numMonsters -= 1
| {"/Neighborhood.py": ["/House.py"], "/Player.py": ["/Weapons.py"], "/House.py": ["/Npc.py"]} |
64,392 | Andy-O-Hung/zork | refs/heads/master | /Game.py | from Neighborhood import *
from Player import *
from random import *
# This is the Game class, which will be running the game logic
class Game(object):
    # Class constructor
    # Initializes variables
    def __init__(self):
        # Neighborhood class instantiation
        self.hood = Neighborhood()
        # Get neighborhood grid, where house objects are located
        self.grid = self.hood.getGrid()
        # Instance of class player, which is the current player
        self.player1 = Player()
        # The variables turn and state are used to keep track of the player's
        # current state. turn is never read afterwards; state == 0 means the
        # main loop in run() must stop.
        self.turn = 0
        self.state = 0
    # This function will run the game
    def run(self):
        print("\nWelcome to your monster infested neighborhood\n")
        self.state = 1
        # Run the game until game is finished
        # The game will finish when there is no monsters or
        # the player dies (both paths set self.state = 0 inside enterHouse)
        while(self.state != 0):
            # Print the neighborhood dimensions and the number of houses
            print("The size of the neighborhood is: %d x %d\n" % (self.hood.getHeight(), self.hood.getWidth()))
            print("There are %d houses in it.\n" % (self.hood.getHeight() * self.hood.getWidth()))
            # Prints the neighborhood map
            self.printMap();
            # Attempt to gather correct house coordinates from user
            try:
                print("Type the x coordinate of the house you would like to enter!\n")
                xHouse = int(raw_input())
                # Error catching
                # If the x coordinate is less than 1 or bigger than the grid size
                if (xHouse > self.hood.getHeight() or xHouse < 1):
                    # Then raise value error
                    raise ValueError
                print("Type the y coordinate of the house you would like to enter!\n")
                yHouse = int(raw_input())
                # Error catching
                # If the y coordinate is less than 1 or bigger than the grid size
                if (yHouse > self.hood.getWidth() or yHouse < 1):
                    # Then raise value error
                    raise ValueError
                # If coordinates are correct, proceed to house
                print("\nEntering house with coordinates %d x %d!\n" % (xHouse, yHouse))
                # Call enterHouse() function to enter the house, providing the house
                # coordinates within the grid as parameters (1-based from the user)
                self.enterHouse(xHouse, yHouse)
            # Error handling
            # If coordinates are incorrect (or not integers), do not attempt to
            # enter the house. Instead, print error message
            except ValueError:
                print "\nPlease enter correct coordinates\n"
    # Helper function
    # Prints the house coordinates to the screen
    def printMap(self):
        print("These are the coordinates of the houses in the neighborhood:\n")
        # Loop through the grid 2D array and print the coordinates of every house
        for x in range(self.hood.getHeight()):
            print("| "),
            for y in range(self.hood.getWidth()):
                # We add 1 to the coordinates, just so we don't start on 0
                # This makes it more understandable for the user
                print("%d x %d |" % ((x+1),(y+1))),
            print "\n"
    # Game Logic when player is inside the house
    def enterHouse(self, xHouse, yHouse):
        # Declaration of local variables
        inTheHouse = True
        # Convert the user's 1-based coordinates back to 0-based indices
        houseEntered = self.grid[xHouse - 1][yHouse - 1]
        # NOTE(review): numMonsters is assigned but never read below
        numMonsters = houseEntered.getNumMonsters()
        # Check number of NPCs in the house, which could be 0
        # If there is no monsters and no people in the house,
        # then exit the house
        if (len(houseEntered.getNpcs()) == 0):
            print "This house is empty! Exiting!!"
            return
        # If there are NPCs, then proceed
        else:
            # Execute until player exits house
            while(inTheHouse):
                try:
                    # Checks for total number of monsters in the neighborhood
                    # If there is none, then the game is over and we exit the house
                    # and the game. User wins
                    if (self.hood.getMonsterCount() == 0):
                        print("There are no more monsters!\n")
                        print("You've won!\n")
                        inTheHouse = False
                        self.state = 0
                        return
                    NPCNum = 1
                    print "You have found some creatures in the house!\n\nThis is a list of them:\n"
                    # Print to the screen the list of NPC's in the house, assigning each an index
                    # which starts from 1 (again, not 0 for user friendliness) and will be used by
                    # player to select who to approach
                    for x in houseEntered.getNpcs():
                        print("%s (%d)" % (x.getName(), NPCNum))
                        NPCNum += 1
                    print "\n"
                    print "Who would you like to approach? (Enter NPC index to approach or 0 to leave the house)\n"
                    NPCIndex = int(raw_input())
                    # The player will input 0 to exit house
                    if (NPCIndex == 0):
                        return
                    # Check user input for possible errors
                    # If the input is less than 0 or greater than the number
                    # of NPCs in the house, an error will be raised
                    elif (NPCIndex > NPCNum - 1 or NPCIndex < 1):
                        raise ValueError
                    print "\n"
                    # If user input is correct, then approach NPC
                    print("You decided to approach %s\n" % houseEntered.getNpcs()[NPCIndex - 1].getName())
                    # If the NPC approached is a person, then player gains +1 health
                    if (houseEntered.getNpcs()[NPCIndex - 1].getName() == "Person"):
                        self.player1.setHealth(self.player1.getHealth() + 1)
                        # Player is notified
                        print("+1 health! Your health: %d\n" %self.player1.getHealth())
                    # If NPC is not a person, then its monster
                    # Therefore, player must fight
                    else:
                        print "Get ready to fight!\n"
                        fighting = 1
                        # Fighting logic of the game
                        while (fighting == 1):
                            # Checks for total number of monsters in the neighborhood
                            # If there is none, then the game is over and we exit the house
                            # and the game. User wins
                            if (self.hood.getMonsterCount() == 0):
                                print("There are no more monsters!\n")
                                print("You've won!\n")
                                fighting = 0
                                inTheHouse = False
                                self.state = 0
                                return
                            # Prints a list of all the weapons that the user has,
                            # and it shows how many uses each has left
                            print "It's your turn to attack. These are your weapons:\n"
                            WeaponNum = 1
                            for x in self.player1.getInventory():
                                print("%s. Uses left: %d (%d)" % (x.getName(), x.getUses() ,WeaponNum))
                                WeaponNum += 1
                            print "\n"
                            # Same principle as the NPC approach
                            print "What weapon would you like to use? Press 0 to get out of the house.\n"
                            weaponIndex = int(raw_input())
                            print ""
                            # If player enters 0, then it will exit the house
                            if(weaponIndex == 0):
                                print("Getting out of this house.\n")
                                return;
                            # Checking user input for errors
                            # If the user entered a number lower than 1 or greater than
                            # the number of weapons available, then raise error
                            elif(weaponIndex > WeaponNum - 1 or weaponIndex < 1):
                                raise ValueError
                            # If weapon has no uses, then player can't attack and loses their turn
                            # (note: the monster's counterattack below still happens)
                            elif(self.player1.getInventory()[weaponIndex-1].getUses() == 0):
                                print "That weapon has no uses left! Sorry, you lost your opportunity to attack\n"
                            # Fight against a Zombie
                            if (houseEntered.getNpcs()[NPCIndex - 1].getName() == "Zombie" and self.player1.getInventory()[weaponIndex-1].getUses() > 0):
                                # Notify player of its opponent's health
                                print("You're attacking Zombie using %s!\n" % self.player1.getInventory()[weaponIndex-1].getName())
                                print("Zombie has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                                damageDealt = 0
                                # Calculate damage for attack with sour straw
                                # Sour straws do double damage to zombies
                                if (self.player1.getInventory()[weaponIndex-1].getName() == "SourStraw"):
                                    print "It's really effective!\n"
                                    damageDealt = self.player1.getAttackValue()*self.player1.getInventory()[weaponIndex-1].getMod()*2
                                    houseEntered.getNpcs()[NPCIndex - 1].setHealth(houseEntered.getNpcs()[NPCIndex - 1].getHealth() - damageDealt)
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                # Calculate damage for any other weapon
                                else:
                                    damageDealt = self.player1.getAttackValue()*self.player1.getInventory()[weaponIndex-1].getMod()
                                    houseEntered.getNpcs()[NPCIndex - 1].setHealth(houseEntered.getNpcs()[NPCIndex - 1].getHealth() - damageDealt)
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                # Notify player of the damage dealt to the zombie
                                print("You dealt %d points of damage to Zombie!\n" %damageDealt)
                                print("Your %s has now %d use(s)\n" % (self.player1.getInventory()[weaponIndex-1].getName(), self.player1.getInventory()[weaponIndex-1].getUses()))
                                # If the zombie has less than 1 health, it's dead. Notify player and observer (house it is in)
                                # This turns the Zombie into a person, and updates the total number of monsters in the neighborhood
                                if(houseEntered.getNpcs()[NPCIndex - 1].getHealth() < 1):
                                    print("You defeated Zombie! It will turn into a person now!\n")
                                    houseEntered.killedMonster(NPCIndex - 1)
                                    # break skips the counterattack at the bottom of the loop
                                    break;
                                # If zombie was not killed by attack, notify player of its health
                                else:
                                    print("Zombie has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                            # Fight against a Vampire
                            elif (houseEntered.getNpcs()[NPCIndex - 1].getName() == "Vampire" and self.player1.getInventory()[weaponIndex-1].getUses() > 0):
                                # Notify player of its opponent's health
                                print("You're attacking Vampire using %s!\n" % self.player1.getInventory()[weaponIndex-1].getName())
                                print("Vampire has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                                damageDealt = 0
                                # Vampires are immune to chocolate bars: the use is
                                # consumed but no damage is dealt
                                if (self.player1.getInventory()[weaponIndex-1].getName() == "ChocolateBar"):
                                    print "Vampire is not harmed by ChocolateBar!\n"
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                # Calculate damage for any other weapon
                                else:
                                    damageDealt = self.player1.getAttackValue()*self.player1.getInventory()[weaponIndex-1].getMod()
                                    houseEntered.getNpcs()[NPCIndex - 1].setHealth(houseEntered.getNpcs()[NPCIndex - 1].getHealth() - damageDealt)
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                # Notify player of the damage dealt to vampire
                                print("You dealt %d points of damage to Vampire!\n" %damageDealt)
                                print("Your %s has now %d use(s)\n" % (self.player1.getInventory()[weaponIndex-1].getName(), self.player1.getInventory()[weaponIndex-1].getUses()))
                                # If the vampire has less than 1 health, it's dead. Notify player and observer (house it is in)
                                # This turns the vampire into a person, and updates the total number of monsters in the neighborhood
                                if(houseEntered.getNpcs()[NPCIndex - 1].getHealth() < 1):
                                    print("You defeated Vampire! It will turn into a person now!\n")
                                    houseEntered.killedMonster(NPCIndex - 1)
                                    break;
                                # If vampire was not killed by attack, notify player of its health
                                else:
                                    print("Vampire has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                            # Fight against a Ghoul (NerdBombs do 5x damage)
                            elif (houseEntered.getNpcs()[NPCIndex - 1].getName() == "Ghoul" and self.player1.getInventory()[weaponIndex-1].getUses() > 0):
                                print("You're attacking Ghoul using %s!\n" % self.player1.getInventory()[weaponIndex-1].getName())
                                print("Ghoul has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                                damageDealt = 0
                                if (self.player1.getInventory()[weaponIndex-1].getName() == "NerdBomb"):
                                    print "It's SUPER effective!\n"
                                    damageDealt = self.player1.getAttackValue()*self.player1.getInventory()[weaponIndex-1].getMod()*5
                                    houseEntered.getNpcs()[NPCIndex - 1].setHealth(houseEntered.getNpcs()[NPCIndex - 1].getHealth() - damageDealt)
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                else:
                                    damageDealt = self.player1.getAttackValue()*self.player1.getInventory()[weaponIndex-1].getMod()
                                    houseEntered.getNpcs()[NPCIndex - 1].setHealth(houseEntered.getNpcs()[NPCIndex - 1].getHealth() - damageDealt)
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                print("You dealt %d points of damage to Ghoul!\n" %damageDealt)
                                print("Your %s has now %d use(s)\n" % (self.player1.getInventory()[weaponIndex-1].getName(), self.player1.getInventory()[weaponIndex-1].getUses()))
                                if(houseEntered.getNpcs()[NPCIndex - 1].getHealth() < 1):
                                    print("You defeated Ghoul! It will turn into a person now!\n")
                                    houseEntered.killedMonster(NPCIndex - 1)
                                    break;
                                else:
                                    print("Ghoul has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                            # Fight against a Werewolf (immune to both ChocolateBars and SourStraws)
                            elif (houseEntered.getNpcs()[NPCIndex - 1].getName() == "Werewolf" and self.player1.getInventory()[weaponIndex-1].getUses() > 0):
                                print("You're attacking Werewolf using %s!\n" % self.player1.getInventory()[weaponIndex-1].getName())
                                print("Werewolf has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                                damageDealt = 0
                                if (self.player1.getInventory()[weaponIndex-1].getName() == "ChocolateBar"):
                                    print "Werewolf is not harmed by ChocolateBar!\n"
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                elif (self.player1.getInventory()[weaponIndex-1].getName() == "SourStraw"):
                                    print "Werewolf is not harmed by SourStraw!\n"
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                else:
                                    damageDealt = self.player1.getAttackValue()*self.player1.getInventory()[weaponIndex-1].getMod()
                                    houseEntered.getNpcs()[NPCIndex - 1].setHealth(houseEntered.getNpcs()[NPCIndex - 1].getHealth() - damageDealt)
                                    self.player1.getInventory()[weaponIndex-1].setUses(self.player1.getInventory()[weaponIndex-1].getUses() -1)
                                print("You dealt %d points of damage to Werewolf!\n" %damageDealt)
                                print("Your %s has now %d use(s)\n" % (self.player1.getInventory()[weaponIndex-1].getName(), self.player1.getInventory()[weaponIndex-1].getUses()))
                                if(houseEntered.getNpcs()[NPCIndex - 1].getHealth() < 1):
                                    print("You defeated Werewolf! It will turn into a person now!\n")
                                    houseEntered.killedMonster(NPCIndex - 1)
                                    break;
                                else:
                                    print("Werewolf has %d healthpoints\n" % houseEntered.getNpcs()[NPCIndex - 1].getHealth())
                            # After the player attacks, it's attacked
                            # Notify the player about the attack
                            print("You're under attack by %s! Watch out!\n" %houseEntered.getNpcs()[NPCIndex - 1].getName())
                            damageReceived = houseEntered.getNpcs()[NPCIndex - 1].getAttack()
                            print("%s has dealt %d points of damage!\n" %(houseEntered.getNpcs()[NPCIndex - 1].getName(), damageReceived))
                            # Set players health after attack
                            self.player1.setHealth(self.player1.getHealth() - damageReceived)
                            # If player's health is under 1, then he/she has been defeated and the game is over
                            if(self.player1.getHealth() < 1):
                                print("Oh oh. The candy forces have defeated you! You weren't able to save your neighborhood.\n")
                                print("There still are %d monsters in the neighborhood.\n" %self.hood.getMonsterCount())
                                fighting = 0
                                inTheHouse = False
                                self.state = 0
                            # If the player wasn't killed by the attack, then show his/her health
                            else:
                                print("You have %d health points now!\n" %self.player1.getHealth())
                # Error handling for all possible user input errors caught above
                except ValueError:
                    print("Incorrect input\n")
| {"/Neighborhood.py": ["/House.py"], "/Player.py": ["/Weapons.py"], "/House.py": ["/Npc.py"]} |
64,393 | Andy-O-Hung/zork | refs/heads/master | /main.py | # Import the necessary files
# Note that not all classes are imported. This is because only the necessary ones are imported
# into the main, and then the other ones are imported into the other classes when necessary
from Player import *
from Neighborhood import *
from Game import *
# The main method of the game.
def main():
    """Entry point: greet the user, build a Game and run its main loop."""
    # Basic welcome message to greet the user
    print "Welcome to ZORK"
    # Creates an instance of the game class, and calls its run function
    currGame = Game()
    currGame.run()
# Only run the game when this file is executed directly (not imported)
if __name__ == "__main__":
    main()
| {"/Neighborhood.py": ["/House.py"], "/Player.py": ["/Weapons.py"], "/House.py": ["/Npc.py"]} |
64,394 | Andy-O-Hung/zork | refs/heads/master | /Player.py | from random import *
from Weapons import *
#Player class which is the user for the game.
class Player(object):
    """The human player: random health/attack stats plus a candy-weapon inventory."""

    def __init__(self):
        self.health = randint(100, 125)
        self.attackValue = randint(10, 20)
        self.inventory = self.generateWeapons()

    # Getter functions
    def getHealth(self):
        return self.health

    def getInventory(self):
        return self.inventory

    def getAttackValue(self):
        return self.attackValue

    # Build the starting inventory: one HersheyKiss plus nine random candies.
    def generateWeapons(self):
        weaponList = [HersheyKiss()]
        for _ in range(9):
            options = ["ChocolateBars", "NerdBomb", "SourStraws"]
            pick = options[randint(0, 2)]
            if pick == "ChocolateBars":
                weaponList.append(ChocolateBar())
            elif pick == "NerdBomb":
                weaponList.append(NerdBomb())
            else:
                weaponList.append(SourStraw())
        return weaponList

    # Setter functions
    def setAttackValue(self, newValue):
        self.attackValue = newValue

    def setHealth(self, newValue):
        self.health = newValue
| {"/Neighborhood.py": ["/House.py"], "/Player.py": ["/Weapons.py"], "/House.py": ["/Npc.py"]} |
64,395 | Andy-O-Hung/zork | refs/heads/master | /House.py | from Npc import *
from Observer import *
from random import *
# House class, which extends both the observable and the observer class
# It observes all NPCs living within it, and it's also observed
# by the neighborhood class, which keeps track of the amount of total monsters
class House(Observer, Observable):
    """A single house with a random set of residents.

    The house observes its NPCs (so it hears when one dies) and is itself
    observed by the Neighborhood, which tracks the global monster count.
    """

    def __init__(self):
        Observable.__init__(self)
        Observer.__init__(self)
        self.numberOfNpcs = randint(0, 10)
        self.npcList = self.generateNpcs(self.numberOfNpcs)
        self.numberOfMonsters = self.countMonsters(self.npcList)

    # List of NPCs currently living in this house
    def getNpcs(self):
        return self.npcList

    # Number of monsters (non-person NPCs) in this house
    def getNumMonsters(self):
        return self.numberOfMonsters

    # Create `number` random residents; the house observes each of them.
    def generateNpcs(self, number):
        residents = []
        for _ in range(number):
            # Index 0..4 maps onto the same kinds (and probabilities) as before
            kinds = [Person, Zombie, Vampire, Ghoul, Werewolf]
            newcomer = kinds[randint(0, 4)]()
            newcomer.add_observer(self)
            residents.append(newcomer)
        return residents

    # Count residents that are not plain people.
    def countMonsters(self, npcs):
        return sum(1 for resident in npcs if resident.getName() != "Person")

    # Replace a defeated monster with a fresh person and notify observers.
    def killedMonster(self, monsterIndex):
        del self.npcList[monsterIndex]
        self.numberOfMonsters = self.numberOfMonsters - 1
        savedSoul = Person()
        savedSoul.add_observer(self)
        self.npcList.append(savedSoul)
        self.update()

    # Forward the kill event up to the neighborhood.
    def update(self):
        self.updateAll()
| {"/Neighborhood.py": ["/House.py"], "/Player.py": ["/Weapons.py"], "/House.py": ["/Npc.py"]} |
64,396 | Andy-O-Hung/zork | refs/heads/master | /Weapons.py | from random import *
# Weapon parent class
class Weapon(object):
    """Base class for candy weapons.

    Constructor generalized with defaulted parameters so subclasses (or
    tests) can supply values directly; calling Weapon() with no arguments
    behaves exactly as before, so existing subclasses are unaffected.

    name      -- display name of the weapon
    attackMod -- damage multiplier applied to the player's attack value
    uses      -- remaining number of times the weapon can be used
    """
    def __init__(self, name="", attackMod=0, uses=0):
        self.name = name
        self.attackMod = attackMod
        self.uses = uses

    # Getter methods
    # Returns weapon name
    def getName(self):
        return self.name

    # Returns weapon attack modifier
    def getMod(self):
        return self.attackMod

    # Returns weapon use count
    def getUses(self):
        return self.uses

    # Setter methods
    # Sets weapon use count
    def setUses(self, useCount):
        self.uses = useCount
# HersheyKiss class
# This class is a child of the weapon parent class
class HersheyKiss(Weapon):
    """Hershey kiss: modifier 1, effectively unlimited uses."""

    def __init__(self):
        super(HersheyKiss, self).__init__()
        self.uses = 5000000
        self.attackMod = 1
        self.name = "HersheyKiss"
# SourStraw class
# This class is a child of the weapon parent class
class SourStraw(Weapon):
    """Sour straw: 2 uses, random modifier drawn from [1, 1.75]."""

    def __init__(self):
        super(SourStraw, self).__init__()
        self.uses = 2
        self.name = "SourStraw"
        self.attackMod = uniform(1, 1.75)
# ChocolateBar class
# This class is a child of the weapon parent class
class ChocolateBar(Weapon):
    """Chocolate bar: 4 uses, random modifier drawn from [2, 2.4]."""

    def __init__(self):
        super(ChocolateBar, self).__init__()
        self.uses = 4
        self.name = "ChocolateBar"
        self.attackMod = uniform(2, 2.4)
# NerdBomb class
# This class is a child of the weapon parent class
class NerdBomb(Weapon):
    """Nerd bomb: single use, random modifier drawn from [3.5, 5]."""

    def __init__(self):
        super(NerdBomb, self).__init__()
        self.uses = 1
        self.name = "NerdBomb"
        self.attackMod = uniform(3.5, 5)
| {"/Neighborhood.py": ["/House.py"], "/Player.py": ["/Weapons.py"], "/House.py": ["/Npc.py"]} |
64,397 | Andy-O-Hung/zork | refs/heads/master | /Npc.py | from Observer import Observable
from random import *
# Parent class, which extends the observable class
class Npc(Observable):
    """Base NPC: a name, attack strength, health pool and a list of the
    weapon kinds that can hurt it (plus one special weakness, if any)."""

    def __init__(self):
        Observable.__init__(self)
        self.name = ""
        self.attackStrength = 0
        self.healthPoints = 0
        self.vulnerability = []
        self.weakness = "None"

    def getName(self):
        """Return the NPC's display name."""
        return self.name

    def getHealth(self):
        """Return the NPC's current health points."""
        return self.healthPoints

    def getAttack(self):
        """Return the NPC's attack strength."""
        return self.attackStrength

    def getWeakness(self):
        """Return the weapon this NPC is especially weak to ("None" if none)."""
        return self.weakness

    def getVulnerability(self):
        """Return the list of weapon kinds that can damage this NPC."""
        return self.vulnerability

    def getLengthVulnerability(self):
        """Return how many weapon kinds can damage this NPC."""
        return len(self.vulnerability)

    def setHealth(self, health):
        """Overwrite the NPC's health points."""
        self.healthPoints = health
# Person class
# This class is a child of the Npc parent class
class Person(Npc):
    """Friendly civilian: fixed health and a negative attack value."""

    def __init__(self):
        super(Person, self).__init__()
        self.healthPoints = 100
        self.attackStrength = -1
        self.name = "Person"

    # Leftover test method from the original author; kept for compatibility.
    def asdf(self):
        return "swag"
# Zombie class
# This class is a child of the Npc parent class
class Zombie(Npc):
    """Zombie: vulnerable to every weapon, especially sour straws."""

    def __init__(self):
        super(Zombie, self).__init__()
        # RNG calls kept in the original order: attack first, then health
        self.attackStrength = randint(0, 10)
        self.healthPoints = randint(50, 100)
        self.name = "Zombie"
        self.vulnerability = ["HersheyKisses", "SourStraws", "ChocolateBars", "NerdBombs"]
        self.weakness = "SourStraws"
# Vampire class
# This class is a child of the Npc parent class
class Vampire(Npc):
    """Vampire: high health and attack; chocolate bars cannot hurt it."""

    def __init__(self):
        super(Vampire, self).__init__()
        # RNG calls kept in the original order: attack first, then health
        self.attackStrength = randint(10, 20)
        self.healthPoints = randint(100, 200)
        self.name = "Vampire"
        # ChocolateBars deliberately absent; Game treats them as harmless here
        self.vulnerability = ["HersheyKisses", "SourStraws", "NerdBombs"]
# Ghoul class
# This class is a child of the Npc parent class
class Ghoul(Npc):
    """Ghoul: hard hitter with low health; especially weak to nerd bombs."""

    def __init__(self):
        super(Ghoul, self).__init__()
        # RNG calls kept in the original order: attack first, then health
        self.attackStrength = randint(15, 30)
        self.healthPoints = randint(40, 80)
        self.name = "Ghoul"
        self.vulnerability = ["HersheyKisses", "SourStraws", "ChocolateBars", "NerdBombs"]
        self.weakness = "NerdBombs"
# Werewolf class
# This class is a child of the Npc parent class
class Werewolf(Npc):
    """Werewolf NPC: fixed health, random attack, no single weakness."""

    def __init__(self):
        """Set the werewolf's stats and candy profile."""
        super(Werewolf, self).__init__()
        self.name = "Werewolf"
        self.attackStrength = randint(0, 40)
        self.healthPoints = 200
        # Only two candy types affect it; weakness stays "None"
        self.vulnerability = ["HersheyKisses", "NerdBombs"]
| {"/Neighborhood.py": ["/House.py"], "/Player.py": ["/Weapons.py"], "/House.py": ["/Npc.py"]} |
64,413 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/preprocessing/category_encoder.py | '''
-------------------------------------------------------
Category Encoder - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils.validation import column_or_1d, check_is_fitted
from sklearn.preprocessing import LabelEncoder
from ..preprocessing.utils import is_float_array, is_object_array, \
check_error_strat
class CategoryEncoder(BaseEstimator, TransformerMixin):
    ''' Category Encoder

    Extends scikit's label encoder by allowing to encode missing
    and previously unseen values.

    Parameters
    ----------
    unseen : Replacement strategy for unseen values, str
        One of ['encode', 'nan', 'error']

    missing : Replacement strategy for missing values, str
        One of ['encode', 'nan', 'error']
    '''

    def __init__(self, unseen='nan', missing='nan'):
        replace_strats = ['encode', 'nan', 'error']

        if unseen not in replace_strats:
            raise ValueError('Value of `unseen` {} is not a valid replacement '
                             'strategy, {}'.format(unseen, replace_strats))

        if missing not in replace_strats:
            raise ValueError('Value of `missing` {} is not a valid replacement '
                             'strategy, {}'.format(missing, replace_strats))

        # Fill values used for unseen/missing entries ('encode' -> -1,
        # 'nan' -> np.nan, 'error' -> None)
        self.default_unseen_ = strat_to_default(unseen)
        self.default_missing_ = strat_to_default(missing)

        self.unseen = unseen
        self.missing = missing
        self.le_ = LabelEncoder()

    def fit(self, X, y=None):
        ''' Fitting of the transformer

        Parameters
        ----------
        X : array-like, shape (n_samples,)

        y : None
            There is no need of a target in a transformer, yet the pipeline API
            requires this parameter.

        Returns
        -------
        self : object
            Returns self.
        '''
        X = column_or_1d(X.copy(), warn=True)

        if is_object_array(X):
            # NOTE(review): identity test only catches the np.nan singleton;
            # float('nan') objects would slip through -- confirm upstream
            # always inserts np.nan for missing values.
            missing_mask = [x is np.nan for x in X]
            check_error_strat(missing_mask, self.missing, 'missing')
            # Fit the label encoder on the present values only
            self.le_.fit(X[np.invert(missing_mask)])
        elif is_float_array(X):
            missing_mask = np.isnan(X)
            check_error_strat(missing_mask, self.missing, 'missing')
            self.le_.fit(X[np.invert(missing_mask)])
        else:
            # Int/str arrays cannot contain NaN, so everything is present
            self.le_.fit(X)

        self.classes_ = self.le_.classes_

        # `fit` should always return `self`
        return self

    def transform(self, X):
        ''' Applying transformation on the data

        Parameters
        ----------
        X : array-like, shape (n_samples,)

        Returns
        -------
        X : array-like, shape (n_samples,)
            The encoded values. An array of int/float.
        '''
        X = column_or_1d(X.copy(), warn=True)
        check_is_fitted(self, 'classes_')

        if is_object_array(X):
            missing_mask = [x is np.nan for x in X]
            # XOR removes missing entries from the unseen mask (both are
            # "not in classes_", but each has its own strategy)
            unseen_mask = np.bitwise_xor(np.isin(X, self.le_.classes_, invert=True),
                                         missing_mask)

            check_error_strat(missing_mask, self.missing, 'missing')
            check_error_strat(unseen_mask, self.unseen, 'unseen')

            X = encode_with_masks(X,
                                  self.le_,
                                  self.default_unseen_,
                                  unseen_mask,
                                  self.default_missing_,
                                  missing_mask)

            X = correct_dtype(X,
                              self.default_unseen_,
                              unseen_mask,
                              self.default_missing_,
                              missing_mask)
        elif is_float_array(X):
            missing_mask = np.isnan(X)
            unseen_mask = np.bitwise_xor(np.isin(X, self.le_.classes_, invert=True),
                                         missing_mask)

            check_error_strat(missing_mask, self.missing, 'missing')
            check_error_strat(unseen_mask, self.unseen, 'unseen')

            X = encode_with_masks(X,
                                  self.le_,
                                  self.default_unseen_,
                                  unseen_mask,
                                  self.default_missing_,
                                  missing_mask)

            # BUGFIX: pass the real unseen mask -- an empty list was passed
            # before, so NaN-encoded unseen values triggered an invalid
            # cast to int.
            X = correct_dtype(X,
                              self.default_unseen_,
                              unseen_mask,
                              self.default_missing_,
                              missing_mask)
        else:
            X = self.le_.transform(X)

        return X

    def fit_transform(self, X, y=None):
        ''' Combined fit and transform

        Parameters
        ----------
        X : array-like, shape (n_samples,)

        y : None
            There is no need of a target in a transformer, yet the pipeline API
            requires this parameter.

        Returns
        -------
        X : array-like, shape (n_samples,)
            The encoded values. An array of int/float.
        '''
        X = column_or_1d(X.copy(), warn=True)

        if is_object_array(X):
            missing_mask = [x is np.nan for x in X]
            check_error_strat(missing_mask, self.missing, 'missing')

            encode_mask = np.invert(missing_mask)
            X[encode_mask] = self.le_.fit_transform(X[encode_mask])
            X[missing_mask] = self.default_missing_

            # No unseen values are possible in fit_transform
            X = correct_dtype(X,
                              None,
                              [],
                              self.default_missing_,
                              missing_mask)
        elif is_float_array(X):
            missing_mask = np.isnan(X)
            check_error_strat(missing_mask, self.missing, 'missing')

            encode_mask = np.invert(missing_mask)
            X[encode_mask] = self.le_.fit_transform(X[encode_mask])
            X[missing_mask] = self.default_missing_

            X = correct_dtype(X,
                              None,
                              [],
                              self.default_missing_,
                              missing_mask)
        else:
            # BUGFIX: fit before transforming -- calling `transform` alone
            # raised NotFittedError on a fresh encoder for int/str input.
            X = self.le_.fit_transform(X)

        self.classes_ = self.le_.classes_

        return X
def encode_with_masks(X, le, default_unseen, unseen_mask, default_missing, missing_mask):
    ''' Encode seen values in place and substitute defaults for the rest.
    '''
    # Entries that are neither unseen nor missing can be label-encoded
    encodable = np.logical_not(np.logical_or(unseen_mask, missing_mask))
    X[encodable] = le.transform(X[encodable])

    # Fill the remaining entries with their strategy-specific defaults
    X[unseen_mask] = default_unseen
    X[missing_mask] = default_missing

    return X
def correct_dtype(X, default_unseen, unseen_mask, default_missing, missing_mask):
    ''' Cast the encoded array to int unless NaN fill values were injected.
    '''
    # NaN only ends up in X when a 'nan' default was actually applied
    nan_unseen = default_unseen is np.nan and np.any(unseen_mask)
    nan_missing = default_missing is np.nan and np.any(missing_mask)

    if nan_unseen or nan_missing:
        return X.astype('float')

    return X.astype('int')
def strat_to_default(strat):
    ''' Map a replacement strategy name to its fill value.

    'encode' -> -1, 'nan' -> np.nan, anything else ('error') -> None.
    '''
    defaults = {'encode': -1, 'nan': np.nan}
    return defaults.get(strat)
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,414 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/preprocessing/target_encoder.py | '''
-------------------------------------------------------
Target Encoder - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from warnings import warn
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils.validation import column_or_1d, check_is_fitted
from ..preprocessing.utils import check_error_strat
class TargetEncoder(BaseEstimator, TransformerMixin):
    ''' Target Encoder

    Transforms categorical values to their respective target mean.

    Parameters
    ----------
    smoothing : Smooth means by weighting target mean, float

    unseen : Replacement strategy for unseen values, str
        One of ['global', 'nan', 'error']

    missing : Replacement strategy for missing values, str
        One of ['global', 'nan', 'error']
    '''

    def __init__(self, smoothing=0, unseen='global', missing='global'):
        replace_strats = ['global', 'nan', 'error']

        if unseen not in replace_strats:
            raise ValueError('Value of `unseen` {} is not a valid replacement '
                             'strategy, {}'.format(unseen, replace_strats))

        if missing not in replace_strats:
            raise ValueError('Value of `missing` {} is not a valid replacement '
                             'strategy, {}'.format(missing, replace_strats))

        self.unseen = unseen
        self.missing = missing
        self.smoothing = smoothing

    def fit(self, X, y):
        ''' Fitting of the transformer

        Parameters
        ----------
        X : array-like, shape (n_samples,)
            The class values. An array of int.

        y : array-like, shape (n_samples,)
            The target values.

        Returns
        -------
        self : object
            Returns self.
        '''
        X = column_or_1d(X, warn=True)
        y = column_or_1d(y, warn=True)

        missing_mask = np.isnan(X)
        encode_mask = np.invert(missing_mask)
        check_error_strat(missing_mask, self.missing, 'missing')

        target_mean = np.mean(y)
        self.default_unseen_ = strat_to_default(self.unseen,
                                                target_mean)
        self.default_missing_ = strat_to_default(self.missing,
                                                 target_mean)

        self.classes_, counts = np.unique(X[encode_mask], return_counts=True)
        self.class_means_ = np.zeros_like(self.classes_, dtype='float64')

        for i, c in enumerate(self.classes_):
            class_mask = np.where(X == c)

            if class_mask[0].shape[0] > 0:
                self.class_means_[i] = np.mean(y[class_mask])
            else:
                # Defensive fallback; classes_ come from X so this should be
                # unreachable
                self.class_means_[i] = 1.0

        if self.smoothing != 0:
            #                class_counts x class_means + smoothing x global mean
            # smooth mean =  ----------------------------------------------------
            #                          (class_counts + smoothing)
            self.class_means_ = (counts * self.class_means_ + self.smoothing * target_mean)\
                                / (counts + self.smoothing)

        if self.unseen != 'error':
            # Reserve a sentinel class (max + 1) that unseen values map to
            self.classes_ = np.append(self.classes_, [np.max(self.classes_) + 1])
            self.class_means_ = np.append(self.class_means_, [self.default_unseen_])

        # Two-column lookup table: class value -> target mean
        self.lut_ = np.hstack([self.classes_.reshape(-1, 1),
                               self.class_means_.reshape(-1, 1)])

        if self.class_means_.shape[0] != np.unique(self.class_means_).shape[0]:
            warn('Duplicate target encoding for muliple classes. This will '
                 'make two or more categories indistinguishable.')

        # `fit` should always return `self`
        return self

    def transform(self, X):
        ''' Applying transformation on the data

        Parameters
        ----------
        X : array-like, shape (n_samples,)
            The class values. An array of int.

        Returns
        -------
        X : array-like, shape (n_samples,)
            The encoded values. An array of float.
        '''
        check_is_fitted(self, 'class_means_')
        X = column_or_1d(X, warn=True)

        missing_mask = np.isnan(X)
        encode_mask = np.invert(missing_mask)
        unseen_mask = np.bitwise_xor(np.isin(X, self.classes_, invert=True),
                                     missing_mask)

        check_error_strat(missing_mask, self.missing, 'missing')
        check_error_strat(unseen_mask, self.unseen, 'unseen')

        # Make all unseen to the same sentinel class appended during fit
        X[unseen_mask] = np.max(self.classes_)

        # Replace each value with its class mean via a sorted lookup on the
        # actual values.
        # BUGFIX: the previous lookup indexed into the unique values of X,
        # which silently returned wrong means whenever X contained only a
        # subset of the fitted classes.
        X[encode_mask] = np.take(self.lut_[:, 1],
                                 np.searchsorted(self.lut_[:, 0],
                                                 X[encode_mask]))

        if np.any(missing_mask):
            X[missing_mask] = self.default_missing_

        return X

    def fit_transform(self, X, y):
        ''' Combined fit and transform

        Parameters
        ----------
        X : array-like, shape (n_samples,)
            The class values. An array of int.

        y : array-like, shape (n_samples,)
            The target values.

        Returns
        -------
        X : array-like, shape (n_samples,)
            The encoded values. An array of float.
        '''
        X = column_or_1d(X, warn=True)
        y = column_or_1d(y, warn=True)

        missing_mask = np.isnan(X)
        encode_mask = np.invert(missing_mask)
        check_error_strat(missing_mask, self.missing, 'missing')

        target_mean = np.mean(y)
        self.default_unseen_ = strat_to_default(self.unseen,
                                                target_mean)
        self.default_missing_ = strat_to_default(self.missing,
                                                 target_mean)

        self.classes_, counts = np.unique(X[encode_mask],
                                          return_counts=True)
        self.class_means_ = np.zeros_like(self.classes_, dtype='float64')

        for i, c in enumerate(self.classes_):
            class_mask = np.where(X == c)

            if class_mask[0].shape[0] > 0:
                self.class_means_[i] = np.mean(y[class_mask])
            else:
                # Defensive fallback; classes_ come from X so this should be
                # unreachable
                self.class_means_[i] = 1.0

        if self.smoothing != 0:
            #                class_counts x class_means + smoothing x global mean
            # smooth mean =  ----------------------------------------------------
            #                          (class_counts + smoothing)
            self.class_means_ = (counts * self.class_means_ + self.smoothing * target_mean) \
                                / (counts + self.smoothing)

        if self.unseen != 'error':
            # BUGFIX: append the unseen sentinel only when it can be used,
            # mirroring `fit` (appending None for the 'error' strategy turned
            # the lookup table into an object array).
            self.classes_ = np.append(self.classes_, [np.max(self.classes_) + 1])
            self.class_means_ = np.append(self.class_means_, [self.default_unseen_])

        self.lut_ = np.hstack([self.classes_.reshape(-1, 1),
                               self.class_means_.reshape(-1, 1)])

        if self.class_means_.shape[0] != np.unique(self.class_means_).shape[0]:
            warn('Duplicate target encoding for muliple classes. This will '
                 'make two or more categories indistinguishable.')

        # Replace each value with its class mean (all values are fitted here,
        # so a direct sorted lookup is sufficient)
        X[encode_mask] = np.take(self.lut_[:, 1],
                                 np.searchsorted(self.lut_[:, 0],
                                                 X[encode_mask]))

        if np.any(missing_mask):
            X[missing_mask] = self.default_missing_

        return X
def strat_to_default(strat, global_mean=None):
    ''' Map a replacement strategy name to its fill value.

    'global' -> the supplied global target mean, 'nan' -> np.nan,
    anything else ('error') -> None.
    '''
    if strat == 'nan':
        return np.nan

    return global_mean if strat == 'global' else None
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,415 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/models/stack_regressor.py | '''
-------------------------------------------------------
Stack Regressor - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.validation import check_is_fitted
import numpy as np
class StackRegressor(BaseEstimator, RegressorMixin):
    ''' Stack Regressor

    Ensemble regressor that uses one meta regressor and several sub-regressors.
    The sub-regressors feed their outputs to the meta regressor, which uses
    them as input features.

    Parameters
    ----------
    regs : Regressors who's output will assist the meta_reg, list regressor

    meta_reg : Ensemble regressor that makes the final output, regressor

    keep_features : If original input features should be used by meta_reg, bool

    refit : If sub-regressors should be refit, bool
    '''

    def __init__(self, regs, meta_reg, keep_features=False, refit=True):
        self.regs = regs
        self.meta_reg = meta_reg
        self.keep_features = keep_features
        self.refit = refit

    def fit(self, X, y):
        ''' Fitting of the regressor

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The training input samples.

        y : array-like, shape (n_samples,)
            The target values. An array of int.

        Returns
        -------
        self : object
            Returns self.
        '''
        # Optionally (re)train every sub-regressor on the raw inputs first
        if self.refit:
            for sub_reg in self.regs:
                sub_reg.fit(X, y)

        # Stack: the meta regressor trains on the sub-regressors' outputs
        stacked = build_meta_X(self.regs, X, self.keep_features)
        self.meta_reg.fit(stacked, y)

        # Record fitted dimensions
        self.n_features_ = X.shape[1]
        self.n_meta_features_ = stacked.shape[1]
        self.n_regs = len(self.regs)

        return self

    def predict(self, X):
        ''' Regression

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of regression predictions.
        '''
        check_is_fitted(self, 'n_features_')

        # Rebuild the tier-2 features and delegate to the meta regressor
        stacked = build_meta_X(self.regs, X, self.keep_features)
        return self.meta_reg.predict(stacked)
def build_meta_X(regs, X=None, keep_features=False):
    ''' Build features that includes outputs of the sub-regressors

    Parameters
    ----------
    regs : Regressors who's output will assist the meta_reg, list regressor

    X : {array-like, sparse matrix}, shape (n_samples, n_features)
        The prediction input samples.

    keep_features : If the original input features should be kept alongside
        the sub-regressor outputs, bool

    Returns
    -------
    X_meta : {array-like, sparse matrix}, shape (n_samples, n_features + n_regs)
        The prediction input samples for the meta clf.
    '''
    if keep_features:
        X_meta = X
    else:
        X_meta = None

    for reg in regs:
        # BUGFIX: `predict` returns a 1-D array, so hstack concatenated the
        # predictions end-to-end (shape (k*n,)) instead of stacking them as
        # feature columns. Reshape each prediction into a column first.
        y_ = np.asarray(reg.predict(X)).reshape(-1, 1)

        if X_meta is None:
            X_meta = y_
        else:
            X_meta = np.hstack([X_meta, y_])

    return X_meta
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,416 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/models/stack_classifier.py | '''
-------------------------------------------------------
Stack Classifier - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.utils.validation import check_is_fitted
import numpy as np
class StackClassifier(BaseEstimator, ClassifierMixin):
    ''' Stack Classifier

    Ensemble classifier that uses one meta classifiers and several sub-classifiers.
    The sub-classifiers give their output to the meta classifier which will use
    them as input features.

    Parameters
    ----------
    clfs : Classifiers who's output will assist the meta_clf, list classifier

    meta_clf : Ensemble classifier that makes the final output, classifier

    drop_first : Drop first class probability to avoid multi-collinearity, bool

    keep_features : If original input features should be used by meta_clf, bool

    refit : If sub-classifiers should be refit, bool
    '''

    def __init__(self, clfs, meta_clf, drop_first=True, keep_features=False, refit=True):
        self.clfs = clfs
        self.meta_clf = meta_clf
        self.drop_first = drop_first
        self.keep_features = keep_features
        self.refit = refit

    def fit(self, X, y):
        ''' Fitting of the classifier

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The training input samples.

        y : array-like, shape (n_samples,)
            The target values. An array of int.

        Returns
        -------
        self : object
            Returns self.
        '''
        # Refit of classifier ensemble
        if self.refit:
            for clf in self.clfs:
                clf.fit(X, y)

        # Build new tier-2 features.
        # BUGFIX: pass the options by keyword -- previously `keep_features`
        # was passed positionally into the `drop_first` slot, so both
        # `drop_first` and `keep_features` were silently ignored.
        X_meta = build_meta_X(self.clfs, X,
                              drop_first=self.drop_first,
                              keep_features=self.keep_features)

        # Fit meta classifer, Stack the ensemble
        self.meta_clf.fit(X_meta, y)

        # set attributes
        self.n_features_ = X.shape[1]
        self.n_meta_features_ = X_meta.shape[1]
        self.n_clfs_ = len(self.clfs)

        return self

    def predict_proba(self, X):
        ''' Probability prediction

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of probabilities, floats.
        '''
        check_is_fitted(self, 'n_features_')

        # Build new tier-2 features (keyword args: see BUGFIX note in `fit`)
        X_meta = build_meta_X(self.clfs, X,
                              drop_first=self.drop_first,
                              keep_features=self.keep_features)

        return self.meta_clf.predict_proba(X_meta)

    def predict(self, X):
        ''' Classification

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of classifications, bools.
        '''
        check_is_fitted(self, 'n_features_')

        # Build new tier-2 features (keyword args: see BUGFIX note in `fit`)
        X_meta = build_meta_X(self.clfs, X,
                              drop_first=self.drop_first,
                              keep_features=self.keep_features)

        return self.meta_clf.predict(X_meta)
def build_meta_X(clfs, X=None, drop_first=True, keep_features=False):
    ''' Build features that includes outputs of the sub-classifiers

    Parameters
    ----------
    clfs : Classifiers that who's output will assist the meta_clf, list classifier

    X : {array-like, sparse matrix}, shape (n_samples, n_features)
        The prediction input samples.

    drop_first : Drop first proba to avoid multi-collinearity, bool

    keep_features : If original input features should be used by meta_clf, bool

    Returns
    -------
    X_meta : {array-like, sparse matrix}, shape (n_samples, n_features + n_clfs*classes)
        The prediction input samples for the meta clf.
    '''
    if keep_features:
        X_meta = X
    else:
        X_meta = None

    for clf in clfs:
        # BUGFIX: the drop_first condition was inverted -- with
        # drop_first=True the first probability column was kept instead of
        # dropped. Drop column 0 exactly when drop_first is True.
        probs = clf.predict_proba(X)
        if drop_first:
            probs = probs[:, 1:]

        if X_meta is None:
            X_meta = probs
        else:
            X_meta = np.hstack([X_meta, probs])

    return X_meta
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,417 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/models/fold_lgbm.py | '''
-------------------------------------------------------
Fold LightGBM - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from copy import copy
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.validation import check_X_y, check_array, check_is_fitted
import numpy as np
class FoldLightGBM(BaseEstimator):
    ''' Fold LightGBM

    Meta estimator that performs cross validation over k folds on a LightGBM
    estimator. Can optionally be used as an ensemble of k estimators.

    Parameters
    ----------
    lgbm : Base estimator

    fold : Fold cross validation object

    metric : Evaluations metric, func(y, y_)

    fit_params : Parameters that should be fed to estimator during fit.
        Dictionary (string -> object) or None for no extra parameters.

    refit_full : Flag for post fit behaviour
        True: Retrain one estimator on full data
        False: Continue as an ensemble trained on separate folds

    refit_params : Parameters that should be fed to estimator during refit.
        Dictionary (string -> object) or None for no extra parameters.

    verbose : Printing of fold scores, bool or int
    '''

    def __init__(self, lgbm, fold, metric, fit_params=None, refit_full=False,
                 refit_params=None, verbose=1):
        # Only metrics known to require probabilities are treated as such
        proba_metric = metric.__name__ in ['roc_auc_score']
        regressor = issubclass(type(lgbm), RegressorMixin)

        if proba_metric and regressor:
            raise ValueError('Cannot be both a regressor and use a metric that '
                             'requires `predict_proba`')

        if proba_metric and not hasattr(lgbm, 'predict_proba'):
            raise ValueError('Metric `{}` requires a classifier that implements '
                             '`predict_proba`'.format(metric.__name__))

        self.lgbm = lgbm
        # BUGFIX: avoid mutable dict default arguments (shared between
        # instances); None is translated into an empty dict instead.
        self.fit_params = {} if fit_params is None else fit_params
        self.fold = fold
        self.metric = metric
        self.is_regressor_ = regressor
        self.is_proba_metric_ = proba_metric
        self.refit_full = refit_full
        self.refit_params = {} if refit_params is None else refit_params
        self.verbose = verbose

    def fit(self, X, y):
        ''' Fitting of the estimator

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The training input samples.

        y : array-like, shape (n_samples,)
            The target values.

        Returns
        -------
        self : object
            Returns self.
        '''
        X, y = check_X_y(X, y, accept_sparse=True, force_all_finite=False)

        # BUGFIX: out-of-fold scores are collected in every mode, so the list
        # must always be initialized (previously it was only created when
        # `refit_full` was False, raising AttributeError otherwise).
        self.oof_scores_ = []

        if not self.refit_full:
            self.lgbms_ = []

        if not self.is_regressor_:
            self.n_classes_ = np.unique(y).shape[0]

        # Out-of-fold predictions: probabilities per class for proba metrics,
        # otherwise plain predictions in the shape of y
        if self.is_proba_metric_:
            self.oof_y_ = np.zeros((X.shape[0], self.n_classes_),
                                   dtype=np.float64)
        else:
            self.oof_y_ = np.zeros_like(y)

        current_fold = 1
        for fold_idx, oof_idx in self.fold.split(X, y):

            X_fold, y_fold = X[fold_idx], y[fold_idx]
            X_oof, y_oof = X[oof_idx], y[oof_idx]

            if self.refit_full:
                # Reuse the single estimator; it is refit on the full data
                # after the loop anyway
                lgbm = self.lgbm
            else:
                lgbm = copy(self.lgbm)

            lgbm.fit(X_fold, y_fold,
                     sample_weight=self.fit_params.get('sample_weight'),
                     init_score=self.fit_params.get('init_score'),
                     eval_set=(X_oof, y_oof),
                     eval_names=self.fit_params.get('eval_names'),
                     eval_sample_weight=self.fit_params.get('eval_sample_weight'),
                     eval_init_score=self.fit_params.get('eval_init_score'),
                     eval_metric=self.fit_params.get('eval_metric'),
                     early_stopping_rounds=self.fit_params.get('early_stopping_rounds'),
                     verbose=self.fit_params.get('verbose', self.verbose),
                     feature_name=self.fit_params.get('feature_name', 'auto'),
                     categorical_feature=self.fit_params.get('categorical_feature', 'auto'),
                     callbacks=self.fit_params.get('callbacks'),
                     )

            if self.is_proba_metric_:
                y_oof_ = lgbm.predict_proba(X_oof)
                self.oof_y_[oof_idx] = y_oof_
                # Binary AUC-style metrics score the positive class column
                y_oof_ = y_oof_[:, 1]
            else:
                y_oof_ = lgbm.predict(X_oof)
                self.oof_y_[oof_idx] = y_oof_

            oof_score = self.metric(y_oof, y_oof_)
            self.oof_scores_.append(oof_score)

            if not self.refit_full:
                self.lgbms_.append(lgbm)

            if self.verbose:
                print('Finished fold {} with score: {:.4f}'.format(current_fold,
                                                                   oof_score))
            current_fold += 1

        if self.refit_full:
            self.lgbm.fit(X, y,
                          sample_weight=self.refit_params.get('sample_weight'),
                          init_score=self.refit_params.get('init_score'),
                          eval_set=self.refit_params.get('eval_set'),
                          eval_names=self.refit_params.get('eval_names'),
                          eval_sample_weight=self.refit_params.get('eval_sample_weight'),
                          eval_init_score=self.refit_params.get('eval_init_score'),
                          eval_metric=self.refit_params.get('eval_metric'),
                          early_stopping_rounds=self.refit_params.get('early_stopping_rounds'),
                          verbose=self.refit_params.get('verbose', self.verbose),
                          feature_name=self.refit_params.get('feature_name', 'auto'),
                          categorical_feature=self.refit_params.get('categorical_feature', 'auto'),
                          callbacks=self.refit_params.get('callbacks'),
                          )

        if self.is_proba_metric_:
            self.oof_score_ = self.metric(y, self.oof_y_[:, 1])
        else:
            self.oof_score_ = self.metric(y, self.oof_y_)

        if self.verbose:
            print('Finished with a total score of: {:.4f}'.format(self.oof_score_))

        self.n_features_ = X.shape[1]
        self.n_folds_ = self.fold.n_splits

        return self

    def predict_proba(self, X):
        ''' Probability prediction

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of probabilities, floats.
        '''
        if not hasattr(self.lgbm, 'predict_proba'):
            raise ValueError('Base estimator does not support `predict_proba`')

        X = check_array(X, accept_sparse=True, force_all_finite=False)
        check_is_fitted(self, 'n_features_')

        if self.refit_full:
            y_ = self.lgbm.predict_proba(X)
        else:
            # Average the fold estimators' probabilities
            y_ = np.zeros((X.shape[0], self.n_classes_), dtype=np.float64)
            for lgbm in self.lgbms_:
                y_ += lgbm.predict_proba(X) / self.n_folds_

        return y_

    def predict(self, X):
        ''' Prediction

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of predictions.
        '''
        X = check_array(X, accept_sparse=True, force_all_finite=False)
        check_is_fitted(self, 'n_features_')

        # BUGFIX: the ensemble branches were swapped -- the classifier
        # ensemble averaged predicted class labels, while the regressor
        # ensemble called `predict_proba`, which regressors do not support.
        if self.refit_full:
            y_ = self.lgbm.predict(X)
        elif self.is_regressor_:
            # Regressor ensemble: average the fold predictions
            y_ = np.zeros((X.shape[0],), dtype=np.float64)
            for lgbm in self.lgbms_:
                y_ += lgbm.predict(X) / self.n_folds_
        else:
            # Classifier ensemble: pick the class with highest mean proba
            y_ = np.argmax(self.predict_proba(X), axis=1)

        return y_
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,418 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/preprocessing/count_encoder.py | '''
-------------------------------------------------------
Count Encoder - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from warnings import warn
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils.validation import column_or_1d, check_is_fitted
from ..preprocessing.utils import check_error_strat, is_float_array
class CountEncoder(BaseEstimator, TransformerMixin):
    ''' Count Encoder

    Transforms categorical values to their respective value count.

    Parameters
    ----------
    unseen : Replacement strategy for unseen values, str
        One of ['one', 'nan', 'error']

    missing : Replacement strategy for missing values, str
        One of ['one', 'nan', 'error']
    '''

    def __init__(self, unseen='one', missing='one'):
        replace_strats = ['one', 'nan', 'error']

        if unseen not in replace_strats:
            raise ValueError('Value of `unseen` {} is not a valid replacement '
                             'strategy, {}'.format(unseen, replace_strats))

        if missing not in replace_strats:
            raise ValueError('Value of `missing` {} is not a valid replacement '
                             'strategy, {}'.format(missing, replace_strats))

        # Fill values used for unseen/missing entries ('one' -> 1,
        # 'nan' -> np.nan, 'error' -> None)
        self.default_unseen_ = strat_to_default(unseen)
        self.default_missing_ = strat_to_default(missing)

        self.unseen = unseen
        self.missing = missing

    def fit(self, X, y=None):
        ''' Fitting of the transformer

        Parameters
        ----------
        X : array-like, shape (n_samples,)
            The class values. An array of int.

        y : None
            There is no need of a target in a transformer, yet the pipeline API
            requires this parameter.

        Returns
        -------
        self : object
            Returns self.
        '''
        X = column_or_1d(X, warn=True)
        check_error_strat(np.isnan(X), self.missing, 'missing')

        # Count only finite values; NaN is handled by the strategies
        self.classes_, self.counts_ = np.unique(X[np.isfinite(X)],
                                                return_counts=True)

        if self.classes_.shape[0] != np.unique(self.counts_).shape[0]:
            warn('Duplicate count encoding for muliple classes. This will '
                 'make two or more categories indistinguishable.')

        if self.unseen != 'error':
            # Reserve a sentinel class (max + 1) that unseen values map to.
            # BUGFIX: previously the sentinel was appended even for the
            # 'error' strategy, where default_unseen_ is None (producing an
            # object-dtype lookup table).
            self.classes_ = np.append(self.classes_, [np.max(self.classes_) + 1])
            self.counts_ = np.append(self.counts_, [self.default_unseen_])

        # Two-column lookup table: class value -> count
        self.lut_ = np.hstack([self.classes_.reshape(-1, 1),
                               self.counts_.reshape(-1, 1)])

        # `fit` should always return `self`
        return self

    def transform(self, X):
        ''' Applying transformation on the data

        Parameters
        ----------
        X : array-like, shape (n_samples,)
            The class values. An array of int.

        Returns
        -------
        X : array-like, shape (n_samples,)
            The count values. An array of int/float.
        '''
        check_is_fitted(self, 'classes_')
        X = column_or_1d(X, warn=True)

        missing_mask = np.isnan(X)
        encode_mask = np.invert(missing_mask)
        unseen_mask = np.bitwise_xor(np.isin(X, self.classes_, invert=True),
                                     missing_mask)

        check_error_strat(missing_mask, self.missing, 'missing')
        check_error_strat(unseen_mask, self.unseen, 'unseen')

        # Make all unseen to the same sentinel class appended during fit
        X[unseen_mask] = np.max(self.classes_)

        # Replace each value with its count via a sorted lookup on the actual
        # values.
        # BUGFIX: the previous lookup indexed into the unique values of X,
        # which silently returned wrong counts whenever X contained only a
        # subset of the fitted classes.
        X[encode_mask] = np.take(self.lut_[:, 1],
                                 np.searchsorted(self.lut_[:, 0],
                                                 X[encode_mask]))

        if np.any(missing_mask):
            X[missing_mask] = self.default_missing_

        # Cast as int if possible
        if is_float_array(X) and np.all(np.isfinite(X)):
            X = X.astype('int64')

        return X
def strat_to_default(strat):
    ''' Map a replacement strategy name to its fill value.

    'one' -> 1, 'nan' -> np.nan, anything else ('error') -> None.
    '''
    if strat == 'nan':
        return np.nan

    return 1 if strat == 'one' else None
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,419 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/utils/__init__.py | '''
-------------------------------------------------------
Utils - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from .utils import compress_dataframe
__all__ = ['compress_dataframe']
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,420 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/preprocessing/__init__.py | '''
-------------------------------------------------------
Preprocessing - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from .category_encoder import CategoryEncoder
from .multi_column_encoder import MultiColumnEncoder
from .target_encoder import TargetEncoder
from .count_encoder import CountEncoder
__all__ = ['CategoryEncoder',
'MultiColumnEncoder',
'TargetEncoder',
'CountEncoder']
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,421 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/__init__.py | '''
-------------------------------------------------------
extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
__all__ = ['models',
'preprocessing',
'utils'] | {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,422 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/models/__init__.py | '''
-------------------------------------------------------
Models - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from .fold_estimator import FoldEstimator
from .fold_lgbm import FoldLightGBM
from .stack_classifier import StackClassifier
from .stack_regressor import StackRegressor
__all__ = ['FoldEstimator',
'FoldLightGBM',
'StackClassifier',
'StackRegressor']
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,423 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/preprocessing/utils.py | '''
-------------------------------------------------------
Preprocessing Utils - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
import numpy as np
def is_object_array(X):
    ''' Tell whether ``X`` is a numpy object array.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)

    Returns
    -------
    truth : bool
        True when the array's dtype class is ``numpy.object_``.
    '''
    dtype_class = X.dtype.type
    return dtype_class is np.object_
def is_float_array(X):
    ''' Tell whether ``X`` is a floating-point numpy array.

    Recognises exactly float16, float32 and float64 dtypes.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)

    Returns
    -------
    truth : bool
    '''
    float_types = (np.float16, np.float32, np.float64)
    return X.dtype.type in float_types
def check_error_strat(mask, strat, name):
    ''' Enforce the 'error' handling strategy for encoders.

    When ``strat`` is ``'error'`` and ``mask`` flags at least one offending
    position, raise ``ValueError`` listing those indices. Any other strategy
    (or an all-false mask) is a no-op.

    Parameters
    ----------
    mask : array-like, indices or bool mask relevant to strategy

    strat : strategy for handling occurances in mask, string

    name : name of error check instance, string
    '''
    if strat != 'error' or not np.any(mask):
        return
    indices = list(np.where(mask)[0])
    raise ValueError('Error value found at index {}. Aborting '
                     'according to {} strategy.'.format(indices, name))
64,424 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/models/fold_estimator.py | '''
-------------------------------------------------------
Fold Estimator - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from copy import copy
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.validation import check_X_y, check_array, check_is_fitted
import numpy as np
class FoldEstimator(BaseEstimator):
    ''' Fold Estimator

    Meta estimator that performs cross validation over k folds. Can optionally
    be used as an ensemble of k estimators.

    Parameters
    ----------
    est : Base estimator

    fold : Fold cross validation object

    metric : Evaluations metric, func(y, y_)

    refit_full : Flag for post fit behaviour

        True: Retrain one estimator on full data
        False: Continue as an ensemble trained on separate folds

    verbose : Printing of fold scores, bool or int
    '''

    def __init__(self, est, fold, metric, refit_full=False, verbose=1):
        # Metrics in this list score probability estimates, not hard labels.
        proba_metric = metric.__name__ in ['roc_auc_score']
        regressor = issubclass(type(est), RegressorMixin)

        if proba_metric and regressor:
            raise ValueError('Cannot be both a regressor and use a metric that '
                             'requires `predict_proba`')

        if proba_metric and not hasattr(est, 'predict_proba'):
            raise ValueError('Metric `{}` requires a classifier that implements '
                             '`predict_proba`'.format(metric.__name__))

        self.est = est
        self.fold = fold
        self.metric = metric
        self.is_regressor_ = regressor
        self.is_proba_metric_ = proba_metric
        self.refit_full = refit_full
        self.verbose = verbose

    def fit(self, X, y):
        ''' Fitting of the estimator

        Fits one estimator per fold, collecting out-of-fold predictions and
        per-fold scores. When ``refit_full`` is set, a single estimator is
        finally retrained on the complete data set.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The training input samples.

        y : array-like, shape (n_samples,)
            The target values.

        Returns
        -------
        self : object
            Returns self.
        '''
        X, y = check_X_y(X, y, accept_sparse=True)

        if not self.refit_full:
            # Keep the per-fold estimators around to act as an ensemble
            self.ests_ = []

        self.oof_scores_ = []

        if not self.is_regressor_:
            self.n_classes_ = np.unique(y).shape[0]

        if self.is_proba_metric_:
            # Probability metrics need one column per class
            self.oof_y_ = np.zeros((X.shape[0], self.n_classes_),
                                   dtype=np.float64)
        else:
            self.oof_y_ = np.zeros_like(y)

        current_fold = 1
        for fold_idx, oof_idx in self.fold.split(X, y):

            X_fold, y_fold = X[fold_idx], y[fold_idx]
            X_oof, y_oof = X[oof_idx], y[oof_idx]

            if self.refit_full:
                # Same estimator object each round; it is refit on the full
                # data after the loop anyway.
                est = self.est
            else:
                est = copy(self.est)

            est.fit(X_fold, y_fold)

            if self.is_proba_metric_:
                y_oof_ = est.predict_proba(X_oof)
                self.oof_y_[oof_idx] = y_oof_
                # Score on the positive-class column — assumes binary
                # classification for probability metrics. TODO confirm.
                y_oof_ = y_oof_[:, 1]
            else:
                y_oof_ = est.predict(X_oof)
                self.oof_y_[oof_idx] = y_oof_

            oof_score = self.metric(y_oof, y_oof_)
            self.oof_scores_.append(oof_score)

            if not self.refit_full:
                self.ests_.append(est)

            if self.verbose:
                print('Finished fold {} with score: {:.4f}'.format(current_fold,
                                                                   oof_score))
            current_fold += 1

        if self.refit_full:
            self.est.fit(X, y)

        if self.is_proba_metric_:
            self.oof_score_ = self.metric(y, self.oof_y_[:, 1])
        else:
            self.oof_score_ = self.metric(y, self.oof_y_)

        if self.verbose:
            print('Finished with a total score of: {:.4f}'.format(self.oof_score_))

        self.n_features_ = X.shape[1]
        self.n_folds_ = self.fold.n_splits

        return self

    def predict_proba(self, X):
        ''' Probability prediction

        Ensembles average the fold estimators' probabilities; with
        ``refit_full`` the single refit estimator predicts directly.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of probabilities, floats.
        '''
        if not hasattr(self.est, 'predict_proba'):
            raise ValueError('Base estimator does not support `predict_proba`')

        X = check_array(X, accept_sparse=True)
        check_is_fitted(self, 'n_features_')

        if self.refit_full:
            y_ = self.est.predict_proba(X)
        else:
            y_ = np.zeros((X.shape[0], self.n_classes_), dtype=np.float64)
            for est in self.ests_:
                y_ += est.predict_proba(X) / self.n_folds_
        return y_

    def predict(self, X):
        ''' Prediction

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of predictions.
        '''
        # Classifier ensembles vote through `predict_proba`, so the base
        # estimator must implement it.
        if not (self.is_regressor_ or self.refit_full) and \
           not hasattr(self.est, 'predict_proba'):
            raise ValueError('Can only ensemble classifiers that implement '
                             '`predict_proba`')

        X = check_array(X, accept_sparse=True)
        check_is_fitted(self, 'n_features_')

        # Fixed branch routing: previously classifier ensembles averaged raw
        # label predictions while regressor ensembles fell through to
        # argmax(predict_proba) — inverted relative to the guard above.
        if self.refit_full:
            # Single estimator refit on the full data predicts directly.
            y_ = self.est.predict(X)
        elif self.is_regressor_:
            # Regressor ensemble: average the fold predictions.
            y_ = np.zeros((X.shape[0],), dtype=np.float64)
            for est in self.ests_:
                y_ += est.predict(X) / self.n_folds_
        else:
            # Classifier ensemble: class with the highest averaged probability.
            y_ = np.argmax(self.predict_proba(X), axis=1)
        return y_
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,425 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/models/fold_xgb.py | '''
-------------------------------------------------------
Fold XGBoost - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from copy import copy
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.validation import check_X_y, check_array, check_is_fitted
import numpy as np
class FoldXGBoost(BaseEstimator):
    ''' Fold XGBoost

    Meta estimator that performs cross validation over k folds on a XGBoost
    estimator. Can optionally be used as an ensemble of k estimators.

    Parameters
    ----------
    xgb : Base estimator

    fold : Fold cross validation object

    metric : Evaluations metric, func(y, y_)

    fit_params : Parameters that should be fed to estimator during fit.
        Dictionary (string -> object) or None for no extra parameters.

    refit_full : Flag for post fit behaviour

        True: Retrain one estimator on full data
        False: Continue as an ensemble trained on separate folds

    refit_params : Parameters that should be fed to estimator during refit.
        Dictionary (string -> object) or None for no extra parameters.

    verbose : Printing of fold scores, bool or int
    '''

    def __init__(self, xgb, fold, metric, fit_params=None, refit_full=False,
                 refit_params=None, verbose=1):
        # Metrics in this list score probability estimates, not hard labels.
        is_proba_metric = metric.__name__ in ['roc_auc_score']
        is_regressor = issubclass(type(xgb), RegressorMixin)

        if is_proba_metric and is_regressor:
            raise ValueError('Cannot be both a regressor and use a metric that '
                             'requires `predict_proba`')

        if is_proba_metric and not hasattr(xgb, 'predict_proba'):
            raise ValueError('Metric `{}` requires a classifier that implements '
                             '`predict_proba`'.format(metric.__name__))

        self.xgb = xgb
        # `None` sentinels replace the previous mutable `{}` defaults, which
        # are shared between calls (classic Python pitfall). Behaviour for
        # callers passing dicts is unchanged.
        self.fit_params = {} if fit_params is None else fit_params
        self.fold = fold
        self.metric = metric
        self.is_regressor_ = is_regressor
        self.is_proba_metric_ = is_proba_metric
        self.refit_full = refit_full
        self.refit_params = {} if refit_params is None else refit_params
        self.verbose = verbose

    def fit(self, X, y):
        ''' Fitting of the estimator

        Fits one XGBoost model per fold (using the out-of-fold slice as the
        early-stopping eval set). When ``refit_full`` is set, a single model
        is finally retrained on the complete data set.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The training input samples.

        y : array-like, shape (n_samples,)
            The target values.

        Returns
        -------
        self : object
            Returns self.
        '''
        X, y = check_X_y(X, y, accept_sparse=True, force_all_finite=False)

        if not self.refit_full:
            # Keep the per-fold models around to act as an ensemble
            self.xgbs_ = []

        self.oof_scores_ = []

        if not self.is_regressor_:
            self.n_classes_ = np.unique(y).shape[0]

        if self.is_proba_metric_:
            # Probability metrics need one column per class
            self.oof_y_ = np.zeros((X.shape[0], self.n_classes_),
                                   dtype=np.float64)
        else:
            self.oof_y_ = np.zeros_like(y)

        current_fold = 1
        for fold_idx, oof_idx in self.fold.split(X, y):

            X_fold, y_fold = X[fold_idx], y[fold_idx]
            X_oof, y_oof = X[oof_idx], y[oof_idx]

            if self.refit_full:
                xgb = self.xgb
            else:
                xgb = copy(self.xgb)

            # The out-of-fold slice doubles as the eval set for early stopping.
            xgb.fit(X_fold, y_fold,
                    sample_weight=self.fit_params.get('sample_weight'),
                    eval_set=[(X_oof, y_oof)],
                    eval_metric=self.fit_params.get('eval_metric'),
                    early_stopping_rounds=self.fit_params.get('early_stopping_rounds'),
                    verbose=self.fit_params.get('verbose', self.verbose),
                    xgb_model=self.fit_params.get('xgb_model'),
                    sample_weight_eval_set=self.fit_params.get('sample_weight_eval_set'),
                    callbacks=self.fit_params.get('callbacks'))

            if self.is_proba_metric_:
                y_oof_ = xgb.predict_proba(X_oof)
                self.oof_y_[oof_idx] = y_oof_
                # Score on the positive-class column — assumes binary
                # classification for probability metrics. TODO confirm.
                y_oof_ = y_oof_[:, 1]
            else:
                y_oof_ = xgb.predict(X_oof)
                self.oof_y_[oof_idx] = y_oof_

            oof_score = self.metric(y_oof, y_oof_)
            self.oof_scores_.append(oof_score)

            if not self.refit_full:
                self.xgbs_.append(xgb)

            if self.verbose:
                print('Finished fold {} with score: {:.4f}'.format(current_fold,
                                                                   oof_score))
            current_fold += 1

        if self.refit_full:
            # Retrain a single model on the COMPLETE data set, as documented.
            # (Previously this refit on the last fold's data only.)
            self.xgb.fit(X, y,
                         sample_weight=self.refit_params.get('sample_weight'),
                         eval_set=self.refit_params.get('eval_set'),
                         eval_metric=self.refit_params.get('eval_metric'),
                         early_stopping_rounds=self.refit_params.get('early_stopping_rounds'),
                         verbose=self.refit_params.get('verbose', self.verbose),
                         xgb_model=self.refit_params.get('xgb_model'),
                         sample_weight_eval_set=self.refit_params.get('sample_weight_eval_set'),
                         callbacks=self.refit_params.get('callbacks'))

        if self.is_proba_metric_:
            self.oof_score_ = self.metric(y, self.oof_y_[:, 1])
        else:
            self.oof_score_ = self.metric(y, self.oof_y_)

        if self.verbose:
            print('Finished with a total score of: {:.4f}'.format(self.oof_score_))

        self.n_features_ = X.shape[1]
        self.n_folds_ = self.fold.n_splits

        return self

    def predict_proba(self, X):
        ''' Probability prediction

        Ensembles average the fold models' probabilities; with ``refit_full``
        the single refit model predicts directly.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of probabilities, floats.
        '''
        if not hasattr(self.xgb, 'predict_proba'):
            raise ValueError('Base estimator does not support `predict_proba`')

        X = check_array(X, accept_sparse=True, force_all_finite=False)
        check_is_fitted(self, 'n_features_')

        if self.refit_full:
            y_ = self.xgb.predict_proba(X)
        else:
            y_ = np.zeros((X.shape[0], self.n_classes_), dtype=np.float64)
            for xgb in self.xgbs_:
                y_ += xgb.predict_proba(X) / self.n_folds_
        return y_

    def predict(self, X):
        ''' Prediction

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The prediction input samples.

        Returns
        -------
        y : ndarray, shape (n_samples,)
            Returns an array of predictions.
        '''
        # Classifier ensembles vote through `predict_proba` (consistent with
        # FoldEstimator), so require it up front for a clear error.
        if not (self.is_regressor_ or self.refit_full) and \
           not hasattr(self.xgb, 'predict_proba'):
            raise ValueError('Can only ensemble classifiers that implement '
                             '`predict_proba`')

        X = check_array(X, accept_sparse=True, force_all_finite=False)
        check_is_fitted(self, 'n_features_')

        # Fixed branch routing: previously classifier ensembles averaged raw
        # label predictions while regressor ensembles fell through to
        # argmax(predict_proba), which regressors cannot provide.
        if self.refit_full:
            y_ = self.xgb.predict(X)
        elif self.is_regressor_:
            # Regressor ensemble: average the fold predictions.
            y_ = np.zeros((X.shape[0],), dtype=np.float64)
            for xgb in self.xgbs_:
                y_ += xgb.predict(X) / self.n_folds_
        else:
            # Classifier ensemble: class with the highest averaged probability.
            y_ = np.argmax(self.predict_proba(X), axis=1)
        return y_
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,426 | simon-larsson/extrakit-learn | refs/heads/master | /xklearn/preprocessing/multi_column_encoder.py | '''
-------------------------------------------------------
Multi-Column Encoder - extrakit-learn
Author: Simon Larsson <larssonsimon0@gmail.com>
License: MIT
-------------------------------------------------------
'''
from copy import copy
import numpy as np
from sklearn.utils.validation import check_array, check_is_fitted
from sklearn.base import BaseEstimator, TransformerMixin
class MultiColumnEncoder(BaseEstimator, TransformerMixin):
    ''' Multi-Column Encoder

    Use column encoders, such as sklearn's LabelEncoder, on multiple columns.

    Parameters
    ----------
    enc : Base encoder that should be used on columns

    columns : Indices or mask to select columns for encoding, list-like
        `columns=None` encodes all columns.
    '''

    def __init__(self, enc, columns=None):
        self.enc = enc
        self.columns = columns

    def fit(self, X, y=None):
        ''' Fitting of the transformer

        Fits one independent copy of the base encoder per selected column.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)

        y : None
            There is no need of a target in a transformer, yet the pipeline API
            requires this parameter.

        Returns
        -------
        self : object
            Returns self.
        '''
        X = check_array(X, accept_sparse=True)

        # NOTE(review): normalising the constructor parameter in place is not
        # ideal sklearn practice (params should stay as passed), but the rest
        # of the class relies on the index form.
        self.columns = to_column_indices(X, self.columns)

        self.encs_ = []
        if self.columns is None:
            # Encode every column with its own encoder copy
            for col in X.T:
                enc = copy(self.enc)
                enc.fit(col, y)
                self.encs_.append(enc)
        elif len(self.columns) > 0:
            for col in X[:, self.columns].T:
                enc = copy(self.enc)
                enc.fit(col, y)
                self.encs_.append(enc)

        self.n_features_ = X.shape[1]
        self.n_encoders_ = len(self.encs_)

        # `fit` should always return `self`
        return self

    def transform(self, X):
        ''' Applying transformation on the data

        Each selected column is replaced in place by its encoder's output.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The class values.

        Returns
        -------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The encoded values.
        '''
        X = check_array(X, accept_sparse=True)
        check_is_fitted(self, 'n_encoders_')

        if self.columns is None:
            for i, col in enumerate(X.T):
                enc = self.encs_[i]
                X[:, i] = enc.transform(col)
        else:
            # Encoders were fitted in `self.columns` order during fit.
            for i, col_idx in enumerate(self.columns):
                enc = self.encs_[i]
                X[:, col_idx] = enc.transform(X[:, col_idx])
        return X

    def fit_transform(self, X, y=None, **fit_params):
        ''' Combined fit and transform

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The class values.

        y : None
            There is no need of a target in a transformer, yet the pipeline API
            requires this parameter.

        fit_params : Parameters that should be fed to base encoder during fit.
            Dictionary (string -> object)

        Returns
        -------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            The encoded values.
        '''
        X = check_array(X, accept_sparse=True)

        self.columns = to_column_indices(X, self.columns)

        self.encs_ = []
        if self.columns is None:
            for i, col in enumerate(X.T):
                enc = copy(self.enc)
                # Bug fix: fit_params must be unpacked as keyword arguments —
                # passing the dict positionally collides with `y`-style
                # signatures and raises TypeError.
                X[:, i] = enc.fit_transform(col, y, **fit_params)
                self.encs_.append(enc)
        else:
            for col_idx in self.columns:
                enc = copy(self.enc)
                X[:, col_idx] = enc.fit_transform(X[:, col_idx], y, **fit_params)
                self.encs_.append(enc)

        self.n_features_ = X.shape[1]
        self.n_encoders_ = len(self.encs_)

        return X
def to_column_indices(X, columns):
    ''' Normalise a column selector into integer column indices.

    A selector whose length equals the number of columns in ``X`` is treated
    as a boolean mask and converted with ``np.where``; anything else is
    returned as a flat index array. ``None`` means "all columns".

    NOTE: an index list that happens to cover every column is indistinguishable
    from a mask here — assumed not to occur in practice; verify at call sites.
    '''
    if columns is None:
        return None
    selector = np.array(columns).reshape(-1)
    if selector.shape[0] == X.shape[1]:
        return np.where(selector)[0]
    return selector
| {"/xklearn/preprocessing/category_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/target_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/count_encoder.py": ["/xklearn/preprocessing/utils.py"], "/xklearn/preprocessing/__init__.py": ["/xklearn/preprocessing/category_encoder.py", "/xklearn/preprocessing/multi_column_encoder.py", "/xklearn/preprocessing/target_encoder.py", "/xklearn/preprocessing/count_encoder.py"], "/xklearn/models/__init__.py": ["/xklearn/models/fold_estimator.py", "/xklearn/models/fold_lgbm.py", "/xklearn/models/stack_classifier.py", "/xklearn/models/stack_regressor.py"]} |
64,428 | tcmoore3/ObitScraper | refs/heads/master | /master.py | # Python 2.7
import csv
import datetime
import random
import sys
import time
import pandas as pd
import requests
import obitscrape
# ---- Configuration and run state (Python 2 script) -------------------------
start_time = datetime.datetime.now()
# Base delay (seconds) between web lookups; jittered each iteration below.
avoid_block_seconds = 2
# Days added to the discharge date when matching obituary death dates
# (negative: accept deaths up to two weeks before discharge).
discharge_date_cushion = -14
# CLI arguments: input CSV, output CSV, and whether to overwrite the output.
input_file = sys.argv[1]
output_file = sys.argv[2]
overwrite_output = sys.argv[3].lower() in ['true', '1', 't', 'y', 'yes']
previous_global_ids = []
skipped_lookups = 0
successful_lookups = 0
complete_lookups = 0
df = pd.read_csv(input_file)
# One dict per patient row.
pats = df.to_dict('records')
spaces_between_output_headers = 3
console_results_headers = ['Success', 'Complete', 'Skipped', 'Remaining', 'Total',
                           'Success Rate', 'Completion Rate', 'Time Elapsed']

# ---- Prepare the output file and collect resume state ----------------------
if overwrite_output:
    obitscrape.csvOutput(output_file, 'wb', 'header')
try:
    # Collect ids already written so interrupted runs can be resumed.
    with open(output_file, 'rb') as csvfile:
        reader = csv.reader(csvfile)
        reader.next()
        for row in reader:
            previous_global_ids.append(row[0])
except:
    # NOTE(review): bare except — presumably intended for a missing/empty
    # output file, but it also hides unrelated errors; consider catching
    # (IOError, StopIteration) explicitly.
    previous_global_ids = []
    obitscrape.csvOutput(output_file, 'wb', 'header')
obitscrape.printResults('header', console_results_headers, spaces_between_output_headers)

# ---- Main lookup loop ------------------------------------------------------
for row in pats:
    prior_id = str(row['global_member_id']) in previous_global_ids
    if not overwrite_output and prior_id:
        # Already present from a previous run: count as complete and skip.
        complete_lookups += 1
        skipped_lookups += 1
        continue
    results = obitscrape.findDeathDate(row['global_member_id'], row['first_name'], row['last_name'], row['person_birth_date'], discharge_date_cushion, row['discharge_date'])
    # Jitter the inter-request delay so the timing is less regular.
    avoid_block_seconds += random.uniform(-0.5, 0.5)
    delay_until= datetime.datetime.now() + datetime.timedelta(seconds=avoid_block_seconds)
    obitscrape.csvOutput(output_file, 'a', 'results', results)
    complete_lookups += 1
    if results['death_date'] != '':
        successful_lookups += 1
    try:
        # Success rate over actually-performed (non-skipped) lookups.
        succ_pct = str(int(float(successful_lookups) / float(complete_lookups - skipped_lookups) * 100)) + '%'
    except ZeroDivisionError:
        # Everything so far was skipped: nothing to rate yet.
        succ_pct = '0%'
    rslt = [str(successful_lookups),
            str(complete_lookups),
            str(skipped_lookups),
            str(len(df) - complete_lookups),
            str(len(df)),
            succ_pct,
            str(int(float(complete_lookups) / float(len(df)) * 100)) + '%',
            str(datetime.datetime.now() - start_time)]
    obitscrape.printResults('results', console_results_headers, spaces_between_output_headers, start_time, rslt)
    # Wait out the remainder of the anti-block delay before the next lookup.
    while datetime.datetime.now() < delay_until:
        time.sleep(0.25)
    else:
        # NOTE(review): this `while`-`else: continue` is a no-op — the `else`
        # runs after the wait finishes and the for loop advances regardless.
        continue
64,429 | tcmoore3/ObitScraper | refs/heads/master | /obitscrape.py | # Python 2.7
import csv
import datetime
import json
import re
import requests
def findDeathDate(global_member_id, first_name, last_name, dob, discharge_date_cushion, discharge_date='NULL', state=''):
url = ('http://www.tributes.com/search/obituaries/?solr=&first=' + first_name + '&last=' + last_name
+ '&city=&state=' + state + '&search_type=Range+2010-Now&dod=&keywords=')
usr_agent = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36'}
r = requests.get(url, headers=usr_agent).text
u = re.search('item_ids = (.+?);', r)
jdata = json.loads(u.group(1))
death_date = ''
tributes_id = ''
tributes_url = ''
match_count = 0
results = {'global_member_id': global_member_id, 'first_name': first_name, 'last_name': last_name, 'death_date': death_date,
'time_generated': datetime.datetime.now(), 'match_count': match_count, 'tributes_id': tributes_id, 'tributes_url': tributes_url}
year, month, day = discharge_date.split('-')
discharge_date = datetime.date(int(year), int(month), int(day))
loose_discharge_date = discharge_date + datetime.timedelta(days=discharge_date_cushion)
for row in jdata:
year, month, day = row['dod'].split('/')
obit_death_date = datetime.date(int(year), int(month), int(day))
match_first_name = row['first_name'] == first_name.lower()
match_last_name = row['last_name'] == last_name.lower()
match_dob = row['dob'] == dob.replace('-', '/')
match_dod = obit_death_date >= loose_discharge_date
if match_first_name and match_last_name and match_dob and match_dod:
match_count += 1
results['death_date'] = obit_death_date
results['match_count'] = match_count
search_id = row['id']
real_id = re.search('search_item_' + str(search_id) + '(.*?) class="serif', r, re.DOTALL).group(1)
real_id = re.search('/obituary/show/[^0-9]*?-([0-9]*?)"', real_id).group(1)
try:
results['tributes_id'] = real_id
except:
print 'Parser broke when looking for ID for ' + first_name + ' ' + last_name + '. DOB = ' + str(dob) + '. Global = ' + str(global_member_id)
results['tributes_id'] = 'ERROR: PARSER FAIL'
results['tributes_url'] = 'ERORR: PARSER FAIL. NEEDS MANUAL LOOKUP'
continue
results['tributes_url'] = 'http://www.tributes.com/obituary/show/' + real_id
return results
def csvOutput(output_file, file_mode, write_mode, results=''):
    ''' Append to the scrape output CSV.

    `write_mode` 'header' writes the column header row; 'results' writes one
    lookup-result dict as a row. `file_mode` is passed straight to `open`.
    '''
    fieldnames = ['global_member_id', 'first_name', 'last_name', 'death_date',
                  'time_generated', 'match_count', 'tributes_id', 'tributes_url']
    with open(output_file, file_mode) as f:
        writer = csv.DictWriter(f, lineterminator='\n', fieldnames=fieldnames)
        if write_mode == 'header':
            writer.writeheader()
        if write_mode == 'results':
            writer.writerow(results)
def printResults(write_mode, console_results_headers, spaces_between_output_headers, start_time=None, rslt=None):
    ''' Print a console progress table (Python 2 print statements).

    `write_mode` 'header' prints the column titles once; 'results' prints one
    row of values aligned under those titles. `rslt` must parallel
    `console_results_headers`; `start_time` is used for the elapsed column.
    '''
    # Work on a copy so the caller's header list is never mutated.
    hdrs = console_results_headers[:]
    if write_mode == 'header':
        print ''
        # Pair each title with its width so result rows can align under it.
        for idx, val in enumerate(hdrs):
            hdrs[idx] = [val, len(val)]
        for h in hdrs:
            if h == hdrs[len(hdrs) - 1]:
                # Last column: no trailing comma, so the line terminates.
                print h[0]
            else:
                # Trailing comma keeps Python 2 printing on the same line.
                print h[0] + ' ' * spaces_between_output_headers,
    if write_mode == 'results':
        # Triples of [title, title width, value] drive column alignment.
        for idx, val in enumerate(hdrs):
            hdrs[idx] = [val, len(val), rslt[idx]]
        for h in hdrs:
            if h == hdrs[len(hdrs) - 1]:
                # Elapsed time is recomputed here rather than taken from rslt.
                print str(datetime.datetime.now() - start_time)
            else:
                # Pad each value to its header's width plus the gap.
                print str(h[2]) + ' ' * (spaces_between_output_headers + h[1] - len(str(h[2]))),
| {"/master.py": ["/obitscrape.py"]} |
64,465 | Daniil-Kost/url_manager | refs/heads/development | /url_app/tests.py | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.test import APIClient
from url_manager.urls import api_version
from url_app.models import Url
from .test_fixtures.mock_for_tests import URLS, post_success_data
from url_app import util
# Create your tests here.
class ApiResourcesTests(TestCase):
    """API tests for the url resource: authentication, listing, creation,
    retrieval and deletion through the versioned REST endpoints."""

    def setUp(self):
        # Fresh API client and fixture data for every test.
        self.client = APIClient()
        for url in URLS:
            # URLS entries appear to be (url, short_url) pairs; title is a
            # placeholder — TODO confirm against the fixture module.
            record = Url.objects.create(url=url[0], short_url=url[1], title=" ")
            record.save()
        password = "TestPass!2222"
        user = User.objects.create_user(username="test_user", password=password)
        user.save()
        self.urls = Url.objects.all()
        for url in self.urls:
            # Attach every fixture url to the test user.
            util.save_user_urls(user, url)
        # Obtain a DRF auth token and authenticate the client with it.
        data = {"username": user.username, "password": password}
        response = self.client.post(f"/api-token-auth/", data, format='json')
        token = response.data["token"]
        self.headers = {"Content-Type": "application/json", "Authorization": f"Token {token}"}
        self.client.login(username=user.username, password=password)
        self.client.credentials(HTTP_AUTHORIZATION=f'Token {token}')

    def test_get_all_urls_for_user_success(self):
        # The authenticated user should see all three fixture urls.
        response = self.client.get(f"/{api_version}urls", headers=self.headers)
        data = list(response.data)
        self.assertEqual(3, len(data))
        self.assertEqual(200, response.status_code)
        self.assertIn("short_url", data[0])

    def test_get_all_urls_for_user_failed_with_invalid_token(self):
        # A bogus token must yield 401 Unauthorized.
        self.client.credentials(HTTP_AUTHORIZATION=f'Token dnsjfdfjbfjdsbdfbjdbf')
        response = self.client.get(f"/{api_version}urls", headers=self.headers)
        self.assertEqual(401, response.status_code)

    def test_post_new_url_success(self):
        response = self.client.post(f"/{api_version}urls", post_success_data, format='json')
        uuid = response.data["uuid"]
        # The newly created record must appear in the user's url list.
        get_response = self.client.get(f"/{api_version}urls", headers=self.headers)
        user_uuids_list = [resp['uuid'] for resp in get_response.data]
        self.assertEqual(201, response.status_code)
        self.assertIn("short_url", response.data)
        self.assertIn("uuid", response.data)
        self.assertIn("title", response.data)
        self.assertIn(uuid, user_uuids_list)

    def test_post_new_url_failed_with_invalid_data(self):
        # Missing 'url' key must yield 400 with an explanatory message.
        response = self.client.post(f"/{api_version}urls", {}, format='json')
        self.assertEqual(400, response.status_code)
        self.assertIn("error", response.data)
        self.assertEqual(response.data["error"], "Invalid data. Please define 'url' in your request data.")

    def test_get_url_by_uuid_success(self):
        uuid = self.urls[0].uuid
        response = self.client.get(f"/{api_version}urls/{uuid}", headers=self.headers)
        data = dict(response.data)
        self.assertEqual(200, response.status_code)
        self.assertEqual(data["uuid"], str(uuid))
        self.assertIn("short_url", data)
        self.assertIn("title", data)
        self.assertIn("url", data)

    def test_delete_url_success(self):
        # Delete, then confirm the resource is gone (404 on subsequent GET).
        uuid = self.urls[0].uuid
        response = self.client.delete(f"/{api_version}urls/{uuid}", headers=self.headers)
        get_response = self.client.get(f"/{api_version}urls/{uuid}", headers=self.headers)
        self.assertEqual(204, response.status_code)
        self.assertEqual(404, get_response.status_code)
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,466 | Daniil-Kost/url_manager | refs/heads/development | /url_app/views.py | from crispy_forms.layout import Submit
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import FormActions
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django import forms
from django.contrib.auth import login
from django.views.generic import UpdateView, DeleteView
from django.forms import ModelForm
from django.urls import reverse, reverse_lazy
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import redirect
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from url_app.models import Url
from url_app import util
from url_app.serializer import UrlSerializer
from .tokens import account_activation_token
def account_activation_sent(request):
    """Render the page telling the user an activation email was sent."""
    return render(request, 'pages/account_activation_sent.html')
def activate(request, uidb64, token):
    """Activate a user account from an emailed confirmation link.

    Decodes the base64 user id, verifies the token, and on success marks
    the user active/confirmed and logs them in.
    """
    try:
        user_pk = force_text(urlsafe_base64_decode(uidb64))
        user = User.objects.get(pk=user_pk)
    except (TypeError, ValueError, OverflowError, User.DoesNotExist):
        user = None
    # Guard clause: bad uid or bad token -> invalid-activation page.
    if user is None or not account_activation_token.check_token(user, token):
        return render(request, 'pages/account_activation_invalid.html')
    user.is_active = True
    user.profile.email_confirmed = True
    user.save()
    login(request, user)
    return redirect('home')
def signup(request):
    """Register a new user and send the account-activation email.

    The account stays inactive (`is_active = False`) until the user clicks
    the emailed activation link (see ``activate``).
    """
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        if form.is_valid():
            user = form.save()
            user.refresh_from_db()  # load the profile instance created by the signal
            user.profile.name = form.cleaned_data.get('username')
            user.is_active = False  # stays inactive until email confirmation
            user.save()
            site = get_current_site(request)
            subject = 'Activate Your MySite Account'
            message = render_to_string('pages/account_activation_email.html', {
                'user': user,
                'domain': site.domain,
                'uuid': urlsafe_base64_encode(force_bytes(user.pk)).decode(),
                'token': account_activation_token.make_token(user),
            })
            user.email_user(subject, message)
            return redirect('account_activation_sent')
    else:
        form = SignUpForm()
    # GET, or a POST with an invalid (bound) form.
    return render(request, 'pages/signup.html', {'form': form})
@login_required(login_url='/login/')
def url_get_add(request):
    """Home view: list the logged-in user's urls and handle short-url creation.

    GET renders a paginated list; POST with 'create_url' validates the input
    via util.prepare_url_data and either saves the new Url or re-renders the
    list with the validation errors.
    """
    user = request.user
    urls = Url.objects.filter(profile=user.profile)
    page = request.GET.get('page')
    if request.method == "POST":
        if request.POST.get('create_url') is not None:
            request_data = {'url': request.POST.get('url'), 'short_url': request.POST.get('short_url')}
            data, errors = util.prepare_url_data(request_data)
            if not errors:
                url = Url(url=data["url"],
                          title=data["title"],
                          short_url=data["short_url"])
                url.save()
                util.save_user_urls(user, url)
                # Redirect-after-post with a status message for the template.
                return HttpResponseRedirect(
                    '%s?status_message=Url successfully added!' %
                    reverse('home'))
            else:
                # Re-render the list together with the validation errors.
                my_urls = util.paginate(urls, page, 5)
                return render(request, 'pages/main.html',
                              {'my_urls': my_urls,
                               'errors': errors})
        # NOTE(review): a POST without 'create_url' falls through and returns
        # None (HTTP 500) — confirm this path is unreachable from the template.
    else:
        my_urls = util.paginate(urls, page, 5)
        return render(request, 'pages/main.html',
                      {'my_urls': my_urls})
class UrlUpdateForm(ModelForm):
    """Crispy-forms powered edit form for a Url (all fields except the slug)."""
    class Meta:
        model = Url
        exclude = ("slug",)
    def __init__(self, *args, **kwargs):
        # NOTE(review): requires the Url passed as the 'instance' keyword
        # argument (UpdateView supplies it); a positional instance would
        # raise KeyError below — confirm no other callers exist.
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        # Submit back to this object's own edit view.
        self.helper.form_action = reverse('url_edit',
                                          kwargs={'pk': kwargs['instance'].id})
        self.helper.form_method = 'POST'
        self.helper.form_class = 'form-horizontal'
        self.helper.help_text_inline = True
        self.helper.html5_required = True
        self.helper.label_class = 'col-sm-4 control label'
        self.helper.field_class = 'col-sm-8'
        # Append Save/Cancel buttons to the generated layout.
        self.helper.layout.append(FormActions(
            Submit('add_button', 'Save',
                   css_class="btn save btn-primary"),
            Submit('cancel_button', 'Cancel',
                   css_class="btn cancel btn-danger"), ))
        # The domain field is shown but not editable.
        self.fields['domain'].widget.attrs = {'disabled': 'disabled'}
class UrlUpdateView(UpdateView):
    """Edit an existing Url; the Cancel button returns to the home page."""
    model = Url
    template_name = 'pages/url_edit.html'
    form_class = UrlUpdateForm
    success_url = '/'
    success_message = "Url updated successfully !"
    def post(self, request, *args, **kwargs):
        # Cancel short-circuits straight back home without validating.
        if not request.POST.get('cancel_button'):
            return super().post(request, *args, **kwargs)
        return HttpResponseRedirect(reverse('home'))
class UrlDeleteView(DeleteView):
    """Confirmation page and deletion for a single Url."""
    model = Url
    template_name = 'pages/url_delete.html'
    success_url = reverse_lazy('home')
    success_message = "Url successfully deleted !"
    def delete(self, request, *args, **kwargs):
        # Pure pass-through to DeleteView.delete; kept as an explicit
        # extension point (adds no behavior of its own).
        return super().delete(request, *args, **kwargs)
class UrlRedirectView(UpdateView):
    """Resolve a short url, count the click, and redirect to the target."""
    model = Url
    template_name = 'pages/base.html'
    exclude = ("",)
    def get(self, request, **kwargs):
        # The route matches "/<short_url>/": strip the surrounding slashes.
        path = request.path
        short = path[1:len(path) - 1]
        try:
            obj = Url.objects.get(short_url=short)
        except ObjectDoesNotExist:
            # BUG FIX: an unknown short url previously raised an unhandled
            # DoesNotExist (HTTP 500); return a proper 404 instead.
            raise Http404
        obj.clicks += 1
        obj.save()
        return redirect(obj.url)
class UrlList(APIView):
    """
    List of all data with urls, or create a new short url.
    """
    permission_classes = (IsAuthenticated,)
    def get(self, request):
        """Return every url owned by the requesting user's profile."""
        owned = Url.objects.filter(profile=request.user.profile)
        return Response(UrlSerializer(owned, many=True).data)
    def post(self, request):
        """Create a short url from the posted data (requires 'url')."""
        if not request.data.get("url"):
            return Response({"error": "Invalid data. Please define 'url' in your request data."},
                            status=status.HTTP_400_BAD_REQUEST)
        data, errors = util.prepare_url_data(request.data)
        serializer = UrlSerializer(data=data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        # Re-fetch the persisted row by uuid so it can be linked to the user.
        created = Url.objects.get(uuid=dict(serializer.data)["uuid"])
        util.save_user_urls(request.user, created)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class UrlDetail(APIView):
    """Get or delete selected data by id"""
    permission_classes = (IsAuthenticated,)
    def _get_object(self, uuid):
        # Translate a missing record into an HTTP 404.
        try:
            return Url.objects.get(uuid=uuid)
        except ObjectDoesNotExist:
            raise Http404
    def get(self, request, uuid):
        """Return the serialized Url identified by *uuid*."""
        return Response(UrlSerializer(self._get_object(uuid)).data)
    def delete(self, request, uuid):
        """Delete the Url identified by *uuid* and return 204."""
        self._get_object(uuid).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class SignUpForm(UserCreationForm):
    """Registration form: Django's UserCreationForm plus a required email."""
    email = forms.EmailField(max_length=254, help_text='Required. Inform a valid email address.')
    class Meta:
        model = User
        fields = ('username', 'email', 'password1', 'password2',)
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,467 | Daniil-Kost/url_manager | refs/heads/development | /url_manager/urls.py | """url_manager URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.urls import path
from django.contrib.auth import views as auth_views
from django.contrib import admin
from rest_framework.authtoken.views import obtain_auth_token
from url_app.views import (
url_get_add,
signup,
account_activation_sent,
activate,
UrlUpdateView,
UrlDeleteView,
UrlRedirectView,
UrlList,
UrlDetail,
)
api_version = "api/v1/"  # Prefix shared by all REST endpoints.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', url_get_add, name='home'),
    path('url/<int:pk>/edit/', UrlUpdateView.as_view(), name='url_edit'),
    path('url/<int:pk>/delete/', UrlDeleteView.as_view(), name='url_delete'),
    path(f'{api_version}urls', UrlList.as_view(), name='api_url_list'),
    path(f'{api_version}urls/<uuid:uuid>', UrlDetail.as_view()),
    path('signup/', signup, name='signup'),
    path('login/', auth_views.LoginView.as_view(), name='login'),
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    path('api-token-auth/', obtain_auth_token, name='api_token_auth'),
    path('api-auth/', include('rest_framework.urls')),
    path('account/account_activation_sent/', account_activation_sent, name='account_activation_sent'),
    # Catch-all single-segment short-url redirect. Order matters: it must
    # stay below the named single-segment routes above or it would shadow them.
    url(r'^(?P<slug>[-\w]+)/$', UrlRedirectView.as_view(),
        name='url_redirect'),
    url(r'^account/activate/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        activate, name='activate'),
]
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,468 | Daniil-Kost/url_manager | refs/heads/development | /url_app/test_fixtures/mock_for_tests.py | from url_manager.settings import DEFAULT_DOMAIN
from url_app import util
# (long_url, generated_short_url) fixture pairs used to seed test data.
URLS = [
    ("http://devacademy.ru/posts/ochered-soobschenij-i-asinhronnyie-zadachi-s-pomoschyu-celery-i-rabbitmq/",
     f"{DEFAULT_DOMAIN}{util.short_url_generator()}"),
    ("https://docs.djangoproject.com/en/2.1/ref/models/querysets/",
     f"{DEFAULT_DOMAIN}{util.short_url_generator()}"),
    ("https://stackoverflow.com/questions/9943504/right-to-left-string-replace-in-python",
     f"{DEFAULT_DOMAIN}{util.short_url_generator()}")
]
# Minimal valid payload for the "create url" API success test.
post_success_data = {"url": "https://www.django-rest-framework.org/tutorial/3-class-based-views/"}
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,469 | Daniil-Kost/url_manager | refs/heads/development | /url_app/admin.py | from django.contrib import admin
from url_app.models import Url, Profile
# Register your models here.
# Expose Url and Profile in the admin with the default ModelAdmin options.
admin.site.register(Url)
admin.site.register(Profile)
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,470 | Daniil-Kost/url_manager | refs/heads/development | /url_app/serializer.py | from rest_framework import serializers
from url_app.models import Url
class UrlSerializer(serializers.HyperlinkedModelSerializer):
    """Serialize Url objects for the REST API (the listed fields only)."""
    class Meta:
        model = Url
        fields = ('uuid', 'url', 'title', 'short_url', 'clicks', 'create_dttm')
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,471 | Daniil-Kost/url_manager | refs/heads/development | /url_app/util.py | import shortuuid
from bs4 import BeautifulSoup
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from urllib.request import urlopen
from urllib.error import HTTPError
from url_manager.settings import DEFAULT_DOMAIN
def short_url_generator():
    """Return a random 8-character short-url token (shortuuid based)."""
    return shortuuid.uuid()[:8]
# function for getting title in <h1> tag
def get_title(url):
    """Fetch *url* and return the text of its first <h1> tag ("" on failure)."""
    try:
        html = urlopen(url)
    except HTTPError:
        print("This web-page: " + url + " is not defined.")
        return ""
    try:
        soup = BeautifulSoup(html.read(), "html.parser")
        return soup.find('h1').getText()
    except AttributeError:
        # soup.find returned None -> no <h1> on the page.
        print("Tag was not found")
        return ""
def paginate(obj, current_page, pages):
    """Return the requested page of *obj* (*pages* items per page),
    clamping invalid page numbers instead of raising."""
    paginator = Paginator(obj, pages)
    try:
        return paginator.page(current_page)
    except PageNotAnInteger:
        return paginator.page(1)  # Non-numeric page -> first page.
    except EmptyPage:
        return paginator.page(paginator.num_pages)  # Past the end -> last page.
def save_user_urls(user, url):
    """Append *url* to the user's profile url set and persist the user."""
    updated = list(user.profile.urls.all())
    updated.append(url)
    user.profile.urls.set(updated)
    user.save()
def prepare_url_data(data):
    """Validate and augment raw url data in place.

    Fills in ``title`` (scraped from the page) and generates a random
    ``short_url`` when none was supplied. Returns ``(data, errors)``;
    ``errors`` is non-empty when validation failed.
    """
    val = URLValidator()
    errors = {}
    try:
        val(data["url"])
        data["title"] = get_title(data["url"])
    except ValidationError:
        data['url'] = u"Your long URL is invalid"
        data["title"] = ""
        # BUG FIX: previously no error was recorded here, so callers that
        # check ``errors`` (e.g. url_get_add) saved the placeholder text
        # above as a real Url. Report the failure explicitly.
        errors['url'] = "Your long URL is invalid"
    if data.get("short_url") and not 4 <= len(data["short_url"]) <= 8:
        errors['short_url'] = "Short URL will be at least" \
                              " 4 chars and max 8 chars"
    if not data.get("short_url"):
        # No custom short url supplied: generate one.
        data["short_url"] = f'{DEFAULT_DOMAIN}{short_url_generator()}'
    return data, errors
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,472 | Daniil-Kost/url_manager | refs/heads/development | /url_app/models.py | import uuid
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from url_manager.settings import DEFAULT_DOMAIN
class Profile(models.Model):
    """To keep extra user data (one-to-one with Django's User)."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    class Meta:
        verbose_name = "User Profile"
        verbose_name_plural = "Users Profiles"
    # Display name; copied from the username by the post_save signal below.
    name = models.CharField(
        max_length=256,
        blank=True,
        verbose_name="User Name")
    # Set True once the user clicks the activation link (views.activate).
    email_confirmed = models.BooleanField(default=False)
    # Urls owned by this profile.
    urls = models.ManyToManyField('Url',
                                  verbose_name="Urls",
                                  blank=True,)
    def __str__(self):
        return self.name
@receiver(post_save, sender=User)
def update_user_profile(sender, instance, created, **kwargs):
    """Create a Profile for every newly created User and keep it persisted."""
    if created:
        profile = Profile.objects.create(user=instance, name=instance.username)
        profile.save()  # create() already saves; extra save kept from original
    instance.profile.save()
class Url(models.Model):
    """A stored long url with its generated short url and click counter."""
    class Meta:
        verbose_name = "Url"
        verbose_name_plural = "Urls"
    # Stable public identifier used by the REST API routes.
    uuid = models.UUIDField(
        verbose_name="UUID",
        default=uuid.uuid4,
        editable=False,
        unique=True)
    url = models.URLField(
        unique=False,
        verbose_name="URL",
        blank=False)
    title = models.CharField(
        max_length=256,
        blank=True,
        verbose_name="Text")
    domain = models.CharField(
        max_length=96,
        blank=True,
        verbose_name="Domain",
        default=DEFAULT_DOMAIN)
    short_url = models.CharField(
        max_length=256,
        blank=True,
        unique=True,
        verbose_name="Short URL")
    # BUG FIX: the previous default was `short_url`, i.e. the CharField
    # *object* declared above, not a string. Django uses non-callable
    # defaults as-is, so the slug default was a field object. Use an empty
    # string instead (requires a migration).
    slug = models.SlugField(
        unique=False,
        verbose_name="Slug",
        default="")
    clicks = models.IntegerField(
        blank=True,
        verbose_name="Clicks",
        default=0)
    # NOTE(review): datetime.now produces naive datetimes; prefer
    # django.utils.timezone.now if USE_TZ is enabled — confirm settings.
    create_dttm = models.DateTimeField(
        verbose_name="Created",
        blank=True,
        default=datetime.now)
    def __str__(self):
        return f"URL: {self.url}. Short url: {self.short_url}"
| {"/url_app/tests.py": ["/url_manager/urls.py", "/url_app/models.py", "/url_app/test_fixtures/mock_for_tests.py"], "/url_app/views.py": ["/url_app/models.py", "/url_app/serializer.py"], "/url_manager/urls.py": ["/url_app/views.py"], "/url_app/admin.py": ["/url_app/models.py"], "/url_app/serializer.py": ["/url_app/models.py"]} |
64,473 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise8/Tools.py | def find_result(bot_a, bot_b):
result = ""
if bot_a == bot_b:
result = "tie"
elif bot_a == "rock" and bot_b == "paper":
result = "botB"
elif bot_a == "rock" and bot_b == "scissors":
result = "botA"
elif bot_b == "rock" and bot_a == "paper":
result = "botA"
elif bot_b == "rock" and bot_a == "scissors":
result = "botB"
elif bot_a == "paper" and bot_b == "rock":
result = "botA"
elif bot_a == "paper" and bot_b == "scissors":
results = "botB"
elif bot_b == "paper" and bot_a == "rock":
result = "botB"
elif bot_b == "paper" and bot_a == "scissors":
result = "botA"
elif bot_a == "scissors" and bot_b == "rock":
result = "botB"
elif bot_a == "scissors" and bot_b == "paper":
result = "botA"
elif bot_b == "scissors" and bot_a == "rock":
result = "botA"
elif bot_b == "scissors" and bot_a == "paper":
result = "botB"
return result | {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,474 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise9/exercise9.py | # Exercise 9
import random
import time
import re

# Number-guessing game: the machine picks 1-9, the user guesses until correct.
pattern = r"^[-+]?[0-9]+$"  # Pattern to find positive or negative or zero integer.
exit_keys = ("n", "e", "ex", "exi", "exit")  # Use these keys to exit.
play_again = True  # Boolean to restart the game after finishing.
user_input = None
while play_again is True and user_input not in exit_keys:
    print('Please wait while the machine generates a random number...')
    time.sleep(0.6)
    random_number = random.randint(1, 9)  # Generate a random integer between 1 and 9.
    print('---> A random integer number was generated between 1 and 9. Try to guess it.' + ' *** ' + str(random_number) + ' *** ')
    number_of_guesses = 0
    while True:  # Loop forever or until exit key is typed.
        user_input = input('---> Your guess: ')  # Get user input.
        if user_input in exit_keys:  # User just entered on of the exit sequences. ('n', 'e', 'ex', 'exi', 'exit')
            user_input_on_exit = input('You just pressed one of the exit keys (n, e, ex, exi, exit)...\nAre you sure you want to exit (y/n)? ')
            if user_input_on_exit == 'y':
                print('Exiting...')
                print(100 * '_' + '\n')
                exit(0)
            else:
                continue
        elif not re.match(pattern, user_input):  # User did not enter an integer.
            print(18 * ' ' + 'Warning! Please type an integer. (This will not count as a valid guess. Lucky you...)')
            continue
        guess = int(user_input)
        if guess < random_number:  # User's guess was lower than the number generated.
            number_of_guesses += 1
            print(18 * ' ' + ' Too low... Please try again.')
        elif guess > random_number:  # User's guess was higher than the number generated.
            number_of_guesses += 1
            print(18 * ' ' + ' Too high... Please try again.')
        elif random_number == int(user_input):  # User's guessed correctly.
            number_of_guesses += 1
            print((len(str(number_of_guesses)) + 23) * ' ' + 81 * '*')
            print(19 * ' ' + ' --> * Congratulations!!! your guess was Correct and you only needed ' + str(number_of_guesses) + ' guess/guesses * <--')
            print((len(str(number_of_guesses)) + 23) * ' ' + 81 * '*' + '\n')
            break
    user_input = input('Would you like to play again (y/n)? ')  # Prompt player to restart the game.
    if user_input == 'y':
        print('Restarting...')
        time.sleep(0.6)
        print(100 * '_' + '\n')
        continue
    elif user_input == 'n':
        print('Exiting...')
        print(100 * '_' + '\n')
        play_again = False
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,475 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise3/exercise3.py | # Exercise 3
import re

# Filter exercise: read a list of integers and a threshold, print the
# list values strictly below the threshold.
pattern1 = r"[-+]?[0-9]+"  # Pattern to find positive or negative integer.
pattern2 = r"^[-+]?[0-9]+$"  # Pattern to find one positive or negative integer.
exit_keys = ("n", "e", "ex", "exi", "exit")  # Use these keys to exit.
print("\nExit keys: n, e, ex, exi, exit.\nType key + Enter to exit\n")
while True:  # Loop forever or until exit key is typed.
    prompt_list = "Please, type some integers, separated by space and press enter."
    user_input_list = input(prompt_list + 50*" " + "exit keys: " + str(exit_keys) + "\n")
    # Get user's input. List of integers to be parsed.
    if user_input_list in exit_keys:  # If user typed one of the exit keys, program will exit.
        print("Program will now exit. Thank you.")
        break
    user_list = [int(value) for value in re.findall(pattern1, user_input_list)]
    # Find all valid integers in user's input list.
    prompt_number = "Please, type a number to compare to list's values and press enter."
    user_input_number = input(prompt_number + 47*" " + "exit keys: " + str(exit_keys) + "\n")
    # Get user's input. Number to be compared with each list item.
    if user_input_number in exit_keys:
        # BUG FIX: this previously re-tested user_input_list, so exit keys
        # typed at the second prompt were ignored and re.match below then
        # crashed with AttributeError on None.
        print("Program will now exit. Thank you.")
        break
    match = re.match(pattern2, user_input_number)
    if match is None:  # Guard: non-integer input no longer raises AttributeError.
        print("Invalid integer, please try again.")
        continue
    user_number = int(match.group())
    print([number for number in user_list if number < user_number])  # One-liner.
64,476 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise6/exercise6.py | # Exercise 6
# Palindrome checker: reads strings until an exit key is typed.
exit_keys = ("n", "e", "ex", "exi", "exit")  # Use these keys to exit.
print("Type exit key + Enter to exit" + 67*" " + "||exit keys: " + str(exit_keys) + "||\n")
while True:  # Loop forever or until exit key is typed.
    prompt = "Please, type a string and press enter"
    user_input = input(prompt + 50*" " + "||exit keys: " + str(exit_keys) + "||\n")  # Get user input.
    if user_input in exit_keys:  # If user typed one of the exit keys, program will exit.
        print("---> Program will now exit. Thank you.")
        break
    if user_input == user_input[::-1]:
        # Compare the user's string with itself reversed, to find out whether it is a palindrome or not.
        print("---> The string you typed is a palindrome!")
    else:
        print("---> The string you typed is NOT a palindrome...")
    print("")
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,477 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise10/exercise10.py | # Exercise 10
import random

# Common-elements exercise: generate two random integer lists and print their
# intersection without duplicates, preserving first-seen order.
exit_keys = ("n", "e", "ex", "exi", "exit")  # Use these keys to exit.
while True:  # Loop forever or until exit key is typed.
    print(44 * '-')
    print('exit keys: ', exit_keys)
    print(44 * '-')
    print('\nPlease wait while the machine generates two random lists...')
    alpha = random.choices(range(0, 100), k=random.randint(0, 100))  # Generate a list of random 'k' length with random integers in range 0-100.
    beta = random.choices(range(0, 100), k=random.randint(0, 100))  # Generate a list of random 'k' length with random integers in range 0-100.
    print('The machine generated two random lists with integers.')
    print(4 * ' ' + 'List alpha ->', alpha)
    print(4 * ' ' + 'List beta ->', beta)
    user_input = input('\n---> Press enter to continue or one of the exit keys to exit.\n     This will return common elements from the above lists, without duplicates: ')
    if user_input == '':  # Empty 'user_input' means that the user pressed enter without any other input.
        # dict.fromkeys keeps insertion order, so duplicates are dropped
        # while preserving the order of first appearance in alpha.
        common_elements_list = list(dict.fromkeys([alpha_x for alpha_x in alpha if alpha_x in beta]))  # Find common elements and remove duplicates.
        print('\n---> common elements', common_elements_list)
        valid_input = True
        while valid_input is True:
            user_input = input('Would you like to restart (y/n)? ')  # Prompt player to restart the process.
            if user_input == 'y':
                print('Restarting...')
                print(100 * '_')
                valid_input = False  # Used to break the 'while' loop.
            elif user_input == 'n':
                print('Exiting...')
                print(100 * '_')
                exit(0)
            else:
                print('Sorry I didn\'t get that...')  # 'while' loop will continue.
    elif user_input in exit_keys:  # User just entered on of the exit sequences. ('n', 'e', 'ex', 'exi', 'exit')
        user_input_on_exit = input('\nYou just pressed one of the exit keys (n, e, ex, exi, exit)...\nAre you sure you want to exit (y/n)? ')
        if user_input_on_exit == 'y':
            print('Exiting...')
            print(100 * '_' + '\n')
            exit(0)
        else:
            continue
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,478 | ZogopZ/Internship | refs/heads/master | /utilities/assets/spamMail/Tools.py | import datetime
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib
import getpass
# Module-level state shared by the helpers below.
working_dates = []  # business days of the internship (no weekends/holidays)
weekends = []  # weekend datetimes inside the internship period
holidays = []  # public-holiday dates (populated by set_holidays)
working_status = True  # whether "now" falls inside working hours
def get_working_dates():
    """Return the module-level list of working dates."""
    return working_dates


def get_weekends():
    """Return the module-level list of weekend datetimes."""
    return weekends


def get_holidays():
    """Return the module-level list of holiday dates."""
    return holidays


def get_working_status():
    """Return the flag computed by set_working_status."""
    return working_status
def set_dates(work_start, work_end):
    """Split every date in [work_start, work_end) into the module-level
    working_dates and weekends lists; holidays are skipped entirely.

    NOTE(review): holidays must be populated (set_holidays) BEFORE calling
    this for the holiday skip to have any effect — the current caller runs
    set_dates first, so confirm the intended call order.
    """
    date_generated = [work_start + datetime.timedelta(days=x)
                      for x in range(0, (work_end - work_start).days)]
    for date in date_generated:
        if date.weekday() in (5, 6):  # Saturday/Sunday
            weekends.append(date)
        elif date.date() in holidays:
            # BUG FIX: was `date.weekday() in holidays`, comparing an int
            # (0-6) against datetime.date entries — holidays were never
            # skipped. Compare the calendar date instead.
            continue
        else:
            working_dates.append(date)  # Weekends and holidays are not included.
def set_working_status(now):
    """Set the module-level working_status to False when *now* falls on a
    holiday, a weekend, or outside the 09:00-17:00 working window.

    Note: this only ever clears the flag; it never sets it back to True.
    """
    global working_status
    for day in holidays:  # No work during holidays (holidays hold dates).
        if now.date() == day:
            working_status = False
    for day in weekends:  # No work during weekends (weekends hold datetimes).
        if now.date() == day.date():
            working_status = False
    # Boundaries use microsecond=1 so exactly 09:00:00.000000 counts as
    # "before work" via <= below.
    before_work = datetime.datetime(now.year, now.month, now.day, 9, 0, 0, 1)  # Same date as today, before work.
    after_work = datetime.datetime(now.year, now.month, now.day, 17, 0, 0, 1)  # Same date as today, after work.
    if now <= before_work or now >= after_work:
        working_status = False
def time_left(so_many_today):
    """Break a timedelta into [years, months, weeks, days, hours, minutes,
    seconds, milliseconds, microseconds].

    Uses the rough conventions 1 year = 365 days and 1 month = 30 days,
    matching the original hand-rolled arithmetic.
    """
    years, days = divmod(so_many_today.days, 365)
    months, days = divmod(days, 30)
    weeks, days = divmod(days, 7)
    hours, rem = divmod(so_many_today.seconds, 60 * 60)
    minutes, seconds = divmod(rem, 60)
    millis, micros = divmod(so_many_today.microseconds, 1000)
    return [years, months, weeks, days,
            hours, minutes, seconds, millis, micros]
def set_holidays():
    """Populate the module-level holiday list (public holidays 2019-2020)."""
    global holidays
    holidays = [
        datetime.date(2019, 12, 25),
        datetime.date(2019, 12, 26),
        datetime.date(2020, 1, 1),
        datetime.date(2020, 1, 6),
        datetime.date(2020, 3, 2),
        datetime.date(2020, 3, 25),
        datetime.date(2020, 4, 17),
        datetime.date(2020, 4, 20),
        datetime.date(2020, 5, 1),
    ]
def paid_days_off(now):
    """Return the paid days off accrued by the month of *now*.

    The sums are kept as explicit additions (not pre-computed literals) so
    the float results stay bit-identical to the original if/elif ladder.
    Months outside the internship (June-October) yield 0.
    """
    accrued_by_month = {
        11: 0,
        12: 2.48,
        1: 2.48 + 5,
        2: 2.48 + 5 + 4,
        3: 2.48 + 5 + 4 + 5,
        4: 2.48 + 5 + 4 + 5 + 5,
        5: 2.48 + 5 + 4 + 5 + 5 + 5,
    }
    return accrued_by_month.get(now.month, 0)
def send_email(zois_email):
    """Send *zois_email* (plain-text body) to the hard-coded recipient list
    via Outlook's SMTP server, prompting interactively for the password."""
    sender = 'zwisss@hotmail.com'
    # recipients_list = ['zwisss@hotmail.com']
    recipients_list = ['zwisss@hotmail.com', 'theonzwg@gmail.com', 'mariannaleventi@gmail.com', 'ilias.Anagnostopoulos@intrasoft-intl.com', 'tzogx@hotmail.com']
    server = smtplib.SMTP('smtp.live.com', 587)
    server.ehlo()  # Hostname to send for this command defaults to the fully qualified domain name of the local host.
    server.starttls()  # Puts connection to SMTP server in TLS mode
    server.ehlo()
    server.login('zwisss@hotmail.com', getpass.getpass('Password: '))  # Hide password typing from screen.
    for receiver in recipients_list:
        # Each recipient gets a personalized subject/attachment (see create_message).
        message = create_message(zois_email, sender, receiver, recipients_list)
        server.sendmail(message['From'], message['To'], message.as_string())  # Send the message via the server.
        if receiver == recipients_list[0]:
            print('\nMail to maself was successfully sent.')
        elif receiver == recipients_list[1]:
            print('Mail to Porportheon was successfully sent.')
        elif receiver == recipients_list[2]:
            print('Mail to PhD student Marianna, was successfully sent.')
        elif receiver == recipients_list[3]:
            print('Mail to Ilia, was successfully sent.')
        elif receiver == recipients_list[4]:
            print('Mail to Taso, was successfully sent.')
    server.quit()
def create_message(zois_email, sender, receiver, recipients_list):
    """Build a MIME message for *receiver*: common text body *zois_email*
    plus a per-recipient subject and PNG attachment."""
    ptd_images = '/home/zois/Documents/Internship/utilities/assets/spamMail/assets/images/'
    ptf_image = ''
    message = MIMEMultipart()  # Create message object instance.
    message['From'] = sender  # Setup the parameters of the message.
    message['To'] = receiver
    if receiver == recipients_list[0]:
        message['Subject'] = 'Very important stuff'
        ptf_image = ptd_images + 'zois.png'
    elif receiver == recipients_list[1]:
        message['Subject'] = 'Σπίτι με 200 ευρώ και όλα τα κομφόρ!'
        ptf_image = ptd_images + 'theoni.png'
    elif receiver == recipients_list[2]:
        message['Subject'] = '[Zizizi] Regarding monthly salary.'
        ptf_image = ptd_images + 'marianna2.png'
    elif receiver == recipients_list[3]:
        message['Subject'] = 'Σχετικά με τον μηνιαίο μισθό.'
        ptf_image = ptd_images + 'ilias.png'
    elif receiver == recipients_list[4]:
        message['Subject'] = 'Best bees are the dead bees.'
        ptf_image = ptd_images + 'tasos.png'
    message.attach(MIMEText(zois_email, 'plain'))  # Add in the message body.
    # to add an attachment is just add a MIMEBase object to read a picture locally.
    # NOTE(review): the attachment filename is always 'zois.png' even when a
    # different image is attached — confirm whether that is intentional.
    with open(ptf_image, 'rb') as f:
        mime = MIMEBase('image', 'png')
        mime.add_header('Content-Disposition', 'attachment', filename='zois.png')
        mime.add_header('X-Attachment-Id', '0')
        mime.add_header('Content-ID', '<0>')
        mime.set_payload(f.read())
        encoders.encode_base64(mime)
        message.attach(mime)
    return message
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,479 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise1/exercise1.py | # Exercise 1
import re
import datetime

# Read "name age copies" and print, *copies* times, the year the user
# turns one hundred.
prompt = "Please, enter your name, age and copies separated by comma or space\n"
user_input = input(prompt)  # Get user's input and store it as a string.
pattern = r'\w+'  # Used with findall() below to split user_input into words.
input_list = re.findall(pattern, user_input)  # All alphanumeric words inside user_input.
name = input_list[0]
age = int(input_list[1])
copies = int(input_list[2])
year = datetime.date.today().year  # Find current year.
years_until_100 = 100 - age  # Years until the user reaches a hundred years old.
year_turning_100 = year + years_until_100  # Exact year the user turns a hundred.
print("\n")
# BUG FIX: output previously said "hundred hears old".
print(copies*("Hello " + name + ". " + "You will turn a hundred years old in the year "
              + str(year_turning_100) + ".\n"))  # Print requested output.
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,480 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise7/exercise7.py | # Exercise 7
import random
import re

# Even-number filter: parse integers from input (or auto-generate a list)
# and print the even ones.
pattern = r"[-+]?[0-9]+"  # Pattern to find positive or negative integers.
exit_keys = ("n", "e", "ex", "exi", "exit")  # Use these keys to exit.
auto_keys = ("a", "au", "aut", "auto")  # Use these keys to automatically generate a list of random integers.
print("Type exit key + Enter to exit" + 67*" " + "||exit keys: " + str(exit_keys) + "||\n")
while True:  # Loop forever or until exit key is typed.
    prompt = "Please, type a few integers and press enter...\n" \
             "or type a and press enter to automatically generate a list of integers."
    user_input = input(prompt + 25 * " " + "||exit keys: " + str(exit_keys) + "||\n")  # Get user input.
    if user_input in exit_keys:  # If user typed one of the exit keys, program will exit.
        print("---> Program will now exit. Thank you.")
        break
    elif user_input in auto_keys:  # If user typed one of the auto keys.
        a_len = random.randrange(20)  # Generate a random integer to represent the length of list a.
        a = random.sample(range(1, 101), a_len)
        # Generate a list of a_len length with random integers in range 1-100.
        print(str(a) + " <--- Automatically generated list.")
        print(str([even for even in a if even % 2 == 0]) + " <--- Even numbers of the above list.\n")
        continue
    elif user_input not in auto_keys:
        user_input_list = [int(value) for value in re.findall(pattern, user_input)]
        # Find all integer values in user's input and add them to user_input_list.
        print(str(user_input_list) + " <--- User input list.")
        print(str([even for even in user_input_list if even % 2 == 0]) + " <--- Even numbers of the above list.\n")
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,481 | ZogopZ/Internship | refs/heads/master | /PythonBonus/Bonus1/SoManyToday.py | # Calculates time left until date 13/05/2020 and time 17:00 EET UTC/GMT +2 hours.
# Also calculates how much money earned today and how much money earned since the start of the internship.
from datetime import timedelta
from PythonBonus.Bonus1.Tools import *
# Change above line with: from Tools import *
# if you want to run it one line from a terminal.
now = datetime.datetime.today() # Get current date-time.
work_start = datetime.datetime(now.year, now.month, now.day, 9, 0, 0, 0) # Datetime that work starts.
work_end = datetime.datetime(now.year, now.month, now.day, 17, 0, 0, 0) # Datetime that work ends.
first_day_date = datetime.datetime(2019, 11, 13, 9) # Specify starting date of internship.
last_day_date = datetime.datetime(2020, 5, 13, 9) # Specify ending date of internship.
set_dates(first_day_date, last_day_date) # Sets working dates, weekends
set_holidays() # and holidays.
set_working_status(now) # Sets working_status True if working, false if not.
total_to_be_paid = (580.8 - 580.8 * 0.2976) * 6 # Total pay for 6 month internship.
pay_per_hour_8 = 23.232 / 8
pay_per_microsecond_8 = 23.232 / (8 * 60 * 60 * 1000 * 1000) # Calculate pay per microsecond working 8 hours a day.
# Above pay_per_* values are pre calculated by the corresponding company as 23.232 euro per day.
so_many_today = last_day_date + timedelta(hours=8) - now # Generate date difference.
tfl = time_left(so_many_today) # This function returns time left in multiple time types.
# Bug fix: time_left() returns [years, months, weeks, days, hours, minutes,
# seconds, milliseconds, microseconds] (indices 0-8). The original reused
# tfl[3] for the hours and shifted every later component one index back, so
# the printed hours/minutes/seconds/ms/us were all wrong.
output_1 = "Η πρακτική μου τελειώνει σε " + str(tfl[0]) + " χρόνια " \
           + str(tfl[1]) + " μήνες " + str(tfl[2]) + " εβδομάδες " \
           + str(tfl[3]) + " ημέρες " + "\r\n" + 49 * " " \
           + str(tfl[4]) + " ώρες " + str(tfl[5]) + " λεπτά " \
           + str(tfl[6]) + " δευτερόλεπτα " + "\r\n" + 49 * " " \
           + str(tfl[7]) + " χιλιοστά του δευτερολέπτου και " \
           + str(tfl[8]) + " μικροδευτερόλεπτα.\r\n"
output_2 = ""
time_worked_today = 0
if get_working_status(): # If I am working right now, calculate pay earned today.
time_worked_today = now - datetime.datetime(now.year, now.month, now.day, 9) # Calculate working time today.
microseconds_passed_today = time_worked_today.total_seconds() * (10 ** 6) # Convert to microseconds.
euro_made_today = (microseconds_passed_today * pay_per_microsecond_8)
output_2 = "Σήμερα έχω ήδη βγάλει " + str(euro_made_today) + " ευρώ" + \
" και έχω δουλέψει περίπου " + str(microseconds_passed_today / (10 ** 6) / 60 / 60) + " ώρες.\r\n"
elif not get_working_status():
if now < work_start:
time_worked_today = now - now
output_2 = "\nΑυτήν την στιγμή δεν δουλεύω και αράζω πέτσα. Σε λιγάκι πιάνουμε δουλειά...\r\n"
elif now > work_end:
time_worked_today = work_end - work_start
output_2 = "\nΑυτήν την στιγμή δεν δουλεύω και αράζω πέτσα. Σήμερα βγήκε το μεροκάματο των 23,232 ευρώ. \r\n"
time_worked = 0
days_worked = 0
for day in working_dates: # For each working day, calculate the sum of working seconds.
days_worked += 1 # Calculate days worked for printing only.
if day.year == now.year and day.month == now.month and day.day == now.day:
break # For today working seconds are already calculated.
time_worked += 8 * 60 * 60
hibernate_seconds = 2.48 * 8 * 60 * 60 # Total seconds worked, on non working days. (UNKNOWN REASON YET)
time_worked += time_worked_today.total_seconds() + hibernate_seconds
microseconds_passed = time_worked * (10 ** 6) # Convert to microseconds.
euro_made = (microseconds_passed * pay_per_microsecond_8)
monthly_wage_earned = 0
if (euro_made / 580.8) < 1: # First month work was less than a full months work.
monthly_wage_earned = euro_made - 359.63
elif (euro_made / 580.8) >= 1: # Not first month of work.
full_months_passed = int((euro_made - 359.63) / 580.8) # Calculate full months passed.
monthly_wage_earned = euro_made - 359.63 - full_months_passed * 580.8
output_3 = "Μηνιαία έχω βγάλει " + str(monthly_wage_earned) + " ευρώ.\r\n"
output_4 = "Συνολικά έχω βγάλει " + str(euro_made) + " ευρώ και έχω δουλέψει " + \
str(days_worked) + " ημέρες, συνυπολογίζοντας την σημερινή.\r\n\r\n"
signature = "Regards,\r\nZois Zogopoulos\r\n"
pySignature = 113 * " " + "This is an automated email from Python.\r\n"
dateString = "Ημερομηνία: " + str(now.date()) + "\r\nΏρα: " + str(now.time()) + "\r\n\r\n"
zois_email = dateString + output_1 + output_2 + output_3 + output_4 + signature + pySignature
print(zois_email)
# send_email(zois_email)
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,482 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise5/exercise5.py | # Exercise 5
import random

a_len = random.randrange(20)  # Random length for list a.
b_len = random.randrange(20)  # Random length for list b.
a = random.sample(range(1, 101), a_len)  # a_len unique integers in 1-100.
b = random.sample(range(1, 101), b_len)  # b_len unique integers in 1-100.
# Performance: a set gives O(1) membership tests, turning the original
# O(n*m) nested scan into O(n + m). random.sample guarantees `a` has no
# duplicates, but the `not in common_elements` guard is kept so behavior
# is identical even for duplicate inputs.
b_set = set(b)
common_elements = []
for element in a:
    if element in b_set and element not in common_elements:
        common_elements.append(element)
print("Random generated list a: " + str(a))
print("Random generated list b: " + str(b))
print("\n---> Common elements: " + str(common_elements))
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,483 | ZogopZ/Internship | refs/heads/master | /PythonBonus/Bonus1/Tools.py | import datetime
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib
working_dates = []
weekends = []
holidays = []
working_status = True
def get_working_dates():
    """Return the module-level list of working dates (weekdays minus holidays)."""
    return working_dates


def get_weekends():
    """Return the module-level list of weekend dates."""
    return weekends


def get_holidays():
    """Return the module-level list of holiday dates."""
    return holidays


def get_working_status():
    """Return the flag last computed by set_working_status() (defaults True)."""
    return working_status
def set_dates(work_start, work_end):
    """Populate the module-level working_dates and weekends lists.

    Iterates every date from work_start (inclusive) to work_end
    (exclusive), routing Saturdays/Sundays into `weekends`, skipping
    holidays, and appending the rest to `working_dates`.

    NOTE(review): set_holidays() must run *before* this function or
    `holidays` is still empty; the caller in SoManyToday.py currently
    calls them in the opposite order.
    """
    date_generated = [work_start + datetime.timedelta(days=x)
                      for x in range(0, (work_end - work_start).days)]
    for date in date_generated:
        if date.weekday() in (5, 6):  # Saturday=5, Sunday=6.
            weekends.append(date)
        elif date.date() in holidays:
            # Bug fix: the original tested `date.weekday() in holidays`,
            # comparing an int against a list of datetime.date — never
            # true, so holidays were silently counted as working days.
            continue
        else:
            working_dates.append(date)
def set_working_status(now):
    """Lower the module-level working_status flag for the moment `now`.

    The flag is forced to False on holidays, on weekends, and outside the
    09:00-17:00 window. It is never reset to True here, so the module
    default (True) must hold before the call.
    """
    global working_status
    for day in holidays:  # No work during holidays (holidays holds date objects).
        if now.date() == day:
            working_status = False
    for day in weekends:  # No work during weekends (weekends holds datetimes).
        if now.date() == day.date():
            working_status = False
    # NOTE(review): the trailing microsecond=1 shifts both boundaries to
    # 09:00:00.000001 / 17:00:00.000001 — presumably so the <=/>= tests
    # treat exactly 09:00 as "before work"; confirm intent.
    before_work = datetime.datetime(now.year, now.month, now.day, 9, 0, 0, 1)  # Same date as today, before work.
    after_work = datetime.datetime(now.year, now.month, now.day, 17, 0, 0, 1)  # Same date as today, after work.
    if now <= before_work or now >= after_work:
        working_status = False
def time_left(so_many_today):
    """Break a datetime.timedelta into calendar-ish components.

    Returns [years, months, weeks, days, hours, minutes, seconds,
    milliseconds, microseconds], using 365-day years and 30-day months.
    """
    days = so_many_today.days
    secs = so_many_today.seconds
    micros = so_many_today.microseconds
    # Successive divmods replace the original subtract-after-floor-divide
    # chains; the arithmetic is identical.
    years, days = divmod(days, 365)
    months, days = divmod(days, 30)
    weeks, days = divmod(days, 7)
    hours, secs = divmod(secs, 60 * 60)
    minutes, secs = divmod(secs, 60)
    millis, micros = divmod(micros, 1000)
    return [years, months, weeks, days,
            hours, minutes, secs, millis, micros]
def set_holidays():
    """Rebind the module-level holidays list to the fixed 2019/2020 public holidays."""
    global holidays
    holidays = [datetime.date(year, month, day)
                for year, month, day in ((2019, 12, 25),
                                         (2020, 3, 25),
                                         (2020, 4, 13),
                                         (2020, 5, 1))]
def send_email(zois_email):
    """Send the daily report to a fixed set of recipients via Outlook SMTP.

    SECURITY/REVIEW NOTES: sender and recipient addresses are hard-coded;
    the account password is read interactively from stdin at send time;
    assigning msg['Subject'] a second time *adds* a second Subject header
    rather than replacing the first — verify the last mail's subject.
    """
    msg = MIMEMultipart()  # Create message object instance.
    # recipients = ["theonzwg@gmail.com", "tzogx@hotmail.com", "Marianna.Leventi@ruhr-uni-bochum.de",
    #               "zwisss@hotmail.com"]
    msg['From'] = "zwisss@hotmail.com"  # Setup the parameters of the message.
    # msg['To'] = ", ".join(recipients)
    # msg['To'] = "zwisss@hotmail.com"
    msg['Subject'] = "Very important stuff"
    msg.attach(MIMEText(zois_email, 'plain'))  # Add in the message body.
    server = smtplib.SMTP("smtp.live.com", 587)
    server.ehlo()  # Hostname defaults to the fully qualified domain name of the local host.
    server.starttls()  # Puts connection to SMTP server in TLS mode.
    server.ehlo()
    server.login('zwisss@hotmail.com', password=input())  # Login credentials (password typed at runtime).
    server.sendmail(msg['From'], "zwisss@hotmail.com", msg.as_string())  # Send the message via the server.
    print("\nMail to maself was successfully sent.")
    server.sendmail(msg['From'], "theonzwg@gmail.com", msg.as_string())
    print("Mail to Porportheon was successfully sent.")
    server.sendmail(msg['From'], "Marianna.Leventi@ruhr-uni-bochum.de", msg.as_string())
    print("Mail to Marianna, was successfully sent.")
    server.sendmail(msg['From'], "tzogx@hotmail.com", msg.as_string())
    print("Mail to Taso, was successfully sent.")
    msg['Subject'] = "Σχετικά με τον μηνιαίο μισθό."
    server.sendmail(msg['From'], "ilias.Anagnostopoulos@intrasoft-intl.com", msg.as_string())
    print("Mail to Ilia, was successfully sent.")
    server.quit()
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,484 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise2/exercise2.py | # Exercise 2
import re
pattern = r"^[-+]?[0-9]+$" # Pattern to find positive or negative integer.
exit_keys = ("n", "e", "ex", "exi", "exit") # Use these keys to exit.
print("Exit keys: n, e, ex, exi, exit. Type key + Enter to exit")
while True: # Loop forever or until exit key is typed.
prompt = "Please, type an integer number and press enter"
user_input = input(prompt + 50*" " + "exit keys: " + str(exit_keys) + "\n") # Get user input.
if user_input in exit_keys: # If user typed one of the exit keys, program will exit.
print("Program will now exit. Thank you.")
break
if re.match(pattern, user_input): # User input, matches pattern (namely is an integer).
int_value = int(user_input) # Convert to int type.
if int_value % 2 == 0: # Integer is even.
print("You just typed an even integer.")
if int_value % 4 == 0: # Integer is even and a multiple of 4.
print("This integer is a multiple of 4\n")
print("")
else: # Integer is odd.
print("You just typed an odd integer.\n")
else: # User input is not valid integer type.
print("The value you just typed is not an integer. Please, try again.\n")
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,485 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise4/exercise4.py | # Exercise 4
import re
pattern = r"^[-+]?[0-9]+$" # Matches an optionally signed integer (entire input).
exit_keys = ("n", "e", "ex", "exi", "exit") # Use these keys to exit.
print("Type exit key + Enter to exit" + 67*" " + "||exit keys: " + str(exit_keys) + "||\n")
while True: # Loop forever or until exit key is typed.
    prompt = "Please, type an integer number and press enter"
    user_input = input(prompt + 50*" " + "||exit keys: " + str(exit_keys) + "||\n") # Get user input.
    answer_list = []  # Divisors of the typed integer.
    if user_input in exit_keys: # If user typed one of the exit keys, program will exit.
        print("---> Program will now exit. Thank you.")
        break
    if re.match(pattern, user_input): # User input matches pattern (namely is an integer).
        int_value = int(user_input) # Convert to int type.
        if int_value == 0:
            print("---> Hey all non-zero numbers are divisors of 0.\n")
            continue;  # NOTE(review): stray semicolon — harmless, remove on next cleanup.
        elif int_value > 0:
            # NOTE(review): `int(int_value + 1 / 2)` parses as int_value + 0.5
            # (upper bound int_value), not (int_value + 1) / 2. The result is
            # still correct: all proper divisors are scanned and int_value
            # itself is appended afterwards.
            for potential_divisor in range(1, int(int_value + 1 / 2)):
                if int_value % potential_divisor == 0:
                    answer_list.append(potential_divisor)
            answer_list.append(int_value)
        elif int_value < 0:
            # Same precedence note: the lower bound is int(int_value - 0.5),
            # which truncates toward zero back to int_value for negatives.
            for potential_divisor in range(int(int_value - 1 / 2), 0):
                if int_value % potential_divisor == 0:
                    answer_list.append(potential_divisor)
        print("---> " + str(answer_list) + "\n")
    else: # User input is not a valid integer.
        print("---> The value you just typed is not an integer. Please, try again.\n")
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,486 | ZogopZ/Internship | refs/heads/master | /PythonExercises/Exercise8/exercise8.py | # Exercise 8
import random
from PythonExercises.Exercise8.Tools import find_result
exit_keys = ("n", "e", "ex", "exi", "exit") # Use these keys to exit.
rock_keys = ("r", "ro", "roc", "rock") # Use these keys to play rock.
paper_keys = ("p", "pa", "pap", "pape", "paper") # Use these keys to play paper.
scissor_keys = ("s", "sc", "sci", "scis", "sciss",
"scisso", "scissor", "scissors") # Use these keys to play scissors.
while True: # Loop forever or until exit key is typed.
prompt = "Please select a game mode:\n" \
" ---> press a and enter for bot mode.\n" \
" ---> press pvp and enter for player versus player."
user_input = input(prompt + 25 * " " + "||exit keys: " + str(exit_keys) + "||\n") # Get user input.
if user_input in exit_keys: # If user typed one of the exit keys, program will exit.
print("---> Program will now exit. Thank you.")
break
elif user_input == "a":
score_a = 0
score_b = 0
print(" " + 70 * "_")
while user_input not in exit_keys:
bot_A_selection = random.choice(["rock", "paper", "scissors"])
bot_B_selection = random.choice(["rock", "paper", "scissors"])
print("|Player A selects --> |%s|" % bot_A_selection.center(8) + 39*" " + "|")
print("|Player B selects --> |%s|" % bot_B_selection.center(8) + 39*" " + "|")
result = find_result(bot_A_selection, bot_B_selection)
if result == "tie":
print("| ---> Tie no one scored!" + 46*" " + "|")
elif result == "botA":
score_a += 1
print("| ---> Player A just scored a point." + 35*" " + "|")
elif result == "botB":
score_b += 1
print("| ---> Player B just scored a point." + 35*" " + "|")
print(str(score_a) + " " + str(score_b) + "\n"
"to continue press enter")
user_input = input("")
| {"/PythonBonus/Bonus1/SoManyToday.py": ["/PythonBonus/Bonus1/Tools.py"], "/PythonExercises/Exercise8/exercise8.py": ["/PythonExercises/Exercise8/Tools.py"]} |
64,532 | arijitx/Appliances-Energy-Use-Prediction | refs/heads/master | /template.py | import numpy as np
import pandas as pd
### encode CSV To feature Matrix
def get_feature_matrix(file_path):
    """Encode the CSV at file_path into an RBF feature matrix.

    Each input row maps to a 20161-wide vector: column 0 holds the week
    index k (int, weeks since the 2016-01-11 17:00:00 epoch) and the
    k-th of 20 blocks of 1008 columns holds Gaussian basis values
    centred on the row's 10-minute slot within the week; all other
    blocks stay zero.
    """
    import time

    epoch = "2016-01-11 17:00:00"

    def to_seconds(stamp):
        return time.mktime(time.strptime(stamp, "%Y-%m-%d %H:%M:%S"))

    def date2x(x):
        # 10-minute slot within the week (1008 slots of 600 s per week).
        slot = (to_seconds(x) - to_seconds(epoch)) / 600
        return int(slot % 1008)

    def date2k(x):
        # Week index since the epoch.
        slot = (to_seconds(x) - to_seconds(epoch)) / 600
        return int(slot / 1008)

    data = pd.read_csv(file_path)
    data['x'] = data['date'].apply(date2x)
    data['k'] = data['date'].apply(date2k)
    sigma = 2
    num_f = 1008
    k_len = 20
    # Bug fix: DataFrame.as_matrix() was removed in pandas 1.0;
    # to_numpy() is the drop-in replacement.
    mat = data[['x']].to_numpy()
    ks = data[['k']].to_numpy()
    phix = np.zeros((len(mat), k_len * num_f + 1))
    phix[:, 0] = ks.flatten()
    # Gaussian basis values for every row against all 1008 centres at once;
    # replaces the original loop of 1008 incremental np.hstack calls
    # (quadratic copying) with one broadcasted expression.
    centres = np.arange(1, num_f + 1)
    rbf = np.exp(-((mat[:, :1] - centres) ** 2) / (2 * sigma ** 2))
    phi = np.hstack((ks, rbf))
    print()  # Bug fix: bare `print` is a no-op in Python 3; print() emits the intended blank line.
    for i in range(len(mat)):
        k = int(phix[i][0])
        left_pad = np.zeros(k * num_f)
        right_pad = np.zeros((k_len - k - 1) * num_f)
        # Row layout: [k, zeros before slot k, this row's 1008 basis values,
        # zeros after slot k].
        phix[i] = np.concatenate((np.array([k]), left_pad, phi[i][1:], right_pad))
    return phix
## Encode Target Data to numpy array from csv
def get_output(file_path):
    """Read the target CSV and return its 'Output' column as an (n, 1) array."""
    data = pd.read_csv(file_path)
    # Bug fix: DataFrame.as_matrix() was removed in pandas 1.0;
    # to_numpy() is the drop-in replacement.
    return data[['Output']].to_numpy()
## Function to train models in Interval
def get_wt(feature_matrix, output, lambda_reg, p):
    """Fit one linear model by fixed-step gradient descent with an Lp penalty.

    feature_matrix: (n, d) design matrix. output: (n, 1) targets.
    lambda_reg: regularisation strength. p: penalty norm — p<=1 uses a
    lasso-style sign term, p>1 an Lp-derived term. Returns the (d, 1)
    weight column after 50 iterations; RMSE is printed each step.
    """
    phi=feature_matrix
    y=output
    learning_rate=0.02
    max_iter=50
    def power(x):
        # |w|**(p-2) with the 0**negative singularity mapped to 0.
        if x==0:
            return 0
        else:
            return x**(p-2)
    def mod_w_p(w):
        # Element-wise |w|**(p-2).
        x=np.absolute(w)
        vfunc = np.vectorize(power)
        x=vfunc(x)
        return x
    def lasso(w):
        # NOTE(review): clamps w IN PLACE to its sign vector (-1/0/+1), so the
        # caller's weights are mutated before the update step — confirm intended.
        for i in range(len(w)):
            if(w[i]<0):
                w[i]=-1
            if(w[i]>0):
                w[i]=1
        return w
    def rmse(x,y,w):
        # Root-mean-squared error of predictions x @ w against y.
        err=0.0
        err=((y-np.dot(x,w))**2).sum()/y.size
        err=err**.5
        return err
    w=np.zeros(phi.shape[1]).reshape((phi.shape[1],1))
    errs=[]
    for i in range(max_iter):
        # t1 - t2 is the negative gradient of the squared error.
        t1=np.dot(phi.T,y)
        t2=np.dot(np.dot(phi.T,phi),w)
        if(p>1):
            # NOTE(review): w.T @ |w|**(p-2) is a 1x1 scalar broadcast over the
            # whole update; the usual Lp-penalty gradient is the element-wise
            # w * |w|**(p-2) — confirm which is intended.
            t3=lambda_reg*p*np.dot(w.T,mod_w_p(w))
        if(p<=1):
            t3=lambda_reg*lasso(w)
        t=t1-t2-t3
        w=w+learning_rate*t
        errs.append(rmse(phi,y,w))
        print("RMSE : ",errs[-1])
    return w
## to get weight vectr given feature_matrix , target , lambda and P
def get_weight_vector(feature_matrix,output,lambda_reg,p):
    """Train one model per week-slot k (20 slots) and stack their weights.

    Rows are bucketed by their leading k index; each bucket keeps only its
    own 1008 RBF columns, and get_wt() is run per bucket. Returns a single
    column vector laid out as [0] + w_0 + w_1 + ... + w_19.
    """
    num_f=1008
    k_len=20
    fxs=[None]*k_len
    ys=[None]*k_len
    for i in range(len(feature_matrix)):
        k=int(feature_matrix[i][0])
        # Slice the 1008 columns belonging to slot k (offset past the
        # leading k column).
        if fxs[k] is None:
            fxs[k]=np.array([feature_matrix[i][k*num_f+1:k*num_f+1009]])
            ys[k]=np.array([output[i]])
        else:
            # NOTE(review): vstack inside the loop is O(n^2); collecting rows
            # in a list and stacking once after the loop would be linear.
            fxs[k]=np.vstack((fxs[k],feature_matrix[i][k*num_f+1:k*num_f+1009]))
            ys[k]=np.vstack((ys[k],output[i]))
    wts=[]
    for i in range(k_len):
        print('Model ',i+1)
        wts.append(get_wt(fxs[i],ys[i],lambda_reg,p))
    print(wts[0].shape)
    # Prepend a single 0 so the layout matches the k-column offset used above.
    wtss=np.array([0])
    wtss.shape=(1,1)
    wtss=np.concatenate((wtss,wts[0]))
    for i in range(1,k_len):
        wtss=np.concatenate((wtss,wts[i]))
    return wtss
## get best weight vector
def get_my_best_weight_vector():
    """Load the previously dumped best weight vector from best_wts.pikl.

    The file is written with ndarray.dump() (i.e. pickled); numpy >= 1.16.3
    refuses pickled payloads unless allow_pickle=True is passed explicitly,
    so the original bare np.load() call raises ValueError there.
    """
    wts = np.load('best_wts.pikl', allow_pickle=True)
    return wts
| {"/test.py": ["/template.py"]} |
64,533 | arijitx/Appliances-Energy-Use-Prediction | refs/heads/master | /test.py | import template as t
import pandas as pd
import numpy as np
phi=t.get_feature_matrix('data/train.csv')
print('DONE GENERATING FEATURES')
Y=t.get_output('data/train.csv')
print('DONE GENERATING OUTPUT')
wts=t.get_weight_vector(phi,Y,0.0,2)
print('DONE GENERATING WEIGHTS')
wts.dump('wts.pickl')
test_phi=t.get_feature_matrix('data/test.csv')
print('DONE GENERATING TEST FEATURES')
test=pd.read_csv('data/test.csv')
result=pd.DataFrame(test['Id'])
result['Output']=pd.Series(np.dot(test_phi,wts).flatten())
result.to_csv('output.csv',index=False)
print('DONE GENERATING OUTPUT')
| {"/test.py": ["/template.py"]} |
64,536 | somasekhar1/django | refs/heads/master | /app1/migrations/0001_initial.py | # Generated by Django 2.1.1 on 2019-02-01 11:49
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Employee',
fields=[
('id', models.IntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=50)),
('dob', models.DateField()),
('doj', models.DateField()),
('gender', models.CharField(max_length=10)),
('designition', models.CharField(max_length=40)),
('contact', models.IntegerField(default=10)),
('email', models.EmailField(max_length=254)),
('salery', models.DecimalField(decimal_places=2, max_digits=10)),
],
),
]
| {"/app1/views.py": ["/app1/models.py"]} |
64,537 | somasekhar1/django | refs/heads/master | /app1/models.py | from django.db import models
class Employee(models.Model):
    """Employee record with personal, contact and payroll details."""
    id=models.IntegerField(primary_key=True)  # manually assigned primary key
    name=models.CharField(max_length=50)
    dob=models.DateField()  # date of birth
    doj=models.DateField()  # date of joining
    gender=models.CharField(max_length=10)
    designition=models.CharField(max_length=40)  # (sic) job designation
    # NOTE(review): storing phone numbers as IntegerField loses leading
    # zeros and overflows long numbers; default=10 looks like a placeholder.
    contact=models.IntegerField(default=10)
    email=models.EmailField()
    salery=models.DecimalField(max_digits=10,decimal_places=2)  # (sic) salary
    # NOTE(review): default=False is an odd default for a file field — confirm.
    image=models.ImageField(upload_to="my_images",default=False)
| {"/app1/views.py": ["/app1/models.py"]} |
64,538 | somasekhar1/django | refs/heads/master | /app1/views.py | import csv
from django.http import HttpResponse
from django.shortcuts import render,redirect
from .models import Employee
def showindex(request):
emp=Employee.objects.all()
return render(request,"index.html",{"employee":emp})
def savesetails(request):
id=request.POST.get("t1")
name=request.POST.get("t2")
dob=request.POST.get("t3")
doj=request.POST.get("t4")
gender=request.POST.get("t5")
designition=request.POST.get("t6")
contact=request.POST.get("t7")
email=request.POST.get("t8")
salery=request.POST.get("t9")
image=request.FILES["t10"]
Employee(id=id,name=name,dob=dob,doj=doj,gender=gender,designition=designition,contact=contact,email=email,salery=salery,image=image).save()
return render(request,"index.html",{"msg":"data saved"})
def viewdetails(request):
id=request.POST.get("t1")
emp=Employee.objects.filter(id=id).all()
return render(request,"viewdetails.html",{"data":emp})
def deletedetails(request):
delete = request.POST.get("t2")
emp = Employee.objects.filter(id=delete).all()
return render(request,"deletedetails.html",{"datas":emp})
def deletes(request):
delid=request.POST.get("del_id")
Employee.objects.filter(id=delid).delete()
emp=Employee.objects.all()
return render(request,"index.html",{"employee":emp})
def updatedetails(request):
    """Render the update page listing all employees.

    NOTE(review): .update() is called with no keyword arguments, so no
    column is actually modified — the fields to update appear to be
    missing; the POSTed u_id is effectively unused.
    """
    uid=request.POST.get("u_id")
    Employee.objects.filter(id=uid).update()
    d1=Employee.objects.all()
    return render(request,"update.html",{"d2":d1})
def employeecsv(request):
    """Stream every Employee row as a downloadable CSV attachment."""
    response=HttpResponse(content_type="text/csv")
    # Bug fix: the header value must be a single string. The original
    #   response["content-disposition"]="atachment";filename="employee/csv"
    # set the header to the misspelled "atachment" and left the filename in
    # a dead statement after the semicolon.
    response["Content-Disposition"] = 'attachment; filename="employee.csv"'
    wr=csv.writer(response)
    emp=Employee.objects.all()
    for x in emp:
        wr.writerow([x.id,x.name,x.dob,x.doj,x.gender,x.designition,x.contact,x.email,x.salery,x.image])
return response | {"/app1/views.py": ["/app1/models.py"]} |
64,539 | malexit240/django-questions | refs/heads/master | /polls/models.py |
from django.db import models
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
class Question(models.Model):
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField(verbose_name='date published')
def __str__(self):
return "%s" %self.question_text
class Choice(models.Model):
question = models.ForeignKey(Question,on_delete=models.CASCADE)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
class User(models.Model):
last_name = models.CharField(max_length=64,blank=True)
class Meta:
abstract = True # wont saved in db
class Author(User):
first_name = models.CharField(max_length=64)
class Meta: # additional sets
verbose_name = 'Author' # for admin
verbose_name_plural = 'Authors' #for admin
ordering = ['-first_name'] # sort by field (- is down)
db_table = 'Test Table' # name table in db
unique_together = ('first_name','last_name') # complex key
class UaAuthor:
class Meta:
proxy = True # proxy it is proxy..whou
class Ganre(models.Model):
name = models.CharField(max_length=64)
class BookManager(models.Manager):
def author1(self):
return self.get_queryset().filter(first_name='author')
class Book(models.Model):
title = models.CharField(max_length=64)
author = models.ForeignKey(Author,on_delete=models.CASCADE,related_name='books')
ganres = models.ManyToManyField(Ganre,related_name='books',through='BookGanre',default=[])
objects=BookManager()
man = BookManager()
comments = GenericRelation('Comment',related_query_name='book')
class BookGanre(models.Model):
book = models.ForeignKey(Book,on_delete=models.CASCADE)
ganre = models.ForeignKey(Ganre,on_delete=models.CASCADE)
date_add = models.DateField(auto_now_add=True)
class Comment(models.Model):
text = models.CharField(max_length=150)
content_type = models.ForeignKey(ContentType,on_delete=models.CASCADE)
object_id = models.IntegerField()
content_object = GenericForeignKey('content_type','object_id')
def FillDB():
    """Seed the database with three authors, three ganres and three books.

    NOTE(review): all books are attributed to author a1; a2/a3 and the
    ganres are created but not linked — presumably intentional test data.
    """
    a1 = Author.objects.create(first_name='A',last_name='B')
    a2 = Author.objects.create(first_name='B',last_name='C')
    a3 = Author.objects.create(first_name='C',last_name='A')
    ga = Ganre.objects.create(name='ganre A')
    gb = Ganre.objects.create(name='ganre B')
    gc = Ganre.objects.create(name='ganre C')
    ba = Book.objects.create(title='Book A',author=a1)
    bb = Book.objects.create(title='Book B',author=a1)
    bc = Book.objects.create(title='Book C',author=a1)
    # Bug fix: removed the stray `models.JSONField(encoder="")` expression —
    # it constructed and immediately discarded an unbound model field.
    # The save() calls below are redundant after objects.create() but are
    # kept to preserve the original behavior.
    a1.save()
    a2.save()
    a3.save()
    ga.save()
    gb.save()
    gc.save()
    ba.save()
    bb.save()
bc.save() | {"/polls/views.py": ["/polls/models.py", "/polls/forms.py"], "/polls/forms.py": ["/polls/models.py"], "/polls/url.py": ["/polls/views.py"]} |
64,540 | malexit240/django-questions | refs/heads/master | /polls/views.py | from django.shortcuts import render
from django.http import (HttpResponse, Http404,
                         HttpResponseRedirect, HttpRequest)
from django.shortcuts import get_object_or_404, render
from django.template import loader
from django.views.decorators.http import require_GET
from django.views.generic import CreateView, DetailView, FormView, ListView

from .forms import ChoiceModelForm, QuestionForm
from .models import Choice, Question
class IndexView(ListView):
    """List all questions ordered by question_text (descending)."""
    template_name = 'polls/index.html' # model = Question
    context_object_name = 'object_list'
    queryset = Question.objects.order_by('-question_text')


class DetailsView(DetailView):
    """Detail page for a single Question, with an empty QuestionForm in context."""
    template_name = 'polls/details.html'
    model = Question
    context_object_name = 'question'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["form"] = QuestionForm()
        return context


class ChoiceCreate(CreateView):
    # NOTE(review): CreateView must be imported from django.views.generic;
    # the module's original import block does not import it, so importing
    # this file raises NameError.
    template_name = 'polls/detail.html'
    model = Choice
    fields = ['question','choice_text']
@require_GET
def index(request: HttpRequest):
    """Render the five most recent questions, newest first (GET only).

    Bug fix: the original kept a second, unreachable template-loader
    implementation after the return statement; it has been removed.
    The context key keeps its original (misspelled) name because the
    template references it.
    """
    return render(request, 'polls/index.html', {
        "lastest_question_list": Question.objects.order_by('-pub_date')[:5]
    })
def detail(request, question_id):
    """Render the detail page, raising Http404 when the question is missing."""
    try:
        return render(request, 'polls/details.html',
                      {"question": Question.objects.get(id=question_id)})
    except Question.DoesNotExist:
        # NOTE(review): message looks truncated — "Question does not exist"?
        raise Http404('Question does')


def result(request, question_id):
    """Render the results page for one question (404 when missing)."""
    question = get_object_or_404(Question,pk=question_id)
    return render(request,'polls/result.html',{"question":question})
def vote(request, question_id):
    """Increment the vote count of the choice posted for this question."""
    question = get_object_or_404(Question,id=question_id)
    form = QuestionForm(request.POST)
    if form.is_valid():
        try:
            # NOTE(review): reads the raw POST value rather than the
            # validated form data; when 'choice' is missing or invalid the
            # exception is swallowed and the view falls through returning
            # None (no HttpResponse) — a latent bug path.
            selected_choice = question.choice_set.get(id=request.POST['choice'])
        except (KeyError, Choice.DoesNotExist):
            pass
        else:
            selected_choice.votes +=1
            selected_choice.save()
            return HttpResponseRedirect('{}/result'.format(question_id))
class ChoiceFormView(FormView):
template_name = 'polls/detail.html'
form_class = ChoiceModelForm
succsess_url = '/polls' | {"/polls/views.py": ["/polls/models.py", "/polls/forms.py"], "/polls/forms.py": ["/polls/models.py"], "/polls/url.py": ["/polls/views.py"]} |
64,541 | malexit240/django-questions | refs/heads/master | /polls/forms.py | from django import forms
from .models import Choice
class QuestionForm(forms.Form):
    """Plain form carrying a single question_id integer."""
    question_id = forms.IntegerField()

    def celan(self):
        # NOTE(review): 'celan' is a typo of Django's clean() hook, so this
        # override never runs (it is a no-op pass-through either way).
        return self.cleaned_data
class ChoiceModelForm(forms.ModelForm):
    """ModelForm over Choice.

    NOTE(review): the Meta.fields entry on the following line reads
    'quetion' — Choice has no such field, so instantiating this form
    raises FieldError; it should read 'question'.
    """
    class Meta:
        model = Choice
fields = ['quetion'] | {"/polls/views.py": ["/polls/models.py", "/polls/forms.py"], "/polls/forms.py": ["/polls/models.py"], "/polls/url.py": ["/polls/views.py"]} |
64,542 | malexit240/django-questions | refs/heads/master | /polls/url.py | from django.contrib import admin
from django.urls import path,include
from django.views.generic import View, TemplateView , RedirectView
from .views import *
app_name = 'polls'
urlpatterns = [
path('<int:question_id>/',DetailsView.as_view(),name='detail'),
path('<int:question_id>/result',result,name='result'),
path('<int:question_id>/vote',vote,name='vote'),
path('',index,name='index'),
path('',TemplateView.as_view(template_name='polls/index.html')), # for static page
path('',IndexView.as_view()),
path('',DetailsView.as_view()),
] | {"/polls/views.py": ["/polls/models.py", "/polls/forms.py"], "/polls/forms.py": ["/polls/models.py"], "/polls/url.py": ["/polls/views.py"]} |
64,545 | oyurimatheus/pythonfuncional | refs/heads/master | /gerenciador_de_gastos.py | from collections import Counter
from functools import reduce
import operator
import itertools
from gastos import Gasto
def abre_arquivo(arquivo, encoding='utf-8'):
    """Read a CSV of "categoria,valor" lines into a list of Gasto.

    Note: the parameter `arquivo` (a path) is rebound to the open file
    object inside the with-block — intentional shadowing.
    """
    with open(arquivo, encoding=encoding) as arquivo:
        tuplas = map(_extrai_tipo_e_valor, arquivo)
        return list(map(Gasto.de_tupla, tuplas))
def previsao(gastos):
    """Lazily project next month's gastos as each current valor plus 1."""
    print("calculando previsao...")
    return map(lambda gasto: Gasto(gasto.categoria, gasto.valor + 1), gastos)
def abre_arquivos(arquivos):
    """Open every file in `arquivos` and flatten all parsed Gasto lists into one."""
    listas = map(abre_arquivo, arquivos)
    return list(itertools.chain.from_iterable(listas))
def prevendo_gastos_de_arquivos(gastos_mensais):
    """Run previsao() over each month's gastos and flatten the forecasts."""
    previsoes = (previsao(mes) for mes in gastos_mensais)
    return list(itertools.chain.from_iterable(previsoes))
def _extrai_tipo_e_valor(gasto):
tipo, valor = gasto.split(',')
return tipo, float(valor)
def total_de_gastos(gastos):
    """Sum the valor attribute over all gastos (0 for an empty iterable)."""
    return sum(gasto.valor for gasto in gastos)
def categorias_mais_frequentes(gastos):
    """Return the single most frequent categoria (raises IndexError if empty)."""
    contagem = Counter(gasto.categoria for gasto in gastos)
    return contagem.most_common(n=1)[0][0]
def total_de_gastos_da_categoria(categoria, gastos):
    """Sum the valor of every gasto whose categoria matches."""
    da_categoria = (gasto for gasto in gastos if gasto.categoria == categoria)
    return total_de_gastos(da_categoria)
def gastos_da_categoria(categoria, gastos):
    """Lazily yield the valor of each gasto in the given categoria."""
    for gasto in gastos:
        if gasto.categoria == categoria:
            yield gasto.valor
def maiores_gastos(gastos):
    """Top-5 gastos by valor, descending."""
    return _ordenar_valores(gastos, reverse=True)


def menores_gastos(gastos):
    """Bottom-5 gastos by valor, ascending."""
    return _ordenar_valores(gastos)


def _ordenar_valores(gastos, *, reverse=False, top_n=5):
    """Stable-sort gastos by valor and return the first top_n entries."""
    ordenados = sorted(gastos, key=lambda gasto: gasto.valor, reverse=reverse)
    return ordenados[:top_n]
| {"/gerenciador_de_gastos.py": ["/gastos.py"], "/principal.py": ["/gerenciador_de_gastos.py", "/menu_ui.py"], "/menu_ui.py": ["/gerenciador_de_gastos.py"]} |
64,546 | oyurimatheus/pythonfuncional | refs/heads/master | /principal.py | import gerenciador_de_gastos
import menu_ui
if __name__ == '__main__':
    # Entry point: ask for a CSV of gastos, load/parse it, then hand the
    # resulting list to the interactive menu loop.
    arquivo = input('Digite o nome do arquivo que deseja abrir: ')
    gastos = gerenciador_de_gastos.abre_arquivo(arquivo)
    menu_ui.mostra_menu(gastos)
| {"/gerenciador_de_gastos.py": ["/gastos.py"], "/principal.py": ["/gerenciador_de_gastos.py", "/menu_ui.py"], "/menu_ui.py": ["/gerenciador_de_gastos.py"]} |
64,547 | oyurimatheus/pythonfuncional | refs/heads/master | /gastos.py | class Gasto:
def __init__(self, categoria, valor):
    """Store categoria and valor in name-mangled private attributes."""
    self.__categoria = categoria
    self.__valor = valor
@property
def categoria(self):
    """Read-only accessor for the gasto's category."""
    return self.__categoria
@property
def valor(self):
    """Read-only accessor for the gasto's amount."""
    return self.__valor
@staticmethod
def de_tupla(tupla):
    """Build a Gasto from a (categoria, valor) 2-tuple."""
    categoria, valor = tupla
    return Gasto(categoria, valor)
| {"/gerenciador_de_gastos.py": ["/gastos.py"], "/principal.py": ["/gerenciador_de_gastos.py", "/menu_ui.py"], "/menu_ui.py": ["/gerenciador_de_gastos.py"]} |
64,548 | oyurimatheus/pythonfuncional | refs/heads/master | /menu_ui.py | import gerenciador_de_gastos
def mostra_menu(gastos):
texto_do_menu = '''
OPÇÕES
1 - Total dos gastos
2 - Gastos mais frequentes
3 - Total de gastos de uma categoria
4 - Lista de gastos de uma categoria
5 - Listar os 5 maiores gastos
6 - Listar os 5 menores gastos
7 - Previsão dos gastos do próximo mês
0 - Sair do programa
'''
opcoes = {
1: mostra_total_gastos,
2: mostra_gastos_frequentes,
3: mostra_total_de_gasto_da_categoria,
4: mostra_lista_de_gastos_da_categoria,
5: mostra_5_maiores_gastos,
6: mostra_5_menores_gastos,
7: previsao_de_gastos
}
while True:
print(texto_do_menu)
opcao = int(input('Selecione uma opção: '))
while opcao < 0 or opcao > 7:
opcao = int(input('Selecione uma opção válida: '))
if opcao == 0:
break
funcao = opcoes.get(opcao)
executa_opcao(funcao, gastos)
def executa_opcao(funcao, gastos):
    """Invoke the chosen menu handler, passing it the loaded expenses."""
    funcao(gastos)
def mostra_lista_de_gastos_da_categoria(gastos):
    """Ask for a category name and list every expense in that category."""
    categoria = input('Digite o nome da categoria: ')
    encontrados = gerenciador_de_gastos.gastos_da_categoria(categoria, gastos)
    print(f'Os gastos da categoria {categoria} foram:')
    for gasto in encontrados:
        print(f'\t - R${gasto}')
def mostra_total_de_gasto_da_categoria(gastos):
    """Ask for a category name and print its total amount spent."""
    categoria = input('Digite o nome da categoria: ')
    total_categoria = gerenciador_de_gastos.total_de_gastos_da_categoria(categoria, gastos)
    print(f'O total de gastos da categoria {categoria} foi de {total_categoria}')
def mostra_gastos_frequentes(gastos):
    """Print the most frequent expense category."""
    frequente = gerenciador_de_gastos.categorias_mais_frequentes(gastos)
    print(f'A categoria mais frequente foi a {frequente}')
def mostra_total_gastos(gastos):
    """Print the grand total across all expenses."""
    soma = gerenciador_de_gastos.total_de_gastos(gastos)
    print(f'Os gastos totais foram de R$ {soma}')
def mostra_5_maiores_gastos(gastos):
    """Print the largest expenses (category and value, one per line)."""
    top = gerenciador_de_gastos.maiores_gastos(gastos)
    print('Os maiores gastos foram:')
    for gasto in top:
        print(f'{gasto.categoria} - R$ {gasto.valor}')
def mostra_5_menores_gastos(gastos):
    """Print the smallest expenses (category and value, one per line)."""
    menores = gerenciador_de_gastos.menores_gastos(gastos)
    print('Os menores gastos foram:')
    for gasto in menores:
        print(f'{gasto.categoria} - R$ {gasto.valor}')
def previsao_de_gastos(gastos):
    """Forecast expenses, optionally merging a second file into the input."""
    arquivos = [gastos]
    arquivo = input('Se desejar abrir outro arquivo, digite seu nome, senão pressione <Enter>: ')
    if arquivo:
        arquivos.append(gerenciador_de_gastos.abre_arquivo(arquivo))
    previstos = gerenciador_de_gastos.prevendo_gastos_de_arquivos(arquivos)
    print(f'Listando {len(previstos)} gastos')
    for gasto in previstos:
        print(f'{gasto.categoria} - R$ {gasto.valor}')
| {"/gerenciador_de_gastos.py": ["/gastos.py"], "/principal.py": ["/gerenciador_de_gastos.py", "/menu_ui.py"], "/menu_ui.py": ["/gerenciador_de_gastos.py"]} |
64,563 | mbacchi/adminapi | refs/heads/master | /cloud_utils/net_utils/ip_tx.py | #!/usr/bin/python
"""
Simple IP packet generator/client test tool.
Provides very limited support for testing specific IP protocols. Primarily used to test
specific network paths in a cloud or data center traversing firewalls/security groups, nat points,
etc..
"""
from os.path import abspath, basename
from random import getrandbits
import array
import socket
import struct
import sys
import time
from optparse import OptionParser, OptionValueError
# ICMP TYPES (RFC 792)
ICMP_ECHO_REQUEST = 8
ICMP_EHCO_REPLY = 0  # kept (misspelled) for backward compatibility
ICMP_ECHO_REPLY = 0  # correctly-spelled alias
# SCTP CHUNK TYPES (RFC 4960, section 3.2)
CHUNK_DATA = 0
CHUNK_INIT = 1
# BUG FIX: was 3, but chunk type 3 is SACK; HEARTBEAT is type 4
# (matches ChunkHdr's heartbeat branch and the table in the SCTP class docstring).
CHUNK_HEARTBEAT = 4
# DEBUG LEVELS
TRACE = 3
DEBUG = 2
INFO = 1
QUIET = 0
# Current verbosity filter used by debug(); overridden by the -v CLI option.
VERBOSE_LVL = INFO
def get_script_path():
    """
    Return the absolute path of this script, or None when the inspect
    module cannot be imported.
    """
    try:
        import inspect
    except ImportError:
        return None
    frame_file = inspect.stack()[0][1]
    return abspath(frame_file)
def sftp_file(sshconnection, verbose_level=DEBUG):
    """
    Upload this script to the sshconnection's host via its sftp interface.

    :param sshconnection: SshConnection object
    :param verbose_level: level at which this method logs its output
    :return: the remote file name (basename of this script)
    """
    local_path = get_script_path()
    remote_name = basename(local_path)
    sshconnection.sftp_put(local_path, remote_name)
    debug('Done Copying script:"{0}" to "{1}"'.format(remote_name, sshconnection.host),
          verbose_level)
    return remote_name
def debug(msg, level=DEBUG):
    """
    Write debug info to stdout, filtered by the module-level verbosity and
    prefixed with '#' so results can be parsed out of the output easily.

    :param msg: string to print
    :param level: verbosity level of this message
    :return: None
    """
    # Suppress entirely when quiet or when the message is below the filter.
    if VERBOSE_LVL and VERBOSE_LVL >= level:
        for line in str(msg).splitlines():
            sys.stdout.write("# {0}\n".format(str(line)))
            sys.stdout.flush()
def get_src(dest):
    """
    Learn the local source IP of the outbound interface used to reach the
    provided destination, by connecting a UDP socket and reading the
    address it was bound to.

    :param dest: destination address/ip
    :return: local ip
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    probe.connect((dest, 1))
    local_ip = probe.getsockname()[0]
    probe.close()
    return local_ip
def remote_sender(ssh, dst_addr, port=None, srcport=None, proto=17, count=1, socktimeout=10,
                  timeout=15, data=None, verbose=False, interval=.1, cb=None, cbargs=None):
    """
    Uses the ssh SshConnection obj's sftp interface to transfer this script to the remote
    machine and execute it with the parameters provided. Will return the combined stdout & stderr
    of the remote session.
    :param ssh: SshConnection object to run this script
    :param dst_addr: Where to send packets to
    :param port: The destination port of the packets (depending on protocol support)
    :param srcport: The source port to use in the sent packets
    :param proto: The IP protocol number (ie: 1=icmp, 6=tcp, 17=udp, 132=sctp)
    :param count: The number of packets to send
    :param timeout: The max amount of time allowed for the remote command to execute
    :param socktimeout: Time out used for socket operations
    :param data: Optional data to append to the built packet(s)
    :param verbose: Boolean to enable/disable printing of debug info
    :param interval: Seconds to pause between packets (forwarded as the remote '-i' option)
    :param cb: A method/function to be used as a call back to handle the ssh command's output
               as it is received. Must return type sshconnection.SshCbReturn
    :param cbargs: list of args to be provided to callback cb.
    :return: :raise RuntimeError: If remote command return status != 0
    """
    # NOTE(review): when verbose is False output still logs at DEBUG; when
    # True it follows the module-wide VERBOSE_LVL filter.
    if verbose:
        verbose_level = VERBOSE_LVL
    else:
        verbose_level = DEBUG
    # Copy this script to the remote host so the packets originate there.
    script = sftp_file(ssh, verbose_level=verbose_level)
    # destip, proto, dstport=345, ptype=None, payload=None
    cmd = "python {0} -o {1} -c {2} -d {3} -i {4} -t {5} "\
        .format(script, proto, count, dst_addr, interval, socktimeout)
    # Optional flags are appended only when explicitly provided.
    if port:
        cmd += " -p {0} ".format(port)
    if srcport is not None:
        cmd += " -s {0} ".format(srcport)
    if data is not None:
        cmd += ' -l "{0}"'.format(data.strip('"'))
    out = ""
    debug("CMD: {0}".format(cmd), verbose_level)
    cmddict = ssh.cmd(cmd, listformat=False, timeout=timeout, cb=cb, cbargs=cbargs,
                      verbose=verbose)
    out += cmddict.get('output')
    if cmddict.get('status') != 0:
        raise RuntimeError('{0}\n"{1}" cmd failed with status:{2}, on host:{3}'
                           .format(out, cmd, cmddict.get('status'), ssh.host))
    debug(out, verbose_level)
    return out
def send_ip_packet(destip, proto=4, count=1, interval=.1, payload=None, timeout=10):
    """
    Send raw IP packet(s); the payload buffer is appended to the IP header.

    :param destip: destination ip
    :param proto: protocol number to use, default is 4
    :param count: number of packets to send
    :param interval: seconds to sleep between packets
    :param payload: optional string buffer to append to the IP packet
    :param timeout: socket timeout in seconds
    """
    if payload is None:
        payload = 'IP TEST PACKET'
    payload = payload or ""
    sock = None
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, proto)
        sock.settimeout(timeout)
        for _ in xrange(0, count):
            sock.sendto(payload, (destip, 0))
            time.sleep(interval)
    except socket.error as SE:
        # Raw sockets require elevated privileges; give a hint before re-raising.
        if SE.errno == 1 and 'not permitted' in SE.strerror:
            sys.stderr.write('Permission error creating socket, try with sudo, root...?\n')
        raise
    finally:
        if sock:
            sock.close()
def send_sctp_packet(destip, dstport=101, srcport=100, proto=132, ptype=None, payload=None,
                     sctpobj=None, count=1, interval=.1, timeout=10):
    """
    Send basic SCTP packets over a raw socket.

    :param destip: destination IP to send the SCTP packet to
    :param dstport: destination port to use in the SCTP packet
    :param srcport: source port to use in the SCTP packet
    :param proto: protocol number to use, default is 132 for SCTP
    :param ptype: SCTP chunk type; defaults to INIT
    :param payload: optional payload for the chunk (ie data chunk payload)
    :param sctpobj: a pre-built SCTP object to send instead of building one
    :param count: number of packets to send
    :param interval: seconds to sleep between packets
    :param timeout: socket timeout in seconds
    """
    sock = None
    if payload is None:
        payload = 'SCTP TEST PACKET'
    payload = payload or ""
    if ptype is None:
        ptype = CHUNK_INIT
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, proto)
        sock.setsockopt(socket.SOL_IP, socket.IP_TOS, 0x02)  # set ecn bit
        sock.settimeout(timeout)
        if not sctpobj:
            sctpobj = SCTP(srcport=srcport, dstport=dstport, ptype=ptype, payload=payload)
        for _ in xrange(0, count):
            sock.sendto(sctpobj.pack(), (destip, dstport))
            time.sleep(interval)
    except socket.error as SE:
        # Raw sockets require elevated privileges; give a hint before re-raising.
        if SE.errno == 1 and 'not permitted' in SE.strerror:
            sys.stderr.write('Permission error creating socket, try with sudo, root...?\n')
        raise
    finally:
        if sock:
            sock.close()
def send_udp_packet(destip, srcport=None, dstport=101, proto=17, payload=None, count=1,
                    interval=.1, timeout=10):
    """
    Send basic UDP packet(s).

    :param destip: destination IP to send the UDP packet to
    :param srcport: source port; when provided the socket binds to it first
    :param dstport: destination port to use in the UDP packet
    :param proto: protocol number, default is 17 for UDP
    :param payload: optional payload for this packet
    :param count: number of packets to send
    :param interval: seconds to sleep between packets
    :param timeout: socket timeout in seconds
    """
    sock = None
    if payload is None:
        payload = 'UDP TEST PACKET'
    payload = payload or ""
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, proto)
        sock.settimeout(timeout)
        if srcport is not None:
            sock.bind(('', srcport))
        for _ in xrange(0, count):
            sock.sendto(payload, (destip, dstport))
            time.sleep(interval)
    except socket.error as SE:
        # Socket permission failures get a hint before re-raising.
        if SE.errno == 1 and 'not permitted' in SE.strerror:
            sys.stderr.write('Permission error creating socket, try with sudo, root...?\n')
        raise
    finally:
        if sock:
            sock.close()
def send_tcp_packet(destip, dstport=101, srcport=None, proto=6, payload=None, bufsize=None,
                    count=1, interval=.1, timeout=10):
    """
    Connect and send basic TCP payload(s), optionally reading a response.

    :param destip: destination IP to connect to
    :param dstport: destination port to use in this TCP connection
    :param srcport: source port; when provided the socket binds to it first
    :param proto: protocol number, default is 6 for TCP
    :param payload: optional payload for this packet
    :param bufsize: buffer size for recv() after each send
    :param count: number of sends to perform
    :param interval: seconds to sleep between sends
    :param timeout: socket timeout in seconds
    :return: any data read from the socket after sending
    """
    data = ''
    sock = None
    if payload is None:
        payload = 'TCP TEST PACKET'
    payload = payload or ""
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, proto)
        sock.settimeout(timeout)
        if srcport is not None:
            sock.bind(('', srcport))
        sock.connect((destip, dstport))
        for _ in xrange(0, count):
            sock.send(payload)
            # Only read back when the caller asked for a response buffer.
            if bufsize:
                data += sock.recv(bufsize)
            time.sleep(interval)
    except socket.error as SE:
        # Socket permission failures get a hint before re-raising.
        if SE.errno == 1 and 'not permitted' in SE.strerror:
            sys.stderr.write('Permission error creating socket, try with sudo, root...?\n')
        raise
    finally:
        if sock:
            sock.close()
    return data
def send_icmp_packet(destip, id=1234, seqnum=1, code=0, proto=1, ptype=None, count=1, interval=.1,
                     payload='ICMP TEST PACKET', timeout=10):
    """
    Send basic ICMP packet(s). Does not wait for, or validate, a response.

    :param destip: destination IP to send the ICMP packet to
    :param id: ICMP identifier, defaults to 1234
    :param seqnum: sequence number, defaults to 1
    :param code: ICMP subtype code, defaults to 0
    :param proto: protocol number, defaults to 1 for ICMP
    :param ptype: ICMP type, defaults to echo request
    :param count: number of packets to send
    :param interval: seconds to sleep between packets
    :param payload: optional payload
    :param timeout: socket timeout in seconds
    """
    if payload is None:
        payload = 'ICMP TEST PACKET'
    payload = payload or ""
    sock = None
    if ptype is None:
        ptype = ICMP_ECHO_REQUEST
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, proto)
        sock.settimeout(timeout)
        message = ICMP(destaddr=destip, id=id, seqnum=seqnum, code=code, ptype=ptype,
                       payload=payload)
        for _ in xrange(0, count):
            sock.sendto(message.pack(), (destip, 0))
            time.sleep(interval)
    except socket.error as SE:
        # Raw sockets require elevated privileges; give a hint before re-raising.
        if SE.errno == 1 and 'not permitted' in SE.strerror:
            sys.stderr.write('Permission error creating socket, try with sudo, root...?\n')
        raise
    finally:
        if sock:
            sock.close()
def send_packet(destip, proto, srcport=None, dstport=345, ptype=None, payload=None, count=1,
                interval=.1, timeout=10, verbose=DEBUG):
    """
    Dispatch wrapper that sends packets of the requested protocol type.

    :param destip: destination IP to send packets to
    :param proto: IP protocol number or name (ie: 1=icmp, 6=tcp, 17=udp, 132=sctp)
    :param srcport: source port to use in the packet (depends on protocol)
    :param dstport: destination port to use in the packet (depends on protocol)
    :param ptype: packet subtype (if the protocol supports subtypes)
    :param payload: optional payload to send with each packet
    :param count: number of packets to send
    :param interval: seconds to sleep between packets
    :param timeout: socket timeout in seconds
    :param verbose: level this call logs its debug line at
    """
    debug('send_packet: destip:{0}, dstport:{1}, proto:{2}, ptype:{3}, count:{4}, interval:{5}'
          .format(destip, dstport, proto, ptype, count, interval), level=verbose)
    # Arguments shared by every protocol-specific sender below.
    common = dict(destip=destip, payload=payload, count=count, interval=interval,
                  timeout=timeout)
    if proto in (1, 'icmp'):
        send_icmp_packet(ptype=ptype, **common)
    elif proto in (6, 'tcp'):
        send_tcp_packet(srcport=srcport, dstport=dstport, **common)
    elif proto in (17, 'udp'):
        send_udp_packet(srcport=srcport, dstport=dstport, **common)
    elif proto in (132, 'sctp'):
        send_sctp_packet(srcport=srcport, ptype=ptype, dstport=dstport, **common)
    else:
        # Unknown protocol numbers fall back to a raw IP packet.
        send_ip_packet(proto=proto, **common)
###################################################################################################
# ICMP PACKET BUILDERS
###################################################################################################
class ICMP(object):
    """
    Builder for a basic ICMP message (header + payload).

    Note: pack() concatenates the payload directly to the packed header, so
    the payload must be the same string type as struct.pack's output
    (written for Python 2 str).
    """
    def __init__(self, destaddr, id=1234, seqnum=1, code=0, ptype=None,
                 payload=None):
        self.destaddr = destaddr
        if payload is None:
            payload = 'ICMP TEST PACKET'
        self.payload = payload or ""
        # Default to an echo request when no explicit ICMP type is given.
        self.icmptype = ptype or ICMP_ECHO_REQUEST
        self.id = id
        self.code = code
        self.seqnum = seqnum
    def pack(self):
        """Serialize the ICMP header + payload with a computed checksum."""
        # First pass: header built with a zeroed checksum field.
        tmp_checksum = 0
        header = struct.pack("bbHHh", self.icmptype, self.code, tmp_checksum, self.id, self.seqnum)
        # Checksum covers header + payload, stored in network byte order.
        fin_checksum = checksum(header + self.payload)
        header = struct.pack("bbHHh", self.icmptype, self.code, socket.htons(fin_checksum),
                             self.id, self.seqnum)
        packet = header + self.payload
        return packet
###################################################################################################
# SCTP PACKET BUILDERS
###################################################################################################
class InitChunk(object):
    """SCTP INIT chunk value: tag, a_rwnd, stream counts, TSN + parameters."""
    def __init__(self, tag=None, a_rwnd=62464, outstreams=10, instreams=65535, tsn=None,
                 param_data=None):
        # A falsy tag/tsn falls back to a random 32-bit value (then a constant).
        self.tag = tag or getrandbits(32) or 3
        self.a_rwnd = a_rwnd
        self.outstreams = outstreams or 1  # 0 is invalid
        self.instreams = instreams or 1  # 0 is invalid
        self.tsn = tsn or getrandbits(32) or 4
        if param_data is None:
            # Default optional parameters: supported address types, ECN,
            # and forward-TSN support.
            parts = [SctpSupportedAddrTypesParam().pack(),
                     SctpEcnParam().pack(),
                     SctpFwdTsnSupportParam().pack()]
            param_data = "".join(parts)
        self.param_data = param_data
    def pack(self):
        """Serialize the INIT chunk value (big-endian) + any parameter data."""
        header = struct.pack('!IIHHI', self.tag, self.a_rwnd, self.outstreams, self.instreams,
                             self.tsn)
        if self.param_data:
            return header + self.param_data
        return header
class SctpIPv4Param(object):
    """SCTP IPv4 address parameter (type 5, fixed length 8)."""
    def __init__(self, type=5, length=8, ipv4addr=None):
        self.type = type
        self.length = length
        self.addr = ipv4addr
    def pack(self):
        """Serialize as big-endian type, length, 32-bit address."""
        return struct.pack('!HHI', self.type, self.length, self.addr)
class SctpSupportedAddrTypesParam(object):
    """
    SCTP 'Supported Address Types' parameter (type 12).

    Fix: the ptype argument was accepted but ignored (hardcoded to 12);
    it is now honored. The default is unchanged.
    """
    def __init__(self, ptype=12, addr_types=None):
        # Known address-type parameter codes: 5=IPv4, 6=IPv6, 11=hostname.
        ipv4 = 5
        if addr_types is None:
            addr_types = [ipv4]
        if not isinstance(addr_types, list):
            addr_types = [addr_types]
        self.addr_types = addr_types
        self.ptype = ptype  # BUG FIX: previously hardcoded to 12
        # 4 header bytes + 2 bytes per listed address type.
        self.length = 4 + (2 * len(self.addr_types))
    def pack(self):
        """Serialize header + address types, padded to a 4-byte boundary."""
        fmt = '!HH' + 'H' * len(self.addr_types)
        packet = struct.pack(fmt, self.ptype, self.length, *self.addr_types)
        # An odd count of 2-byte entries needs 2 bytes of zero padding.
        if len(self.addr_types) % 2:
            packet += struct.pack("H", 0)
        return packet
class SctpEcnParam(object):
    """SCTP ECN-capable parameter (type 0x8000, no value body)."""
    def __init__(self, ptype=32768):
        self.ptype = ptype
        self.length = 4
    def pack(self):
        """Serialize as a bare big-endian type/length header."""
        return struct.pack('!HH', self.ptype, self.length)
class SctpFwdTsnSupportParam(object):
    """SCTP Forward-TSN supported parameter (type 0xC000, no value body)."""
    def __init__(self, ptype=49152):
        self.ptype = ptype
        self.length = 4
    def pack(self):
        """Serialize as a bare big-endian type/length header."""
        return struct.pack('!HH', self.ptype, self.length)
class DataChunk(object):
    """
    SCTP DATA chunk value (chunk type 0).

    Fix: pack with unsigned formats ('!IHHI'); the previous signed '!iHHi'
    raised struct.error for TSN / payload-protocol values >= 2**31, which a
    random 32-bit TSN can produce.
    """
    def __init__(self, tsn=1, stream_id=12345, stream_seq=54321, payload_proto=0, payload=None):
        if payload is None:
            payload = "TEST SCTP DATA CHUNK"
        self.payload = payload
        self.tsn = tsn
        self.stream_id = stream_id
        self.stream_seq = stream_seq
        self.payload_proto = payload_proto
    @property
    def length(self):
        # 12 fixed bytes (tsn + stream id + stream seq + proto) + payload.
        return 12 + len(self.payload)
    def pack(self):
        """Serialize the DATA chunk value (big-endian) + payload."""
        header = struct.pack('!IHHI', self.tsn, self.stream_id, self.stream_seq,
                             self.payload_proto)
        return header + self.payload
class HeartBeatChunk(object):
    """SCTP HEARTBEAT chunk value: parameter header + opaque heartbeat info."""
    def __init__(self, parameter=1, payload=None):
        self.parameter = parameter
        if payload is None:
            # Random opaque heartbeat info when the caller supplies none.
            payload = str(getrandbits(64))
        self.hb_info = payload
        # 4 bytes of parameter header plus the info itself.
        self.hb_info_length = 4 + len(payload)
    def pack(self):
        """Serialize as parameter type/length followed by the info bytes."""
        return struct.pack('!HH', self.parameter, self.hb_info_length) + self.hb_info
class ChunkHdr(object):
    """
    SCTP chunk: common header (type, flags, length) + packed chunk value.

    Fixes: removed the dead `self.chunklength = 4` assignment (it was
    immediately overwritten); an unsupported chunktype without an explicit
    `chunk` now raises ValueError up front instead of failing later with
    AttributeError on the missing chunkobj.
    """
    def __init__(self, chunktype=None, flags=0, payload=None, chunk=None):
        if chunktype is None:
            chunktype = 1  # default to INIT
        self.chunktype = chunktype
        self.chunkflags = flags
        # Prefer a caller-supplied chunk object; otherwise build a known type.
        if chunk:
            self.chunkobj = chunk
        elif chunktype == 0:
            self.chunkobj = DataChunk(payload=payload)
        elif chunktype == 1:
            self.chunkobj = InitChunk()
        elif chunktype == 4:
            self.chunkobj = HeartBeatChunk(payload=payload)
        else:
            raise ValueError('No builder for chunk type: {0}; provide a prebuilt chunk'
                             .format(chunktype))
        self.chunk_data = self.chunkobj.pack()
        # Chunk length covers the 4-byte common header plus the value.
        self.chunklength = 4 + len(self.chunk_data)
    def pack(self):
        """Serialize the common chunk header followed by the chunk value."""
        hdr = struct.pack('!bbH', self.chunktype, self.chunkflags, self.chunklength)
        return hdr + self.chunk_data
class SCTP(object):
    """
    Minimal SCTP packet: common header (ports, verification tag, checksum)
    followed by a single packed chunk.

    Chunk Types
    0 DATA Payload data
    1 INIT Initiation
    2 INIT ACK initiation acknowledgement
    3 SACK Selective acknowledgement
    4 HEARTBEAT Heartbeat request
    5 HEARTBEAT ACK Heartbeat acknowledgement
    6 ABORT Abort
    7 SHUTDOWN Shutdown
    8 SHUTDOWN ACK Shutdown acknowledgement
    9 ERROR Operation error
    10 COOKIE ECHO State cookie
    11 COOKIE ACK Cookie acknowledgement
    12 ECNE Explicit congestion notification echo (reserved)
    13 CWR Congestion window reduced (reserved)
    14 SHUTDOWN COMPLETE
    Chunk Flags
    # I - SACK chunk should be sent back without delay.
    # U - If set, this indicates this data is an unordered chunk and the stream sequence number
         is invalid. If an unordered chunk is fragmented then each fragment has this flag set.
    # B - If set, this marks the beginning fragment. An unfragmented chunk has this flag set.
    # E - If set, this marks the end fragment. An unfragmented chunk has this flag set
    """
    def __init__(self, srcport, dstport, tag=None, ptype=None, payload=None, chunk=None):
        self.src = srcport
        self.dst = dstport
        self.checksum = 0
        if ptype is None:
            ptype = CHUNK_INIT
            tag = 0  # Verification tag is set to 0 for init
        if tag is None:
            tag = getrandbits(16)
        self.tag = tag
        # Use the caller-supplied chunk or build one of the requested type.
        chunk = chunk or ChunkHdr(chunktype=ptype, payload=payload)
        self.chunk = chunk.pack()
    def pack(self, src=None, dst=None, tag=None, do_checksum=True):
        """Serialize header + chunk; optionally fill in the CRC-32c checksum."""
        src = src or self.src
        dst = dst or self.dst
        verification_tag = tag or self.tag
        # First pass: common header with a zeroed checksum field.
        packet = struct.pack('!HHII', src, dst, verification_tag, 0)
        chunk = self.chunk
        if not do_checksum:
            packet += chunk
            return packet
        # CRC-32c over the whole packet (zero-checksum header + chunk).
        pktchecksum = cksum(packet + chunk)
        # Rebuild the packet with the checksum
        packet = struct.pack('!HHII', src, dst,
                             verification_tag, pktchecksum)
        packet += chunk
        return packet
###################################################################################################
# Borrowed checksum method, big thanks to the following...
# (Including this in this file for ease of transfer when testing this on remote VMs as a
# standalone script.)
###################################################################################################
def checksum(source_string):
    """
    16-bit ones-complement checksum over a string of bytes, as used by ICMP.

    From: https://github.com/samuel/python-ping
    Copyright (c) Matthew Dixon Cowles, <http://www.visi.com/~mdc/>.
    Distributable under the terms of the GNU General Public License
    version 2. Provided with no warranties of any sort.

    Fix: use floor division for the pair count. Under Python 3 the old
    `len(...) / 2` produced a float, so odd-length input walked one byte
    past the end of the string (IndexError). Also renamed the accumulator
    so it no longer shadows the builtin `sum`.
    """
    total = 0
    # Largest even prefix length: bytes are consumed as 16-bit pairs.
    count_to = (len(source_string) // 2) * 2
    count = 0
    while count < count_to:
        # Combine each little-endian byte pair into a 16-bit word.
        this_val = ord(source_string[count + 1]) * 256 + ord(source_string[count])
        total = total + this_val
        total = total & 0xffffffff  # Necessary?
        count = count + 2
    if count_to < len(source_string):
        # Odd length: fold in the trailing byte.
        total = total + ord(source_string[len(source_string) - 1])
        total = total & 0xffffffff  # Necessary?
    # Fold the 32-bit sum into 16 bits and take the ones complement.
    total = (total >> 16) + (total & 0xffff)
    total = total + (total >> 16)
    answer = ~total
    answer = answer & 0xffff
    # Swap bytes so the result is in network byte order.
    answer = answer >> 8 | (answer << 8 & 0xff00)
    return answer
###################################################################################################
# Borrowed crc32c for python, big thanks to the following...
# (Including this in this file for ease of transfer when testing this on remote VMs as a
# standalone script.)
###################################################################################################
# """
# Copyright (c) 2004 Dug Song <dugsong@monkey.org>
# All rights reserved, all wrongs reversed.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the authors and copyright holders may not be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
# THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# """
# CRC-32C Checksum
# http://tools.ietf.org/html/rfc3309
def _build_crc32c_table():
    """Generate the 256-entry reflected CRC-32C (Castagnoli) lookup table.

    Replaces the hand-written literal table: its Python 2 long suffixes
    ('L') are a SyntaxError on Python 3. The generated values are identical
    to the original entries.
    """
    # Reflected (bit-reversed) form of the CRC-32C polynomial 0x1EDC6F41.
    poly = 0x82F63B78
    table = []
    for index in range(256):
        crc = index
        for _ in range(8):
            if crc & 1:
                crc = (crc >> 1) ^ poly
            else:
                crc >>= 1
        table.append(crc)
    return tuple(table)
crc32c_table = _build_crc32c_table()
def add(crc, buf):
    """Fold the bytes of buf into a running CRC-32C accumulator."""
    for octet in array.array('B', buf):
        crc = (crc >> 8) ^ crc32c_table[(crc ^ octet) & 0xff]
    return crc
def done(crc):
    """Finalize a CRC-32C accumulator: invert and byte-swap to big-endian.

    Fix: the 0xffffffffL long literal was Python 2-only syntax
    (SyntaxError on Python 3); the plain int literal behaves identically
    on both.
    """
    tmp = ~crc & 0xffffffff
    b0 = tmp & 0xff
    b1 = (tmp >> 8) & 0xff
    b2 = (tmp >> 16) & 0xff
    b3 = (tmp >> 24) & 0xff
    crc = (b0 << 24) | (b1 << 16) | (b2 << 8) | b3
    return crc
def cksum(buf):
    """Return the computed CRC-32c checksum of buf (RFC 3309).

    Fix: the 0xffffffffL long literal was Python 2-only syntax
    (SyntaxError on Python 3).
    """
    return done(add(0xffffffff, buf))
###################################################################################################
# end of borrowed crc32c for python
###################################################################################################
if __name__ == "__main__":
    # CLI entry point: parse options, validate the destination, then send.
    parser = OptionParser()
    parser.add_option("-p", "--dstport", dest="dstport", type="int", default=101,
                      help="Destination Port", metavar="PORT")
    parser.add_option("-s", "--srcport", dest="srcport", type="int", default=100,
                      help="Source Port", metavar="PORT")
    parser.add_option("-c", "--count", dest="count", type="int", default=1,
                      help="Number of packets to send", metavar="COUNT")
    parser.add_option("-i", "--interval", dest="interval", type="float", default=.1,
                      help="Time interval between sending packets, default='.1'",
                      metavar="INTERVAL")
    parser.add_option("-d", "--dst", dest="destip", default=None,
                      help="Destination ip", metavar="IP")
    parser.add_option("-o", "--proto", dest="proto", type="int", default=17,
                      help="Protocol number(Examples: 1:icmp, 6:tcp, 17:udp, 132:sctp), "
                           "default:17", metavar="PROTOCOL")
    parser.add_option("-l", "--payload", dest="payload", default=None,
                      help="Chunk, data, payload, etc", metavar="DATA")
    parser.add_option("-v", "--verbose", dest="verbose", type='int', default=DEBUG,
                      help="Verbose level, 0=quiet, 1=info, 2=debug, 3=trace. Default=1")
    parser.add_option("-t", "--socktimeout", dest='socktimeout', type='float', default=10,
                      help='Socket timeout in seconds', metavar='TIMEOUT')
    options, args = parser.parse_args()
    # Destination is the only mandatory argument.
    if not options.destip:
        raise OptionValueError("'-d / --dst' for destination IP/Addr must be provided")
    # Apply the CLI verbosity to the module-level filter used by debug().
    VERBOSE_LVL = options.verbose
    destip = options.destip
    proto = options.proto
    srcport = options.srcport
    interval = options.interval
    socktimeout = options.socktimeout
    if srcport is not None:
        srcport = int(srcport)
    dstport = int(options.dstport)
    payload = options.payload
    count = options.count
    # Build and transmit the requested packets.
    send_packet(destip=destip, proto=proto, srcport=srcport, dstport=dstport, payload=payload,
                count=count, interval=interval, timeout=socktimeout)
| {"/cloud_admin/systemconnection.py": ["/cloud_admin/access/autocreds.py"]} |
64,564 | mbacchi/adminapi | refs/heads/master | /cloud_admin/cloudview/__init__.py |
import difflib
import json
import os
import yaml
from shutil import copyfile
from cloud_utils.system_utils.machine import Machine
class Namespace(object):
    """
    Convert dict (if provided) into attributes and return a somewhat
    generic object. Nested dicts become nested Namespace instances.

    Fixes: to_json() accepted sort_keys/indent arguments but ignored them
    (hardcoded True/4) — they are now passed through; the py2-only print
    statement and dict.iteritems() were replaced with forms valid on both
    Python 2 and 3 (behavior unchanged).
    """
    def __init__(self, **kwargs):
        if kwargs:
            for key in kwargs:
                value = kwargs[key]
                try:
                    if isinstance(value, dict):
                        # Recurse so nested dicts are attribute-addressable too.
                        setattr(self, key, Namespace(**value))
                    else:
                        setattr(self, key, value)
                except:
                    # Report the offending key/value before re-raising.
                    # (Single-argument print(...) is valid py2 and py3.)
                    print('"{0}" ---> "{1}" , type: "{2}"'.format(key,
                                                                  value,
                                                                  type(value)))
                    raise
    def __repr__(self):
        return "Namespace:{0}".format(self.__class__.__name__)
    def _get_keys(self):
        # list() keeps the Python 2 behavior (a list) on Python 3 as well.
        return list(vars(self).keys())
    def _filtered_dict(self):
        # Public attributes only (drop anything starting with "_").
        return {k: v for (k, v) in self.__dict__.items() if not k.startswith('_')}
    def do_default(self):
        # Removes all values not starting with "_" from dict
        for key in self._filtered_dict():
            if key in self.__dict__:
                if isinstance(self.__dict__[key], Namespace):
                    self.__dict__[key].do_default()
                self.__dict__.pop(key)
    def to_json(self, default=None, sort_keys=True, indent=4, **kwargs):
        """Serialize public attributes as JSON.

        Fix: sort_keys and indent are now honored instead of being
        silently overridden with True/4.
        """
        if default is None:
            def default(o):
                return o._filtered_dict()
        return json.dumps(self,
                          default=default,
                          sort_keys=sort_keys,
                          indent=indent,
                          **kwargs)
    def to_yaml(self, json_kwargs=None, yaml_kwargs=None):
        """Serialize via a JSON -> YAML round trip (requires PyYAML)."""
        if yaml_kwargs is None:
            yaml_kwargs = {'default_flow_style': False}
        if json_kwargs is None:
            json_kwargs = {}
        jdump = self.to_json(**json_kwargs)
        yload = yaml.load(jdump)
        return yaml.dump(yload, **yaml_kwargs)
class ConfigBlock(Namespace):
    def __init__(self, connection):
        """Store the admin/cloud connection used to build this block's active config."""
        self._connection = connection
    @property
    def blockname(self):
        """
        This must be defined for matching the current config block against a specific section
        in a loaded config (json, yaml). ie to match against this block, self.blockname should
        return the string 'cluster'.
        ie:
        cluster:
            one:
                stuff: 1000
        """
        # Default: class name lowercased with the 'block' substring removed
        # (e.g. ClusterBlock -> 'cluster').
        return self.__class__.__name__.lower().replace('block', '')
    def build_active_config(self):
        """Subclass hook: gather this block's live/active config. Abstract here."""
        raise NotImplementedError("{0} has not implemented this base method"
                                  .format(self.__class__.__name__))
    def read_config_from_file(self, location=None):
        """Subclass hook: load this block's config from a file. Abstract here."""
        raise NotImplementedError("{0} has not implemented this base method"
                                  .format(self.__class__.__name__))
    def diff_config(self, active=None, configured=None):
        """Subclass hook: diff active vs configured values. Abstract here."""
        raise NotImplementedError("{0} has not implemented this base method"
                                  .format(self.__class__.__name__))
def _get_dict_from_file(self, file_path, machine=None, format='json'):
"""
Attempts to read in json from an existing file, load and return as
a dict
:param file_path: string representing a local or remote config file path to read from
:param machine a cloud utils Machine() obj used for reading a remote filepath.
:param format: supported values are 'json' and 'yaml'. Defaults to 'json'
"""
newdict = None
if machine:
assert isinstance(machine, Machine)
if not machine.is_file(file_path):
raise ValueError('File not found on remote machine:"{0}", path:"{1}"'
.format(machine.hostname, file_path))
data = machine.sys('cat {0}'.format(file_path), listformat=False, code=0)
elif os.path.exists(str(file_path)) and os.path.getsize(str(file_path)):
if not os.path.isfile(file_path):
raise ValueError('config file exists at path and is not '
'a file:' + str(file_path))
conf_file = open(file_path, 'rb')
with conf_file:
data = conf_file.read()
if data:
def searchDictForSelf(self, newdict):
# Return the section of the loaded dict that pertains to this configblock
section = newdict.get(self.blockname, None)
if section and isinstance(section, dict):
return section
# Check for a nested dict that matches...
for key, value in newdict.iteritems():
if isinstance(value, dict):
if str(key).lower() == str(self.blockname).lower():
return value
elif self.blockname in value:
return value.get(self.blockname)
else:
# check the nested dict
searchDictForSelf(self, value)
# No section matching self.blockname was found....
return {}
try:
if format == 'yaml':
newdict = yaml.load(data)
else:
newdict = json.loads(data)
newdict = searchDictForSelf(self, newdict)
except ValueError as ve:
ve.args = (['Failed to load json config from: "{0}". '
'ERR: "{1}"'.format(file_path, ve.message)])
raise
return newdict
def diff_json(self, file_path, machine=None):
"""
Method to show current values -vs- those (saved) in a file.
Will return a formatted string to show the difference
:param file_path: string, local file path to read config into for diff
"""
if not file_path:
raise ValueError('File path must be provided for diff()')
# Create formatted string representation of dict values
text1 = self.to_json().splitlines()
# Create formatted string representation of values in file
file_dict = self._get_dict_from_file(file_path=file_path,
machine=machine,
format='json') or {}
text2 = json.dumps(file_dict, sort_keys=True, indent=4).splitlines()
diff = difflib.unified_diff(text2, text1, lineterm='')
return str('\n'.join(diff))
def diff_yaml(self, file_path, machine=None):
"""
Method to show current values -vs- those (saved) in a file.
Will return a formatted string to show the difference
:param file_path: string, local file path to read config into for diff
"""
if not file_path:
raise ValueError('File path must be provided for diff()')
# Create formatted string representation of dict values
text1 = self.to_yaml().splitlines()
# Create formatted string representation of values in file
file_dict = self._get_dict_from_file(file_path=file_path,
machine=machine,
format='yaml') or {}
text2 = yaml.dump(file_dict, default_flow_style=False).splitlines()
diff = difflib.unified_diff(text2, text1, lineterm='')
return str('\n'.join(diff))
def save(self, path=None):
"""
Will write the json configuration to a file at path or by default at
self.write_file_path.
:param path: string, local file path to save the config to.
"""
path = path or self.write_file_path
if not path:
raise ValueError('Path/write_file_path has not been set '
'or provided.')
backup_path = path + '.bak'
config_json = self.to_json()
if os.path.isfile(path):
copyfile(path, backup_path)
save_file = file(path, "w")
with save_file:
save_file.write(config_json)
save_file.flush()
| {"/cloud_admin/systemconnection.py": ["/cloud_admin/access/autocreds.py"]} |
64,565 | mbacchi/adminapi | refs/heads/master | /cloud_admin/hosts/helpers/__init__.py |
class EucaMachineHelpers(object):
    """
    Thin convenience wrapper exposing the frequently used attributes of a
    EucaHost (command runner, logger, services list, eucalyptus.conf) under
    a single helper namespace.
    """
    # Subclasses may set this to identify the helper category.
    _helpername = None

    def __init__(self, eucahost):
        host = eucahost
        self.eucahost = host
        # Shortcut to the host's remote command executor
        self.sys = host.sys
        log = host.log
        self.log = log
        self.debug = log.debug
        # Hosts created without a services attribute default to an empty list
        self.services = getattr(host, 'services', [])
        self.eucalyptus_conf = host.eucalyptus_conf
| {"/cloud_admin/systemconnection.py": ["/cloud_admin/access/autocreds.py"]} |
64,566 | mbacchi/adminapi | refs/heads/master | /cloud_admin/systemconnection.py |
import copy
import logging
from prettytable import PrettyTable
import re
import threading
from cloud_admin.access.autocreds import AutoCreds
from cloud_admin.services.serviceconnection import ServiceConnection
from cloud_admin.hosts.eucahost import EucaHost
from cloud_utils.system_utils.machine import Machine
from cloud_utils.log_utils.eulogger import Eulogger
from cloud_utils.log_utils import markup
class SystemConnection(ServiceConnection):
    """
    Admin-level connection to a Eucalyptus cloud. Bundles the admin-api
    (service) connection with ssh Machine() access to the hosts backing the
    cloud, and provides lookup/summary helpers over the system topology.
    """
    def __init__(self,
                 hostname,
                 username='root',
                 password=None,
                 keypath=None,
                 proxy_hostname=None,
                 proxy_username='root',
                 proxy_password=None,
                 proxy_keypath=None,
                 config_yml=None,
                 config_qa=None,
                 credpath=None,
                 aws_access_key=None,
                 aws_secret_key=None,
                 log_level='INFO',
                 boto_debug_level=0,
                 euca_user='admin',
                 euca_account='eucalyptus',
                 ):
        """
        :param hostname: ip/hostname of the CLC; used for both ssh and the admin api
        :param username/password/keypath: ssh connect info for the CLC
        :param proxy_*: optional ssh proxy/bastion connect info
        :param credpath: optional local or remote path to existing credentials
        :param aws_access_key/aws_secret_key: optional pre-existing admin keys
        :param log_level: stdout log level for this connection's logger
        :param boto_debug_level: debug level passed through to boto
        :param euca_user/euca_account: account/user to resolve credentials for
        """
        # ssh connection arguments shared by all host connections
        self.clc_connect_kwargs = {
            'hostname': hostname,
            'username': username,
            'password': password,
            'keypath': keypath,
            'proxy_hostname': proxy_hostname,
            'proxy_username': proxy_username,
            'proxy_password': proxy_password,
            'proxy_keypath': proxy_keypath
        }
        self._clc_machine = None
        self.hostname = hostname
        self.config_qa = config_qa
        self.config_yml = config_yml
        self._eucahosts = {}
        self._credpath = credpath
        self.log = Eulogger(identifier=self.__class__.__name__, stdout_level=log_level)
        # AutoCreds resolves the admin credentials from the provided info
        # (local/remote credpath, provided keys, or the CLC itself)
        self.creds = AutoCreds(credpath=self._credpath,
                               aws_access_key=aws_access_key,
                               aws_secret_key=aws_secret_key,
                               aws_account_name=euca_account,
                               aws_user_name=euca_user,
                               logger=self.log,
                               **self.clc_connect_kwargs)
        super(SystemConnection, self).__init__(hostname=hostname,
                                               aws_secret_key=self.creds.aws_secret_key,
                                               aws_access_key=self.creds.aws_access_key,
                                               logger=self.log,
                                               boto_debug_level=boto_debug_level)

    def set_loglevel(self, level, parent=False):
        """
        wrapper for log.setLevel, accept int or string.
        Levels can be found in logging class. At the time this was written they are:
        CRITICAL:50
        DEBUG:10
        ERROR:40
        FATAL:50
        INFO:20
        NOTSET:0
        WARN:30
        WARNING:30
        NOTE: the 'parent' flag is currently accepted but unused.
        """
        level = level or logging.NOTSET
        if not isinstance(level, int) and not isinstance(level, basestring):
            raise ValueError('set_loglevel. Level must be of type int or string, got: "{0}/{1}"'
                             .format(level, type(level)))
        if isinstance(level, basestring):
            level = getattr(logging, str(level).upper())
        return self.log.set_parentloglevel(level)

    @property
    def clc_machine(self):
        """Machine() ssh connection to the CLC host; created/cached lazily."""
        if not self._clc_machine:
            if self.clc_connect_kwargs['hostname']:
                # Reuse the EucaHost built for this ip when one exists.
                # NOTE(review): this indexes self.eucahosts directly and will
                # raise KeyError if the CLC ip is not among the mapped hosts —
                # assumed to always be present; confirm against
                # get_all_machine_mappings().
                if self.eucahosts[self.clc_connect_kwargs['hostname']]:
                    self._clc_machine = self.eucahosts[self.clc_connect_kwargs['hostname']]
                else:
                    self._clc_machine = Machine(**self.clc_connect_kwargs)
                    self.eucahosts[self.clc_connect_kwargs['hostname']] = self._clc_machine
        return self._clc_machine

    @property
    def eucahosts(self):
        """Mapping of ip -> EucaHost for every machine in the cloud (lazy)."""
        if not self._eucahosts:
            self._eucahosts = self._update_host_list()
        return self._eucahosts

    def _update_host_list(self):
        """
        Query the cloud for its machine->services mapping and build a EucaHost
        per ip concurrently (ssh setup is slow, so one thread per host).
        :return: dict of ip -> EucaHost
        """
        machines = self.get_all_machine_mappings()
        connect_kwargs = copy.copy(self.clc_connect_kwargs)
        # EucaHost gets its hostname per-machine, not the CLC's
        if 'hostname' in connect_kwargs:
            connect_kwargs.pop('hostname')
        hostlock = threading.Lock()

        # Executed per-thread; the lock guards the shared host dict
        def add_host(ip, services, self=self, connect_kwargs=connect_kwargs):
            host = EucaHost(connection=self, hostname=ip, services=services, **connect_kwargs)
            with hostlock:
                self._eucahosts[ip] = host

        threads = []
        for ip, services in machines.iteritems():
            t = threading.Thread(target=add_host, args=(ip, services))
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
        return self._eucahosts

    def get_host_by_hostname(self, hostname):
        """Return the EucaHost for the given hostname/ip, or None if unknown."""
        return self.eucahosts.get(hostname, None)

    def get_hosts_by_service_type(self, servicetype):
        """
        Return the EucaHosts running at least one service of 'servicetype'.
        NOTE(review): a host is appended once per matching service, so a host
        running multiple services of the same type appears multiple times —
        confirm whether callers rely on that.
        """
        ret_list = []
        for ip, host in self.eucahosts.iteritems():
            for service in host.services:
                if service.type == servicetype:
                    ret_list.append(host)
        return ret_list

    def get_hosts_for_cloud_controllers(self):
        """Return hosts running the cloud controller ('eucalyptus') service."""
        return self.get_hosts_by_service_type(servicetype='eucalyptus')

    def get_hosts_for_node_controllers(self, partition=None, instanceid=None):
        """
        Return node controller hosts, optionally filtered by partition, or
        the single node hosting 'instanceid' (returned as a one-item list).
        """
        ncs = self.get_hosts_by_service_type(servicetype='node')
        if not partition and not instanceid:
            return ncs
        retlist = []
        for nc in ncs:
            if instanceid:
                for instance in nc.instances:
                    if instance == instanceid:
                        return [nc]
            if nc.partition == partition:
                retlist.append(nc)
        return retlist

    def get_hosts_cluster_controllers(self, partition=None):
        """Return cluster controller hosts, optionally filtered by partition."""
        ccs = self.get_hosts_by_service_type(servicetype='cluster')
        if not partition:
            return ccs
        retlist = []
        for cc in ccs:
            if cc.partition == partition:
                retlist.append(cc)
        return retlist

    def get_hosts_for_storage_controllers(self, partition=None):
        """Return storage controller hosts, optionally filtered by partition."""
        scs = self.get_hosts_by_service_type(servicetype='storage')
        if not partition:
            return scs
        retlist = []
        for sc in scs:
            if sc.partition == partition:
                retlist.append(sc)
        return retlist

    def get_hosts_for_ufs(self):
        """Return the first host running the user-facing services, or None."""
        ufs = None
        out = self.get_hosts_by_service_type(servicetype='user-api')
        if out:
            ufs = out[0]
        return ufs

    def get_hosts_for_walrus(self):
        """Return the first host running the walrus backend, or None."""
        walrus = None
        out = self.get_hosts_by_service_type(servicetype='walrusbackend')
        if out:
            walrus = out[0]
        return walrus

    def show_cloud_legacy_summary(self, repo_info=True, print_method=None, file_path=None,
                                  print_table=True):
        """
        Creates a table representing the legacy Eutester/QA representation of a Eucalyptus
        cloud. This can be used for legacy eutester tests, etc..
        :param repo_info: bool, if True will use the word REPO in place of Zone for the 5th column
        :param print_method: method used to print this table, defaults to self.log.info
        :param print_table: bool, if False will return the table obj
        :param file_path: string representing a local file path to save this information to
        :return: table obj if print_table is False
        """
        ret = ""
        print_method = print_method or self.log.info
        if repo_info:
            rz_col = 'REPO'
        else:
            rz_col = 'ZONE'
        pt = PrettyTable(['# HOST', 'DISTRO', 'VER', 'ARCH', rz_col, 'SERVICE CODES'])
        pt.align = 'l'
        pt.border = 0
        for ip, host in self.eucahosts.iteritems():
            split = host.summary_string.split()
            service_codes = " ".join(split[5:])
            if repo_info:
                rz_col = 'REPO'
            else:
                rz_col = split[4]
            pt.add_row([split[0], split[1], split[2], split[3], rz_col, service_codes])
            ret += "{0}\n".format(host.summary_string)
        if file_path:
            with open(file_path, 'w') as save_file:
                save_file.write(str(pt))
                save_file.flush()
        if print_table:
            print_method("\n{0}\n".format(str(pt)))
        else:
            return pt

    @staticmethod
    def vm_state_markup(state):
        """Return the ansi markup values used to color a VM state string."""
        if state in ['shutting-down', 'stopped', 'stopping']:
            return [1, 91]
        if state == 'terminated':
            return [1, 97]
        if state == 'running':
            return [1, 92]
        return [1, 93]

    def show_hosts(self, hosts=None, partition=None, service_type=None, serv_columns=None,
                   update=True, print_method=None, print_table=True, save_file=None):
        """
        Show a table of machine info alongside the Eucalyptus services each
        machine hosts.
        :param hosts: EucaHost, list of EucaHosts, or None for all hosts
        :param serv_columns: optional index/slice applied to the service
                             table's field names to limit the shown columns
        :param update: bool, if True refresh each service's state first
        :param print_method: callable used to print; defaults to self._show_method
        :param print_table: bool, if False return the table instead of printing
        :param save_file: optional local path to also write the table to
        :return: PrettyTable obj if print_table is False
        NOTE(review): 'partition' and 'service_type' are accepted but not
        currently applied as filters.
        """
        print_method = print_method or self._show_method
        machine_hdr = (markup('MACHINE INFO'), 30)
        service_hdr = (markup('EUCALYPTUS SERVICES'), 90)
        pt = PrettyTable([machine_hdr[0], service_hdr[0]])
        pt.header = False
        pt.align = 'l'
        pt.hrules = 1
        pt.max_width[machine_hdr[0]] = machine_hdr[1]
        total = []
        eucahosts = {}
        if hosts is None:
            eucahosts = self.eucahosts
        elif isinstance(hosts, list):
            for host in hosts:
                eucahosts[host.hostname] = host
        elif isinstance(hosts, EucaHost):
            eucahosts[hosts.hostname] = hosts
        if not isinstance(eucahosts, dict):
            raise ValueError('show_machine_mappings requires dict example: '
                             '{"host ip":[host objs]}, got:"{0}/{1}"'
                             .format(eucahosts, type(eucahosts)))
        # To format the tables services, print them all at once and then sort the table
        # rows string into the machines columns
        for hostip, host in eucahosts.iteritems():
            for serv in host.services:
                if update:
                    serv.update()
                total.append(serv)
                if serv.child_services:
                    total.extend(serv.child_services)
        # Create a table showing the service states, grab the first 3 columns
        # for type, name, state, and zone
        servpt = self.show_services(total, print_table=False)
        # Get a subset of the show services fields...
        if serv_columns is None:
            fields = servpt._field_names[0:4]
        else:
            # BUG FIX: was 'servpt._fields_names', which raised AttributeError
            # whenever serv_columns was provided.
            fields = servpt._field_names[serv_columns]
        serv_lines = servpt.get_string(border=0, padding_width=2, fields=fields).splitlines()
        header = serv_lines[0]
        ansi_escape = re.compile(r'\x1b[^m]*m')
        # Now build the machine table...
        threads = []
        hostlock = threading.Lock()

        # Method to allow host info to be gathered concurrently
        def add_host(hostip, host, self=self):
            assert isinstance(host, EucaHost)
            servbuf = header + "\n"
            mservices = []
            # Get the child services (ie for UFS)
            for serv in host.services:
                mservices.append(serv)
                mservices.extend(serv.child_services)
            for serv in mservices:
                for line in serv_lines:
                    # Remove the ansi markup for parsing purposes, but leave it in the
                    # displayed line
                    clean_line = ansi_escape.sub('', line)
                    splitline = clean_line.split()
                    if len(splitline) < 2:
                        continue
                    line_type = splitline[0]
                    line_name = splitline[1]
                    # Pull matching lines out of the pre-formatted service table...
                    if (splitline and re.match("^{0}$".format(serv.type), line_type) and
                            re.match("^{0}$".format(serv.name), line_name)):
                        # Add this line to the services to be displayed for this machine
                        if line_name not in servbuf:
                            servbuf += line + "\n"
                if serv.type == 'node':
                    if getattr(serv, 'instances', None):
                        if serv.instances:
                            vm_pt = PrettyTable([markup('INSTANCES', [1, 4]),
                                                 markup('STATE:', [1, 4]),
                                                 markup('VMTYPE:', [1, 4]),
                                                 markup('ROOT_DEV:', [1, 4])])
                            vm_pt.align = 'l'
                            vm_pt.border = 1
                            vm_pt.vrules = 2
                            vm_pt.hrules = 0
                            for x in serv.instances:
                                vm_pt.add_row([x.id,
                                               markup(x.state, self.vm_state_markup(x.state)),
                                               x.instance_type,
                                               x.root_device_type])
                            servbuf += "{0}\n".format(vm_pt)
                    av_pt = host.helpers.node_controller.show_availability_for_node(
                        print_table=False)
                    servbuf += av_pt.get_string()
            ps_sum_pt = host.show_euca_process_summary(print_table=False)
            servbuf += "\n" + ps_sum_pt.get_string(border=1, vrules=2, hrules=0)
            host_info = markup('Euca Versions:').ljust(machine_hdr[1])
            host_info += "Cloud: {0}".format(host.get_eucalyptus_version()).ljust(machine_hdr[1])
            host_info += "2ools: {0}".format(host.get_euca2ools_version()).ljust(machine_hdr[1])
            host_info += markup("Hostname:").ljust(machine_hdr[1])
            host_info += str(host.hostname).ljust(machine_hdr[1])
            sys_pt = host.show_sys_info(print_table=False)
            host_info += "{0}".format(sys_pt)
            with hostlock:
                pt.add_row([markup("HOST:") + markup(hostip, [1, 94]),
                            markup('EUCALYPTUS SERVICES:') +
                            markup('[ {0} ]'
                                   .format(" ".join(str(x) for x in host.euca_service_codes)),
                                   [1, 34])])
                pt.add_row([host_info, servbuf])

        for hostip, host in eucahosts.iteritems():
            t = threading.Thread(target=add_host, args=(hostip, host))
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
        if save_file:
            with open(save_file, 'w') as sf:
                sf.write("\n{0}\n".format(pt.get_string()))
        if print_table:
            print_method("\n{0}\n".format(pt.get_string()))
        else:
            return pt

    @classmethod
    def build_machine_dict_from_config(cls):
        # Declared with a 'cls' first argument but previously lacked the
        # decorator; marked @classmethod so both class and instance invocation
        # raise cleanly.
        raise NotImplementedError()

    def build_machine_dict_from_cloud_services(self):
        raise NotImplementedError('not yet implemented')
| {"/cloud_admin/systemconnection.py": ["/cloud_admin/access/autocreds.py"]} |
64,567 | mbacchi/adminapi | refs/heads/master | /cloud_utils/system_utils/__init__.py | import subprocess
def local(cmd):
    """
    Run a command on the localhost and return its output as a list of lines.

    :param cmd: str representing the command to be run (split on whitespace;
                not executed through a shell)
    :return: list of str lines from the command's combined stdout/stderr
    :raise: CalledProcessError on non-zero return code; the captured output is
            attached to the error's ``output`` attribute
    """
    args = cmd.split()
    # universal_newlines=True decodes the output to text, so the str split
    # below works on both Python 2 and Python 3 (communicate() otherwise
    # returns bytes on py3, which cannot be split with a str separator).
    process = subprocess.Popen(args, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               universal_newlines=True, bufsize=4096)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        error = subprocess.CalledProcessError(retcode, cmd)
        error.output = output
        raise error
    return output.split("\n")
| {"/cloud_admin/systemconnection.py": ["/cloud_admin/access/autocreds.py"]} |
64,568 | mbacchi/adminapi | refs/heads/master | /cloud_admin/access/autocreds.py | """
AutoCreds is a convenience class which attempts to provide utilities for reading in
credentials data from multiple sources.
The auto_create flag (set to True by default) attempts to automatically produce credentials
based upon the information provided to this AutoCreds obj.
- If any ssh connect arguments (outside of hostname) are provided then only the remote
machine is tried for existing creds at 'self._credpath'.
- If credpath was provided the local file system will first be tried for existing
credentials
- If aws access and secret keys were provided along with hostname, will attempt to
derive service credpaths from the Eucalyptus Admin api.
-Finally if a hostname was provided an ssh attempt will be made (with any other
connection kwargs provided)to fetch from the remote system as well.
If password or keypath was not provided, this assumes keys have been sync'd between the
localhost and the remote machine.
Upon the first successful discovery of credentials, the local obj is populated with
eucarc attributes and returns.
Some examples:
In [7]: from cloud_admin.access.autocreds import AutoCreds
# From a remote machine..
In [8]: creds = AutoCreds(credpath='', hostname='10.111.5.156', password='foobar')
In [9]: creds.ec2_url
Out[9]: 'http://10.111.5.156:8773/services/compute'
# From a local filepath:
In [11]: creds = AutoCreds(credpath='eucarc-10.111.5.156-eucalyptus-admin/eucarc')
In [12]: creds.s3_url
Out[12]: 'http://10.111.5.156:8773/services/objectstorage'
# Finally from the Eucalyptus Admin Api...
creds = AutoCreds(aws_access_key=aws_access_key, aws_secret_key=aws_secret_key,
hostname='10.111.5.156')
# Access the credential values as attributes of the AutoCreds obj such as:
In [21]: admin_connection = ServiceConnection(host='10.111.5.156',
aws_access_key_id=creds.aws_access_key,
aws_secret_key=creds.aws_secret_key)
# All creds can be fetched in a dict using:
In [22]: creds.get_eucarc_attrs()
Out[22]:
{'aws_access_key': 'AKIAAAI765C6PIO7QMS7',
'aws_auto_scaling_url': 'http://10.111.5.156:8773/services/AutoScaling',
'aws_cloudformation_url': 'http://10.111.5.156:8773/services/CloudFormation',
'aws_cloudwatch_url': 'http://10.111.5.156:8773/services/CloudWatch',
'aws_credential_file': None,
'aws_elb_url': 'http://10.111.5.156:8773/services/LoadBalancing',
'aws_iam_url': 'http://10.111.5.156:8773/services/Euare',
'aws_secret_key': 'lqi6Bp6hHAIwkXwicRyDKxHDckr2vrnDd7I1xu6d',
'aws_simpleworkflow_url': 'http://10.111.5.156:8773/services/SimpleWorkflow',
'ec2_access_key': 'AKIAAAI765C6PIO7QMS7',
'ec2_account_number': None,
'ec2_cert': None,
'ec2_jvm_args': None,
'ec2_private_key': None,
'ec2_secret_key': 'lqi6Bp6hHAIwkXwicRyDKxHDckr2vrnDd7I1xu6d',
'ec2_url': 'http://10.111.5.156:8773/services/compute',
'ec2_user_id': None,
'euare_url': 'http://10.111.5.156:8773/services/Euare',
'eucalyptus_cert': None,
'eustore_url': 'http://emis.eucalyptus.com/',
's3_url': 'http://10.111.5.156:8773/services/objectstorage',
'token_url': 'http://10.111.5.156:8773/services/Tokens'}
# For easy viewing, they can be shown in table format as well:
In [23]: creds.show()
[2015-05-18 15:47:12,249] [AutoCreds] [DEBUG]:
+------------------------+--------------------------------------------------+
| ec2_account_number | None |
+------------------------+--------------------------------------------------+
| euare_url | http://10.111.5.156:8773/services/Euare |
+------------------------+--------------------------------------------------+
| ec2_user_id | None |
+------------------------+--------------------------------------------------+
| token_url | http://10.111.5.156:8773/services/Tokens |
+------------------------+--------------------------------------------------+
| ec2_url | http://10.111.5.156:8773/services/compute |
+------------------------+--------------------------------------------------+
| aws_elb_url | http://10.111.5.156:8773/services/LoadBalancing |
+------------------------+--------------------------------------------------+
| aws_cloudformation_url | http://10.111.5.156:8773/services/CloudFormation |
+------------------------+--------------------------------------------------+
| aws_secret_key | lqi6Bp6hHAIwkXwicRyDKxHDckr2vrnDd7I1xu6d |
+------------------------+--------------------------------------------------+
| aws_cloudwatch_url | http://10.111.5.156:8773/services/CloudWatch |
+------------------------+--------------------------------------------------+
| eucalyptus_cert | None |
+------------------------+--------------------------------------------------+
| s3_url | http://10.111.5.156:8773/services/objectstorage |
+------------------------+--------------------------------------------------+
| aws_iam_url | http://10.111.5.156:8773/services/Euare |
+------------------------+--------------------------------------------------+
| aws_simpleworkflow_url | http://10.111.5.156:8773/services/SimpleWorkflow |
+------------------------+--------------------------------------------------+
| ec2_jvm_args | None |
+------------------------+--------------------------------------------------+
| ec2_private_key | None |
+------------------------+--------------------------------------------------+
| ec2_access_key | AKIAAAI765C6PIO7QMS7 |
+------------------------+--------------------------------------------------+
| ec2_secret_key | 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 |
+------------------------+--------------------------------------------------+
| aws_access_key | AKIAAAI765C6PIO7QMS7 |
+------------------------+--------------------------------------------------+
| eustore_url | http://emis.eucalyptus.com/ |
+------------------------+--------------------------------------------------+
| aws_credential_file | None |
+------------------------+--------------------------------------------------+
| ec2_cert | None |
+------------------------+--------------------------------------------------+
| aws_auto_scaling_url | http://10.111.5.156:8773/services/AutoScaling |
+------------------------+--------------------------------------------------+
| UNPARSED LINES | None |
+------------------------+--------------------------------------------------+)
"""
import os.path
import re
from urlparse import urlparse
from cloud_utils.file_utils.eucarc import Eucarc
from cloud_utils.log_utils import get_traceback
from cloud_utils.system_utils.machine import Machine
from cloud_admin.services.serviceconnection import ServiceConnection
from cloud_utils.net_utils.sshconnection import CommandExitCodeException
from cloud_admin.hosts.eucahost import EucaHost
# Maps eucarc attribute names (service URL, env-var style keys) to the
# Eucalyptus service 'type' strings reported by the admin API. Used to
# translate registered services into eucarc URL attributes.
eucarc_to_service_map = {
    "euare_url": 'euare',
    "ec2_url": 'compute',
    "token_url": 'tokens',
    "aws_elb_url": 'loadbalancing',
    "aws_cloudformation_url": 'cloudformation',
    "aws_cloudwatch_url": 'cloudwatch',
    "s3_url": 'objectstorage',
    "aws_iam_url": 'euare',
    "aws_simpleworkflow_url": 'simpleworkflow',
    "aws_auto_scaling_url": 'autoscaling'}
class AutoCreds(Eucarc):
def __init__(self,
auto_create=True,
aws_access_key=None,
aws_secret_key=None,
aws_account_name=None,
aws_user_name=None,
hostname=None,
username='root',
password=None,
keypath=None,
credpath=None,
proxy_hostname=None,
proxy_username='root',
proxy_password=None,
proxy_keypath=None,
logger=None,
eucarc_obj=None):
super(AutoCreds, self).__init__(logger=logger)
self._serviceconnection = None
self._clc_ip = hostname
self._clc_machine = None
self._credpath = credpath
self._account_name = aws_account_name
self._user_name = aws_user_name
self.aws_secret_key = aws_secret_key
self.aws_access_key = aws_access_key
self.debug = self.log.debug
self._has_updated_connect_args = False # used to speed up auto find credentials
if (username != 'root' or proxy_username != 'root' or password or keypath or
proxy_hostname or proxy_keypath or proxy_password):
self._has_updated_connect_args = True
self._clc_connect_kwargs = {
'hostname': hostname,
'username': username,
'password': password,
'keypath': keypath,
'proxy_hostname': proxy_hostname,
'proxy_username': proxy_username,
'proxy_password': proxy_password,
'proxy_keypath': proxy_keypath
}
if eucarc_obj:
self.__dict__.update(eucarc_obj.__dict__)
if not eucarc_obj and auto_create:
self.auto_find_credentials()
@property
def clc_machine(self):
if not self._clc_machine:
self._clc_machine = self.connect_to_clc()
return self._clc_machine
@clc_machine.setter
def clc_machine(self, newclc):
self._clc_machine = newclc
@property
def serviceconnection(self):
if not self._serviceconnection:
self._serviceconnection = self._connect_services()
return self._serviceconnection
def _connect_services(self):
if self.aws_secret_key and self.aws_access_key and self._clc_ip:
self._serviceconnection = ServiceConnection(hostname=self._clc_ip,
aws_access_key=self.aws_access_key,
aws_secret_key=self.aws_secret_key)
return self._serviceconnection
def _close_adminpi(self):
"""
If open, Attempts to close/cleanup the AutoCred's serviceconnection obj
"""
if self._serviceconnection:
try:
self._serviceconnection.close()
self._serviceconnection = None
except:
pass
def update_attrs_from_cloud_services(self):
"""
Attempts to update the current eucarc artifacts (service paths) from services
gathered via the Eucalyptus admin api interface
:returns dict mapping eucarc common key-values to the discovered service URIs.
"""
if not self.serviceconnection:
raise RuntimeError('Can not fetch service paths from cloud without an '
'ServiceConnection\n This requires: clc_ip, aws_access_key, '
'aws_secret_key')
path_dict = self._get_service_paths_from_serviceconnection(self.serviceconnection)
if not path_dict.get('ec2_access_key'):
path_dict['ec2_access_key'] = self.aws_access_key
if not path_dict.get('ec2_secret_key'):
path_dict['ec2_secret_key'] = self.aws_secret_key
self.__dict__.update(path_dict)
self._close_adminpi()
return path_dict
@classmethod
def _get_service_paths_from_serviceconnection(cls, serviceconnection):
"""
Reads the Eucalyptus services, maps them to common eucarc key values, and returns
the dict of the mapping.
:params serviceconnection: an ServiceConnection obj w/ active connection.
:returns dict mapping eucarc common key-values to the discovered service URIs.
"""
assert isinstance(serviceconnection, ServiceConnection)
services = serviceconnection.get_services()
ret_dict = {}
for service in services:
for key, serv_value in eucarc_to_service_map.iteritems():
if service.type == serv_value:
ret_dict[key] = str(service.uri)
return ret_dict
def get_local_eucarc(self, credpath):
"""
Reads in eucarc contents from a local file path. Checks to make sure the credpath
given is an existing file, if a dir was provide it will check for a file name 'eucarc'
in that dir.
:params credpath: string representing the path to the eucarc file
:return dict of eucarc read in
"""
if not str(credpath).endswith('eucarc') and os.path.isdir(credpath):
credpath = os.path.join(credpath, 'eucarc')
if os.path.isfile(credpath):
return self._from_filepath(credpath)
return None
def get_remote_eucarc(self, credpath, machine=None):
"""
Reads in eucarc contents from a remote file path on the provided Machine().
Checks to make sure the credpath given is an existing file, if a dir was provide it will
check for a file name 'eucarc' in that dir.
:params credpath: string representing the path to the eucarc file
:params machine: Machine() obj
:returns dict of eucarc read in
"""
machine = machine or self.clc_machine
if not str(credpath).endswith('eucarc') and machine.is_dir(credpath):
credpath = os.path.join(credpath, 'eucarc')
if machine.is_file(credpath):
return self._from_filepath(filepath=credpath, sshconnection=machine._ssh)
return None
def connect_to_clc(self, connect_kwargs=None):
"""
Attempts to create a Machine by connecting to the remote host, usually the CLC.
:params connect_kwargs: Dictionary set of arguments to be provided to Machine().__init__()
returns machine obj upon success
"""
connect_kwargs = connect_kwargs or self._clc_connect_kwargs
machine = Machine(**connect_kwargs)
self.clc_machine = machine
return machine
def assume_role_on_remote_clc(self, machine=None):
machine = machine or self.clc_machine
cred_string = []
out = machine.sys('clcadmin-assume-system-credentials', code=0)
for line in out:
if line:
line = line.strip(';')
line = str(line).replace('127.0.0.1', machine.hostname)
cred_string.append(line)
return self._from_string(string=cred_string)
def auto_find_credentials(self):
"""
Convenience method which attempts to automatically produce credentials based upon the
information provided to this AutoCreds obj.
- If any ssh connect arguments (outside of hostname) are provided then only the remote
machine tried for existing creds at 'self._credpath'.
- If credpath was provided the local file system will first be tried for existing
credentials
- If aws access and secret keys were provided allong with hostname, will attempt to
derivce service credpaths from the Eucalyptus Admin api.
-Finally if a hostname was provided an ssh attempt will be made (with any other
connection kwargs provided)to fetch from the remote system as well.
If password or keypath was not provided, this assumes keys have been sync'd between the
localhost and the remote machine.
Upon the first successful discovery of credentials, the local obj is populated with
eucarc attributes and returns.
"""
def try_local(self):
if self._credpath:
try:
res = self.get_local_eucarc(credpath=self._credpath)
if res:
self.debug('Found local creds at: "{0}"'.format(self._credpath))
return res
except IOError:
pass
def try_serviceconnection(self):
if self.aws_secret_key and self.aws_access_key and self._clc_ip:
self._connect_services()
try:
res = self.update_attrs_from_cloud_services()
if res:
self.debug('Derived creds from serviceconnection')
return res
except RuntimeError as RE:
self.debug('{0}\nFailed to update creds using serviceconnection, err:"{1}"'
.format(get_traceback(), str(RE)))
self._close_adminpi()
def try_assume_admin_on_clc(self):
if not self.aws_secret_key and not self.aws_access_key:
try:
self.assume_role_on_remote_clc()
res = try_serviceconnection(self)
return res
except Exception as AE:
self.debug('{0}\nFailed to update creds using '
'"clcadmin-assume-system-credentials", err:"{1}"'
.format(get_traceback(), str(AE)))
def try_remote(self):
if self._clc_ip and self._credpath:
try:
machine = self.clc_machine or self.connect_to_clc()
if machine:
try:
if not self._keysdir:
self._keysdir = machine.get_abs_path(self._credpath)
except:
pass
res = self.get_remote_eucarc(credpath=self._credpath, machine=machine)
if res:
self.debug('Found remote creds on:"{0}", at path:"{1}"'
.format(self.clc_machine.ssh.host, self._credpath))
return res
except Exception as e:
self.debug("{0}\nFailed to fetch creds remotely, err:'{1}'"
.format(get_traceback(), str(e)))
def try_clc_db(self):
self.debug('trying clc db...')
if self._clc_ip and self.aws_account_name and self.aws_user_name:
machine = self.clc_machine or self.connect_to_clc()
if machine:
try:
res = self.get_existing_keys_from_clc(account=self.aws_account_name,
user=self.aws_user_name,
machine=machine)
try:
# With keys, try filling in remainder with service urls/attributes
# using the admin api interface...
res = self.update_attrs_from_cloud_services()
except:
pass
return res
except RuntimeError as RE:
self.debug('{0}\nFailed to fetch creds from clc db, err:{1}'
.format(get_traceback(), str(RE)))
default_order = [try_local, try_serviceconnection, try_assume_admin_on_clc,
try_remote, try_clc_db]
if self._clc_ip and self._credpath and self._has_updated_connect_args:
# if any ssh related arguements were provided, assume the user would like
# to try remote first
if try_remote(self):
return
default_order.remove(try_remote)
raise ValueError('Could not find "remote" creds with provided information.')
else:
for meth in default_order:
if meth(self):
return
raise ValueError("Could not find path with provided information.")
def get_existing_keys_from_clc(self, account, user, machine=None, eucahome=None, port=8777,
                               dbuser='eucalyptus', p12file=None, pkfile=None,
                               passphrase=None, db=None, pargs=None, verbose=False):
    """
    Fetch existing access/secret keys (plus account info and cert pem) for the
    given account/user directly from the CLC's postgres DB over ssh.

    :param account: account name to look up
    :param user: user name within the account
    :param machine: Machine() to run commands on (defaults to self.clc_machine)
    :param eucahome: eucalyptus home dir on the CLC
    :param port: postgres port
    :param dbuser: postgres user
    :param p12file: optional explicit euca.p12 path
    :param pkfile: optional explicit cloud-pk.pem path
    :param passphrase: passphrase signed with the cloud key to derive the DB password
    :param db: database name (defaults to 'eucalyptus_shared')
    :param pargs: extra args passed to psql
    :param verbose: bool, passed through to machine.sys()
    :returns dict of discovered credential artifacts (may be empty)
    """
    ret = {}
    db = db or 'eucalyptus_shared'
    pargs = pargs or ""
    machine = machine or self.clc_machine
    # Bug fix: was `None or 'eucalyptus'`, which ignored the caller's
    # passphrase argument entirely.
    passphrase = passphrase or 'eucalyptus'
    eucahome = eucahome or EucaHost._get_eucalyptus_home(machine) or '/'
    # Bug fix: os.path.join() discards preceding components when a later
    # component is absolute, so the old leading '/' made eucahome a no-op.
    EucaP12File = p12file or os.path.join(eucahome, 'var/lib/eucalyptus/keys/euca.p12')
    CloudPKFile = pkfile or os.path.join(eucahome, 'var/lib/eucalyptus/keys/cloud-pk.pem')
    # The DB password is derived by signing the passphrase with the cloud key.
    cmd = ("echo -n '{0}' | openssl SHA256 -sign {1} | sha256sum"
           .format(passphrase, CloudPKFile))
    out = machine.sys(cmd, code=0, verbose=verbose)
    if out:
        dbpass = str(out[0]).split()[0]
        dbsel = ("\"select k.auth_access_key_query_id, k.auth_access_key_key, "
                 "a.auth_account_number, a.auth_account_name, c.auth_certificate_pem "
                 "from eucalyptus_auth.auth_access_key k "
                 "join eucalyptus_auth.auth_user u on k.auth_access_key_owning_user=u.id "
                 "join eucalyptus_auth.auth_cert c on c.auth_certificate_owning_user=u.id "
                 "join eucalyptus_auth.auth_group_has_users gu on gu.auth_user_id = u.id "
                 "join eucalyptus_auth.auth_group g on gu.auth_group_id=g.id "
                 "join eucalyptus_auth.auth_account a on g.auth_group_owning_account=a.id "
                 "where a.auth_account_name = '{0}' and g.auth_group_name = '{1}'\";"
                 .format(account, "_" + user))
    dbcmd = ('export PGPASSWORD={0}; psql {1} -A -F "," -h 127.0.0.1 -p {2} -U {3} -d {4} '
             '-c {5}'.format(dbpass, pargs, port, dbuser, db, dbsel))
    qout = machine.sys(dbcmd, code=0, verbose=verbose)
    if qout:
        try:
            # psql -A -F "," output: row 0 holds column names, row 1 values.
            names = qout[0].split(',')
            values = qout[1].split(',')
            ret['AWS_ACCESS_KEY'] = values[names.index('auth_access_key_query_id')]
            ret['AWS_SECRET_KEY'] = values[names.index('auth_access_key_key')]
            ret['EC2_ACCOUNT_NUMBER'] = values[names.index('auth_account_number')]
            ret['EC2_ACCOUNT_NAME'] = values[names.index('auth_account_name')]
            ret['CERT'] = values[names.index('auth_certificate_pem')]
            self.aws_access_key = ret['AWS_ACCESS_KEY']
            self.aws_secret_key = ret['AWS_SECRET_KEY']
            self.ec2_user_id = ret['EC2_ACCOUNT_NUMBER']
            self.ec2_account_number = ret['EC2_ACCOUNT_NUMBER']
            self.ec2_account_name = ret['EC2_ACCOUNT_NAME']
        except Exception as PE:
            # Bug fix: the old format string reused {0} for both the output
            # and the account and never included the error; args now match.
            self.log.error('Output:\n{0}\nFailed parsing creds for account:"{1}", '
                           'user:"{2}", err:{3}'
                           .format("\n".join(qout), account, user, str(PE)))
            raise PE
    return ret
def create_local_creds(self, local_destdir, machine=None, overwrite=False):
    """
    Attempts to create a local set of files containing the current credential artifacts
    in this AutoCreds obj. The following files will be written to the provided
    'local_destdir' directory:
    - A eucarc file containing the "export key=value" syntax to resolve service urls
      and the location of any credentials related files.
    - Any current attributes with an sftp:// uri will be downloaded to local_destdir. At
      this time the AutoCred eucarc attributes will be updated to represent their now
      local filepath, and the local eucarc written will also reflect the new location.
    :params local_destdir: local directory to write cred files to.
                           Will create if does not exist.
    :params machine: The Machine() obj to download any sftp:// files from
    :params overwrite: bool, if True will overwrite any existing items at 'local_destdir'
    """
    machine = machine or self.clc_machine
    has_sftp_items = False
    local_destdir = os.path.abspath(local_destdir)
    for key, value in self.get_eucarc_attrs().iteritems():
        # Consistency/robustness fix: cast to str() like
        # _download_remote_artifacts does; a non-string attr value (eg None)
        # would raise TypeError in re.search.
        if re.search('^sftp://', str(value)):
            has_sftp_items = True
    if has_sftp_items:
        if not machine:
            if not self._has_updated_connect_args:
                self.log.info('Remote machine info has not been provided, '
                              'skipping remote creds download')
            else:
                machine = self.connect_to_clc()
        # Robustness fix: only attempt the download when a machine was
        # resolved (matches the 'skipping remote creds download' path above).
        if machine:
            self._download_remote_artifacts(local_destdir=local_destdir, machine=machine,
                                            overwrite=overwrite)
    # Now write the eucarc file. Any downloaded files should have updated the
    # local eucarc attrs replacing the sftp uri with a local file path
    eucarc_path = os.path.join(local_destdir, 'eucarc')
    with open(eucarc_path, 'w') as eucarc:
        eucarc.seek(0)
        for key, value in self.get_eucarc_attrs().iteritems():
            if not re.search('^sftp://', str(value)):
                eucarc.write("export {0}={1}\n".format(str(key).upper(), value))
        eucarc.flush()
    # Fix: the old duplicate 'Finished...' debug before the eucarc was
    # written was premature/misleading; log completion once, here.
    self.debug('Finished creating new local creds at: {0}'.format(local_destdir))
def _download_remote_artifacts(self, local_destdir, machine, sftp_prefix='^sftp://',
                               overwrite=False):
    """
    Attempts to download any eucarc artifacts which currently have an sftp:// url.
    To see these values use self.show() or self.get_eucarc_attrs() dict.
    :params local_destdir: Local directory to download the remote files to
    :params machine: remote machine object to download the files from
    :params sftp_prefix: The search criteria for determining which eucarc artifacts
                         should be downloaded.
    :params overwrite: bool, if True will overwrite any existing items at 'local_destdir'
    returns the local path (string) items were downloaded to upon success
    """
    if not isinstance(machine, Machine):
        raise ValueError('_download_remote_artifacts requires Machine() type. Got:"{0}/{1}"'
                         .format(machine, type(machine)))
    if not isinstance(local_destdir, basestring):
        raise ValueError('_download_remote_artifacts requires string for local_destdir(). '
                         'Got:"{0}/{1}"'.format(local_destdir, type(local_destdir)))
    if not os.path.exists(local_destdir):
        os.makedirs(local_destdir)
    else:
        if not os.path.isdir(local_destdir):
            raise ValueError('Provided local_destdir exists and is not a directory:"{0}"'
                             .format(local_destdir))
        if not overwrite:
            raise ValueError('local_destdir exists. set "overwrite=True" to write over '
                             'existing contents: {0}'.format(local_destdir))
    local_destdir = os.path.abspath(local_destdir)
    for key, path in self.get_eucarc_attrs().iteritems():
        if not key.startswith('_') and re.search(sftp_prefix, str(path)):
            urlp = urlparse(path)
            # Bug fix: compare against the validated 'machine' param we will
            # actually download from (and which the error message reports),
            # not self.clc_machine.
            if not machine.hostname == urlp.hostname:
                raise ValueError('sftp uri hostname:{0} does not match current Machines:{1}'
                                 .format(urlp.hostname, machine.hostname))
            artifact_name = os.path.basename(urlp.path)
            localpath = os.path.join(local_destdir, artifact_name)
            machine.sftp.get(remotepath=urlp.path, localpath=localpath)
            # Point the attr at its new local copy so a subsequently-written
            # eucarc references local paths.
            setattr(self, key, localpath)
            self.debug('Wrote: {0} to local:{1}'.format(key, localpath))
    return local_destdir
# Todo Clean up the legacy methods below...
def _legacy_create_credentials(self, clc, admin_cred_dir, account, user, zipfile='creds.zip'):
    # Legacy path: download creds.zip via euca_conf on the CLC if it is not
    # already present, unzip it remotely, and return the zipfile path.
    zipfilepath = os.path.join(admin_cred_dir, zipfile)
    output = self.credential_exist_on_remote_machine(zipfilepath)
    if output['status'] == 0:
        self.debug("Found creds file, skipping euca_conf --get-credentials.")
    else:
        cmd_download_creds = str("{0}/usr/sbin/euca_conf --get-credentials {1}/creds.zip "
                                 "--cred-user {2} --cred-account {3}"
                                 .format(self.eucapath, admin_cred_dir, user, account))
        # NOTE(review): the 'clc' parameter is unused; self.clc is used
        # instead -- confirm which is intended.
        if self.clc.found(cmd_download_creds, "The MySQL server is not responding"):
            raise IOError("Error downloading credentials, looks like CLC was not running")
        if self.clc.found("unzip -o {0}/creds.zip -d {1}"
                          .format(admin_cred_dir, admin_cred_dir),
                          "cannot find zipfile directory"):
            raise IOError("Empty ZIP file returned by CLC")
    return zipfilepath
def get_active_cert_for_creds(self, credzippath=None, account=None, user=None, update=True,
                              machine=None):
    """
    Ensure the credential zip at 'credzippath' references an active signing
    cert/key pair: reuse the eucarc's cert if still active, otherwise find or
    create one, copy it next to the zip, append exports to the remote eucarc
    and refresh the zip.

    :param credzippath: path to the creds zip (defaults to self.cred_zipfile
                        or self.credpath)
    :param account: account name (defaults to self.account_name)
    :param user: user name (defaults to self.aws_username)
    :param update: NOTE(review): currently unused in this method -- confirm
    :param machine: Machine() to operate on (defaults to self.clc_machine)
    :returns credzippath
    """
    if credzippath is None:
        if hasattr(self, 'cred_zipfile') and self.cred_zipfile:
            credzippath = self.cred_zipfile
        elif self.credpath:
            credzippath = self.credpath
        else:
            raise ValueError('cred zip file not provided or set for AutoCred obj')
    machine = machine or self.clc_machine
    # NOTE(review): other methods use aws_account_name/aws_user_name; these
    # attrs (account_name/aws_username) differ -- confirm they exist.
    account = account or self.account_name
    user = user or self.aws_username
    admin_cred_dir = os.path.dirname(credzippath)
    clc_eucarc = os.path.join(admin_cred_dir, 'eucarc')
    # backward compatibility
    # Resolve $EC2_CERT by sourcing the remote eucarc in a throwaway shell.
    certpath_in_eucarc = machine.sys(". {0} &>/dev/null && "
                                     "echo $EC2_CERT".format(clc_eucarc))
    if certpath_in_eucarc:
        certpath_in_eucarc = certpath_in_eucarc[0]
    self.debug('Current EC2_CERT path for {0}: {1}'.format(clc_eucarc, certpath_in_eucarc))
    if certpath_in_eucarc and self.get_active_id_for_cert(certpath_in_eucarc):
        self.debug("Cert/pk already exist and is active in '" +
                   admin_cred_dir + "/eucarc' file.")
    else:
        # Try to find existing active cert/key on clc first. Check admin_cred_dir then
        # do a recursive search from ssh user's home dir (likely root/)
        self.debug('Attempting to find an active cert for this account on the CLC...')
        certpaths = (self.find_active_cert_and_key_in_dir(dir=admin_cred_dir) or
                     self.find_active_cert_and_key_in_dir())
        self.debug('Found Active cert and key paths')
        if not certpaths:
            # No existing and active certs found, create new ones...
            self.debug('Could not find any existing active certs on clc, '
                       'trying to create new ones...')
            certpaths = self.create_new_user_certs(admin_cred_dir, account, user)
        # Copy cert and key into admin_cred_dir
        certpath = certpaths.get('certpath')
        keypath = certpaths.get('keypath')
        newcertpath = os.path.join(admin_cred_dir, os.path.basename(certpath))
        newkeypath = os.path.join(admin_cred_dir, os.path.basename(keypath))
        self.debug('Using certpath:{0} and keypath:{1} on clc'
                   .format(newcertpath, newkeypath))
        machine.sys('cp {0} {1}'.format(certpath, newcertpath))
        machine.sys('cp {0} {1}'.format(keypath, newkeypath))
        # Update the existing eucarc with new cert and key path info...
        self.debug("Setting cert/pk in '" + admin_cred_dir + "/eucarc'")
        # ${EUCA_KEY_DIR} is written literally for later shell expansion.
        machine.sys("echo 'export EC2_CERT=${EUCA_KEY_DIR}/" + "{0}' >> {1}"
                    .format(os.path.basename(newcertpath), clc_eucarc))
        machine.sys("echo 'export EC2_PRIVATE_KEY=${EUCA_KEY_DIR}/" + "{0}' >> {1}"
                    .format(os.path.basename(newkeypath), clc_eucarc))
        self.debug('updating zip file with new cert, key and eucarc: {0}'
                   .format(credzippath))
        # zip -g appends/updates entries in the existing archive in place.
        for updatefile in [os.path.basename(newcertpath), os.path.basename(newkeypath),
                           os.path.basename(clc_eucarc)]:
            machine.sys('cd {0} && zip -g {1} {2}'
                        .format(os.path.dirname(credzippath),
                                os.path.basename(credzippath),
                                updatefile), code=0)
    return credzippath
def create_new_user_certs(self, admin_cred_dir, account, user, force_cert_create=False,
                          newcertpath=None, newkeypath=None, machine=None):
    """
    Create a new signing certificate/key pair for 'user' via euare on the CLC.

    :param admin_cred_dir: directory the new cert/key files are written to
    :param account: account name (used for logging)
    :param user: user name the cert is created for
    :param force_cert_create: if True, delete the last existing cert when the
        per-user cert limit has been reached instead of raising
    :param newcertpath: optional explicit cert output path
    :param newkeypath: optional explicit key output path
    :param machine: Machine() to run commands on (defaults to self.clc_machine)
    :returns dict with 'certpath' and 'keypath' of the new files
    :raises RuntimeWarning: if two certs already exist and force_cert_create
        is not set
    """
    machine = machine or self.clc_machine
    eucarcpath = os.path.join(admin_cred_dir, 'eucarc')
    newcertpath = newcertpath or os.path.join(admin_cred_dir, "euca2-cert.pem")
    # Bug fix: the key path had a leading '/', which makes os.path.join
    # discard admin_cred_dir and target the filesystem root.
    newkeypath = newkeypath or os.path.join(admin_cred_dir, "euca2-pk.pem")
    # admin_certs = machine.sys("source {0} && /usr/bin/euare-userlistcerts | grep -v Active"
    #                           .format(eucarcpath))
    admin_certs = []
    for cert in self.get_active_certs():
        admin_certs.append(cert.get('certificate_id'))
    if len(admin_certs) > 1:
        if force_cert_create:
            self.debug("Found more than one certs, deleting last cert")
            # Bug fix: admin_certs[admin_certs.pop()] indexed the list with a
            # cert-id string (TypeError); pop() alone yields the last cert id.
            machine.sys(". {0} &>/dev/null && "
                        "/usr/bin/euare-userdelcert -c {1} --user-name {2}"
                        .format(eucarcpath,
                                admin_certs.pop(),
                                user),
                        code=0)
        else:
            raise RuntimeWarning('No active certs were found on the clc, and there are 2'
                                 'certs outstanding. Either delete an existing '
                                 'cert or move and active cert into clc root dir.'
                                 'The option "force_cert_create" will "delete" an existing'
                                 'cert automatically and replace it.'
                                 'Warning: deleting existing certs may leave signed'
                                 'objects in cloud unrecoverable.')
    self.debug("Creating a new signing certificate for user '{0}' in account '{1}'."
               .format(user, account))
    self.debug('New cert name:{0}, keyname:{1}'.format(os.path.basename(newcertpath),
                                                       os.path.basename(newkeypath)))
    machine.sys(". {0} &>/dev/null && "
                "/usr/bin/euare-usercreatecert --user-name {1} --out {2} --keyout {3}"
                .format(eucarcpath,
                        user,
                        newcertpath,
                        newkeypath),
                code=0)
    return {"certpath": newcertpath, "keypath": newkeypath}
def get_active_certs(self):
    """
    Query system for active certs list
    :returns :list of active cert dicts
    """
    if not getattr(self, 'euare', None):
        self.critical(self.markup('Cant update certs until euare interface '
                                  'is initialized', 91))
        return []
    # Walk the nested response envelope, treating any missing layer as empty.
    resp = self.euare.get_all_signing_certs() or {}
    cert_resp = resp.get('list_signing_certificates_response') or {}
    result = cert_resp.get('list_signing_certificates_result') or {}
    return result.get('certificates', [])
def get_active_id_for_cert(self, certpath, machine=None):
    """
    Attempt to get the cloud's active id for a certificate at 'certpath' on
    the 'machine' filesystem. Also see is_ec2_cert_active() for validating the current
    cert in use or the body (string buffer) of a cert.
    :param certpath: string representing the certificate path on the machines filesystem
    :param machine: Machine obj which certpath exists on
    :returns :str() certificate id (if cert is found to be active) else None
    """
    if not certpath:
        raise ValueError('No ec2 certpath provided or set for eutester obj')
    # NOTE(review): default here is self.clc, while sibling methods default
    # to self.clc_machine -- confirm which is intended.
    machine = machine or self.clc
    self.debug('Verifying cert: "{0}"...'.format(certpath))
    # Read the cert body off the remote filesystem for comparison.
    body = str("\n".join(machine.sys('cat {0}'.format(certpath), verbose=False))).strip()
    certs = []
    if body:
        certs = self.get_active_certs()
    # Match by exact certificate body against the cloud's active certs.
    for cert in certs:
        if str(cert.get('certificate_body')).strip() == body:
            self.debug('verified certificate with id "{0}" is still valid'
                       .format(cert.get('certificate_id')))
            return cert.get('certificate_id')
    self.debug('Cert: "{0}" is NOT active'.format(certpath or body))
    return None
def find_active_cert_and_key_in_dir(self, dir="", machine=None, recursive=True):
    """
    Attempts to find an "active" cert and the matching key files in the provided
    directory 'dir' on the provided 'machine' via ssh.
    If recursive is enabled, will attempt a recursive search from the provided directory.
    :param dir: the base dir to search in on the machine provided
    :param machine: a Machine() obj used for ssh search commands
    :param recursive: boolean, if set will attempt to search recursively from the dir provided
    :returns dict w/ values 'certpath' and 'keypath' or {} if not found.
    """
    machine = machine or self.clc_machine
    ret_dict = {}
    if dir and not dir.endswith("/"):
        dir += "/"
    if recursive:
        rec = "r"
    else:
        rec = ""
    # grep -l lists only the names of *.pem files containing a cert header.
    certfiles = machine.sys(
        'grep "{0}" -l{1} {2}*.pem'.format('^-*BEGIN CERTIFICATE', rec, dir))
    for f in certfiles:
        if self.get_active_id_for_cert(f, machine=machine):
            # Look for the matching key alongside the cert that was found.
            # (Note: this rebinds 'dir' for subsequent loop iterations.)
            dir = os.path.dirname(f)
            keypath = self.get_key_for_cert(certpath=f, keydir=dir, machine=machine)
            if keypath:
                self.debug('Found existing active cert and key on clc: {0}, {1}'
                           .format(f, keypath))
                return {'certpath': f, 'keypath': keypath}
    return ret_dict
def get_key_for_cert(self, certpath, keydir, machine=None, recursive=True):
    """
    Attempts to find a matching key for cert at 'certpath' in the provided directory
    'keydir' on the provided 'machine'.
    If recursive is enabled, will attempt a recursive search from the provided directory.
    :param certpath: path of the certificate to match
    :param keydir: the base dir to search in on the machine provided
    :param machine: a Machine() obj used for ssh search commands
    :param recursive: boolean, if set will attempt to search recursively from the dir provided
    :returns string representing the path to the key found or None if not found.
    """
    machine = machine or self.clc_machine
    self.debug('Looking for key to go with cert...')
    if keydir and not keydir.endswith("/"):
        keydir += "/"
    if recursive:
        rec = "r"
    else:
        rec = ""
    # A key matches a cert when their RSA moduli are identical; compare
    # md5sums of the openssl modulus output.
    certmodmd5 = machine.sys('openssl x509 -noout -modulus -in {0} | md5sum'
                             .format(certpath))
    if certmodmd5:
        certmodmd5 = str(certmodmd5[0]).strip()
    else:
        return None
    # grep -z treats the file as one record so the multi-line key pattern can
    # match across lines; -l lists only the matching file names.
    keyfiles = machine.sys('grep "{0}" -lz{1} {2}*.pem'
                           .format("^\-*BEGIN RSA PRIVATE KEY.*\n.*END RSA PRIVATE KEY\-*",
                                   rec, keydir))
    for kf in keyfiles:
        keymodmd5 = machine.sys('openssl rsa -noout -modulus -in {0} | md5sum'.format(kf))
        if keymodmd5:
            keymodmd5 = str(keymodmd5[0]).strip()
        if keymodmd5 == certmodmd5:
            self.debug('Found key {0} for cert {1}'.format(kf, certpath))
            return kf
    return None
def is_ec2_cert_active(self, certbody=None):
    """
    Attempts to verify if the current self.ec2_cert @ self.ec2_certpath is still active.
    :param certbody: cert body string, or a cert dict with a 'certificate_body' key
    :returns the cert id if found active, otherwise returns None
    """
    certbody = certbody or self.ec2_cert
    if not certbody:
        raise ValueError('No ec2 cert body provided or set for eutester to check for active')
    if isinstance(certbody, dict):
        # Bug fix: the extracted body was previously discarded and the dict's
        # repr was compared below, so dict inputs could never match.
        certbody = certbody.get('certificate_body')
        if not certbody:
            raise ValueError('Invalid certbody provided, did not have "certificate body" attr')
    for cert in self.get_active_certs():
        body = str(cert.get('certificate_body')).strip()
        if body and body == str(certbody).strip():
            return cert.get('certificate_id')
    return None
def credential_exist_on_remote_machine(self, cred_path, machine=None):
    """Run `test -e` on the remote machine to check whether cred_path exists.

    Returns the ssh command result (callers inspect its 'status' field).
    """
    target_machine = machine or self.clc_machine
    existence_check = "test -e " + cred_path
    return target_machine.ssh.cmd(existence_check)
def download_creds_from_clc(self, admin_cred_dir, zipfile="creds.zip"):
    """Fetch the creds zip from the CLC via sftp, unzip it locally, and make
    sure the local eucarc references a cert/private key.

    NOTE(review): the same path is used for both sides of the sftp get --
    assumes the local directory layout mirrors the CLC's; confirm.
    """
    zipfilepath = os.path.join(admin_cred_dir, zipfile)
    self.debug("Downloading credentials from " + self.clc.hostname + ", path:" + zipfilepath +
               " to local file: " + str(zipfile))
    self.sftp.get(zipfilepath, zipfilepath)
    unzip_cmd = "unzip -o {0} -d {1}".format(zipfilepath, admin_cred_dir)
    self.debug('Trying unzip cmd: ' + str(unzip_cmd))
    self.local(unzip_cmd)
    # backward compatibility
    cert_exists_in_eucarc = self.found("cat " + admin_cred_dir + "/eucarc", "export EC2_CERT")
    if cert_exists_in_eucarc:
        self.debug("Cert/pk already exist in '" + admin_cred_dir + "/eucarc' file.")
    else:
        self.download_certs_from_clc(admin_cred_dir=admin_cred_dir, update_eucarc=True)
def download_certs_from_clc(self, admin_cred_dir=None, update_eucarc=True, machine=None):
    """Download the EC2 cert/key referenced by the CLC's eucarc to admin_cred_dir.

    :param admin_cred_dir: creds dir (defaults to self.credpath)
    :param update_eucarc: if True, append local cert/key exports to the local eucarc
    :param machine: Machine() used to source the remote eucarc
    :returns dict with 'certpath'/'keypath', or {} if the eucarc lacks the paths
    """
    machine = machine or self.clc_machine
    admin_cred_dir = admin_cred_dir or self.credpath
    self.debug("Downloading certs from " + self.clc.hostname + ", path:" +
               admin_cred_dir + "/")
    # NOTE(review): clc_eucarc and local_eucarc are the same path -- assumes
    # the local layout mirrors the CLC's; confirm.
    clc_eucarc = os.path.join(admin_cred_dir, 'eucarc')
    local_eucarc = os.path.join(admin_cred_dir, 'eucarc')
    # Resolve $EC2_CERT / $EC2_PRIVATE_KEY by sourcing the remote eucarc.
    remotecertpath = machine.sys(". {0} &>/dev/null && "
                                 "echo $EC2_CERT".format(clc_eucarc))
    if remotecertpath:
        remotecertpath = remotecertpath[0]
    remotekeypath = machine.sys(". {0} &>/dev/null && "
                                "echo $EC2_PRIVATE_KEY".format(clc_eucarc))
    if remotekeypath:
        remotekeypath = remotekeypath[0]
    if not remotecertpath or not remotekeypath:
        self.critical('CERT and KEY paths not provided in {0}'.format(clc_eucarc))
        return {}
    localcertpath = os.path.join(admin_cred_dir, os.path.basename(remotecertpath))
    localkeypath = os.path.join(admin_cred_dir, os.path.basename(remotekeypath))
    self.sftp.get(remotecertpath, localcertpath)
    self.sftp.get(remotekeypath, localkeypath)
    if update_eucarc:
        self.debug("Setting cert/pk in '{0}".format(local_eucarc))
        # ${EUCA_KEY_DIR} is written literally for later shell expansion.
        self.local("echo 'export EC2_CERT=${EUCA_KEY_DIR}/" +
                   str(os.path.basename(localcertpath)) + "' >> " + local_eucarc)
        self.local("echo 'export EC2_PRIVATE_KEY=${EUCA_KEY_DIR}/" +
                   str(os.path.basename(localkeypath)) + "' >> " + local_eucarc)
    return {'certpath': localcertpath, 'keypath': localkeypath}
def send_creds_to_machine(self, admin_cred_dir, machine, filename='creds.zip'):
    """Push the creds zip to 'machine' and unzip it there, unless an
    identical copy (same md5) already exists.

    :param admin_cred_dir: directory containing the zip (same path used for
        local source and remote destination)
    :param machine: destination Machine()
    :param filename: zip file name within admin_cred_dir
    """
    filepath = os.path.join(admin_cred_dir, filename)
    self.debug("Sending credentials to " + machine.hostname)
    localmd5 = None
    remotemd5 = None
    try:
        machine.sys('ls ' + filepath, code=0)
        remotemd5 = self.get_md5_for_file(filepath, machine=machine)
        localmd5 = self.get_md5_for_file(filepath)
    except CommandExitCodeException:
        # File absent on the remote side; md5s stay None so we upload below.
        pass
    if not remotemd5 or (remotemd5 != localmd5):
        machine.sys("mkdir " + admin_cred_dir)
        # NOTE(review): uses the hardcoded 'creds.zip' rather than 'filename'
        # -- confirm intended.
        machine.sftp.put(admin_cred_dir + "/creds.zip", admin_cred_dir + "/creds.zip")
        machine.sys("unzip -o " + admin_cred_dir + "/creds.zip -d " + admin_cred_dir)
    else:
        self.debug("Machine " + machine.hostname + " already has credentials in place not "
                   " sending")
def setup_local_creds_dir(self, admin_cred_dir):
    """Create the local credentials directory if it does not already exist."""
    if os.path.exists(admin_cred_dir):
        return
    os.mkdir(admin_cred_dir)
def setup_remote_creds_dir(self, admin_cred_dir):
    # Create the credentials directory on the remote machine. No 'mkdir -p':
    # assumes the parent exists; any mkdir error is surfaced by self.sys.
    self.sys("mkdir " + admin_cred_dir)
| {"/cloud_admin/systemconnection.py": ["/cloud_admin/access/autocreds.py"]} |
64,579 | shubhams/stock-talk | refs/heads/master | /stocktalk/models/user.py | from flask_login import UserMixin
from stocktalk import db
class User(UserMixin, db.Document):
    """MongoDB-backed user account, integrated with Flask-Login via UserMixin."""
    username = db.StringField()
    # Stores a werkzeug password hash, never plaintext (see routes.register).
    password = db.StringField()
    meta = {'collection': 'users'}
class UserSymbols(db.Document):
    """Per-user list of saved stock ticker symbols."""
    username = db.StringField()
    symbols = db.ListField(db.StringField())
    meta = {'collection': 'user_symbols'}
64,580 | shubhams/stock-talk | refs/heads/master | /stocktalk/helpers.py | import requests
from stocktalk import app
from stocktalk.constants.app_constants import ALPHA_VANTAGE_URLS
def get_search_results(keywords):
    """Query Alpha Vantage SYMBOL_SEARCH for ticker matches on 'keywords'.

    Returns the decoded JSON response (callers read its 'bestMatches' list).
    """
    key_val = {
        'function': ALPHA_VANTAGE_URLS.FUNCTIONS.SYMBOL_SEARCH,
        'keywords': keywords,
        'apikey': app.config['ALPHA_VANTAGE_KEY']
    }
    r = requests.get(ALPHA_VANTAGE_URLS.BASE_URL, params=key_val)
    # Debug aid: dumps the full API response to stdout.
    print(r.json())
    return r.json()
def get_time_series(sym, interval='60min'):
    """Fetch Alpha Vantage TIME_SERIES_INTRADAY data for symbol 'sym'.

    :param sym: ticker symbol
    :param interval: candle interval string accepted by the API
    Returns the decoded JSON response.
    """
    key_val = {
        'function': ALPHA_VANTAGE_URLS.FUNCTIONS.TIME_SERIES_INTRADAY,
        'symbol': sym,
        'interval': interval,
        'apikey': app.config['ALPHA_VANTAGE_KEY']
    }
    r = requests.get(ALPHA_VANTAGE_URLS.BASE_URL, params=key_val)
    # Debug aid: dumps the full API response to stdout.
    print(r.json())
    return r.json()
64,581 | shubhams/stock-talk | refs/heads/master | /stocktalk/models/forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField
from wtforms.validators import InputRequired, Length, EqualTo
class RegForm(FlaskForm):
    """Registration form: username plus password with confirmation field."""
    username = StringField('username', validators=[InputRequired(), Length(max=30)])
    # Password must match confirm_password and be 8-30 chars long.
    password = PasswordField('password', validators=[InputRequired(),
                             EqualTo('confirm_password', message='Passwords must match'),
                             Length(min=8, max=30)])
    confirm_password = PasswordField('Repeat Password')
class LoginForm(FlaskForm):
    """Login form: username, password, and a remember-me checkbox."""
    username = StringField('username', validators=[InputRequired(), Length(max=30)])
    password = PasswordField('password', validators=[InputRequired(),
                             Length(min=8, max=30)])
    remember = BooleanField('remember')
64,582 | shubhams/stock-talk | refs/heads/master | /stocktalk/routes.py | from flask import request, redirect, url_for, render_template, json
from flask_login import current_user, login_user, login_required, logout_user
from mongoengine import DoesNotExist
from werkzeug.security import generate_password_hash, check_password_hash
from stocktalk import app, User
from stocktalk.helpers import get_search_results, get_time_series
from stocktalk.models.forms import RegForm, LoginForm
from stocktalk.models.user import UserSymbols
@app.route('/')
def hello_world():
    """Root route: trivial liveness response."""
    greeting = 'Hello World!'
    return greeting
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate a user; on success redirect to the dashboard.

    Renders the login form with an 'Invalid credentials' error when the
    username is unknown or the password does not match.
    """
    if current_user.is_authenticated:
        return redirect(url_for('dashboard'))
    form = LoginForm()
    error_message = None
    if request.method == 'POST':
        if form.validate():
            check_user = User.objects(username=form.username.data).first()
            # Bug fix: guard against an unknown username -- previously a
            # missing user (None) crashed check_password_hash instead of
            # showing the error message.
            if check_user and check_password_hash(check_user['password'], form.password.data):
                if form.remember.data:
                    login_user(check_user, remember=True)
                else:
                    login_user(check_user)
                return redirect(url_for('dashboard'))
            else:
                error_message = 'Invalid credentials'
    return render_template('login.html', form=form, error=error_message)
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new account (password stored hashed) and log the user in.

    NOTE: a taken username currently re-renders the form with no error shown.
    """
    form = RegForm()
    if request.method == 'POST':
        if form.validate():
            existing_user = User.objects(username=form.username.data).first()
            if existing_user is None:
                pass_hash = generate_password_hash(form.password.data, method='sha256')
                # Fix: mongoengine Documents must be instantiated with keyword
                # field values; positional arguments are not supported.
                new_user = User(username=form.username.data, password=pass_hash).save()
                login_user(new_user)
                return redirect(url_for('dashboard'))
    return render_template('register.html', form=form)
@app.route('/logout', methods=['POST'])
@login_required
def logout():
    """End the current session and redirect to the login page."""
    logout_user()
    return redirect(url_for('login'))
@app.route('/search', methods=['GET'])
@login_required
def search_symbols():
    """Proxy Alpha Vantage symbol search, reshaped into {id, text} entries
    under a 'results' key (select2-style JSON)."""
    sym = request.args.get('sym')
    results = get_search_results(sym)
    symbols_list = list()
    for result in results['bestMatches']:
        symbol = dict()
        # Alpha Vantage prefixes response keys with ordinals
        # ('1. symbol', '2. name').
        symbol['id'] = result['1. symbol']
        text = result['2. name']
        symbol['text'] = text + ' (' + symbol['id'] + ')'
        symbols_list.append(symbol)
    symbol_results = dict()
    symbol_results['results'] = symbols_list
    response = app.response_class(
        response=json.dumps(symbol_results),
        status=200,
        mimetype='application/json'
    )
    return response
@app.route('/save', methods=['POST'])
@login_required
def save_symbol():
    """Add a stock symbol to the current user's saved set.

    Returns a plain-text "Success!"/"Failed! Try again!" status string.
    """
    sym = request.form.get('sym')
    if sym:
        saved_symbols = UserSymbols.objects(username=current_user.username)
        if not saved_symbols:
            # Fix: mongoengine Documents must be instantiated with keyword
            # field values; positional arguments are not supported.
            UserSymbols(username=current_user.username, symbols=[sym]).save()
        else:
            # add_to_set avoids duplicate entries for the same symbol.
            UserSymbols.objects(username=current_user.username).update_one(add_to_set__symbols=[sym])
        return "Success!"
    return "Failed! Try again!"
@app.route('/remove', methods=['POST'])
@login_required
def remove_symbol():
    """Remove a stock symbol from the current user's saved set.

    Returns a plain-text "Success!"/"Failed! Try again!" status string.
    """
    sym = request.form.get('sym')
    if sym:
        try:
            UserSymbols.objects(username=current_user.username).update_one(pull__symbols=sym)
            # Bug fix: the success path previously fell through to the
            # failure return, so the client always saw "Failed!".
            return "Success!"
        except DoesNotExist:
            return "Failed! Try again!"
    return "Failed! Try again!"
@app.route('/timeseries', methods=['GET'])
@login_required
def time_series():
    """Return intraday time-series JSON for the requested symbol."""
    sym = request.args.get('sym')
    interval = request.args.get('interval')
    # Only pass the interval through when the client supplied one; otherwise
    # let the helper fall back to its default.
    results = get_time_series(sym, interval) if interval else get_time_series(sym)
    return app.response_class(
        response=json.dumps(results),
        status=200,
        mimetype='application/json'
    )
@app.route('/dashboard', methods=['GET'])
@login_required
def dashboard():
    """Render the dashboard with the user's saved symbols (empty if none)."""
    try:
        saved_symbols = UserSymbols.objects.get(username=current_user.username)
        return render_template('dashboard.html', symbols=saved_symbols.symbols, username=current_user.username)
    except DoesNotExist:
        # First visit: the user has no UserSymbols document yet.
        return render_template('dashboard.html', symbols=[], username=current_user.username)
64,583 | shubhams/stock-talk | refs/heads/master | /stocktalk/constants/app_constants.py | class APP_CONSTANTS:
CONFIG_FROM_ENV = 'YOURAPPLICATION_SETTINGS'
class DB_CONSTANTS:
    """MongoDB collection names."""
    # NOTE(review): appears unused; User.meta hardcodes 'users' -- confirm.
    USER_COLLECTION = 'users'
class ALPHA_VANTAGE_URLS:
    """Endpoint and function names for the Alpha Vantage REST API."""
    BASE_URL = 'https://www.alphavantage.co/query'

    class FUNCTIONS:
        """Values accepted by the API's 'function' query parameter."""
        SYMBOL_SEARCH = 'SYMBOL_SEARCH'
        TIME_SERIES_INTRADAY = 'TIME_SERIES_INTRADAY'
64,584 | shubhams/stock-talk | refs/heads/master | /stocktalk/__init__.py | from flask import Flask
from flask_mongoengine import MongoEngine
from flask_wtf.csrf import CSRFProtect
from flask_login import LoginManager
from stocktalk.constants.app_constants import APP_CONSTANTS
# Flask app configured from the file named by the YOURAPPLICATION_SETTINGS
# env var (APP_CONSTANTS.CONFIG_FROM_ENV).
app = Flask(__name__)
app.config.from_envvar(APP_CONSTANTS.CONFIG_FROM_ENV)
# Extensions are created unbound here and bound via init_app() further below,
# after the Flask-Login user loader is registered.
csrf = CSRFProtect()
db = MongoEngine()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
# Endpoint users are redirected to when @login_required fails.
login_manager.login_view = 'login'
from stocktalk.models.user import User
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: fetch the User for a session's stored id."""
    return User.objects(pk=user_id).first()
# Bind the extensions now that the user loader is registered.
csrf.init_app(app)
db.init_app(app)
login_manager.init_app(app)
# Imported for side effects: registers all view functions on `app`.
from stocktalk import routes
64,585 | shubhams/stock-talk | refs/heads/master | /stocktalk/config/dev.py | MONGODB_SETTINGS = {
'db': 'stock_talk',
'host': 'localhost',
'port': 27017
}
SECRET_KEY = b'XXXXXXXXXXXXXXXXXXXX'
ALPHA_VANTAGE_KEY = 'XXXXXXXXXXXXXXXXXXX' | {"/stocktalk/models/user.py": ["/stocktalk/__init__.py"], "/stocktalk/helpers.py": ["/stocktalk/__init__.py", "/stocktalk/constants/app_constants.py"], "/stocktalk/routes.py": ["/stocktalk/__init__.py", "/stocktalk/helpers.py", "/stocktalk/models/forms.py", "/stocktalk/models/user.py"], "/stocktalk/__init__.py": ["/stocktalk/constants/app_constants.py", "/stocktalk/models/user.py"]} |
64,586 | divyekant/slackCTCommands | refs/heads/master | /errorHandling.py | import Constants
import slackAlert
# Module-level Slack webhook URL for error notifications; set via setErrorURL().
error_url = ""
def exit_code():
    """Terminate the current invocation by raising SystemExit."""
    # Fix: quit() is a site-module helper intended only for interactive
    # sessions (per the Python docs) and may be absent when the interpreter
    # runs with site disabled (-S); sys.exit() is the programmatic form.
    sys.exit()
def setErrorURL(url):
    # Store the Slack webhook URL that handleError() posts failures to.
    global error_url
    error_url = url
def handleError(errorMessage):
    """Post the Slack text matching a known error code, then terminate.

    Unrecognized error codes are ignored (no message, no exit).
    """
    if errorMessage == Constants.error_command_not_matched:
        message = Constants.error_command_not_matched_message
    elif errorMessage == Constants.error_API_keys_mismatch:
        message = Constants.error_API_keys_mismatch_message
    else:
        return
    slackAlert.simpleMessage(message, error_url)
    exit_code()
64,587 | divyekant/slackCTCommands | refs/heads/master | /slackAlert.py | import json
import urllib
import requests
def simpleMessagetoChannel(data, url):
    """POST a plain-text Slack message with response_type 'in_channel'
    (visible to the whole channel) to the given webhook URL."""
    payload = {
        "response_type": "in_channel",
        "text": data
    }
    headers = {
        'Content-Type': "application/json",
    }
    # NOTE(review): the response is ignored; delivery failures are silent.
    response = requests.request("POST", url, data=json.dumps(payload), headers=headers)
def simpleMessage(data, url):
    """POST a plain-text Slack message to the given webhook URL.

    The HTTP response is ignored; delivery failures are silent.
    """
    body = json.dumps({"text": data})
    response = requests.request("POST", url, data=body,
                                headers={'Content-Type': "application/json"})
def messagewithChart(chart, url):
    """POST a Slack Block Kit message whose body is a QuickChart-rendered
    image of the given chart config."""
    payload = {
        "replace_original": True,
        "response_type": "in_channel",
        "blocks": [
            {
                "type": "image",
                "title": {
                    "type": "plain_text",
                    "text": "Trends"
                },
                "block_id": "quickchart-image",
                # The chart config is serialized into a QuickChart render URL.
                "image_url": getQuickChartURL(chart),
                "alt_text": "Chart showing latest data"
            }
        ]
    }
    headers = {
        'Content-Type': "application/json",
    }
    # NOTE(review): the response is ignored; delivery failures are silent.
    response = requests.request("POST", url, data=json.dumps(payload), headers=headers)
def getQuickChartURL(chart):
    """Build a QuickChart render URL embedding the URL-encoded chart config."""
    encoded_config = urllib.quote(json.dumps(chart))
    return "https://quickchart.io/chart?bkg=white&c=" + encoded_config
64,588 | divyekant/slackCTCommands | refs/heads/master | /trendsAPI.py | import requests
import time
import Constants
def queryCall(eventName, accid, accpc, dfrom, dto, trendType, uniqueFlag):
    """Start an async CleverTap trends query; return its request id, or 0.

    Returns 0 both on HTTP failure and when the API does not answer with
    status "partial" — the caller treats 0 as "retry".
    NOTE(review): a direct "success" response is also mapped to 0, which
    would retry forever; presumably the trends API always answers
    "partial" first — confirm.
    """
    url = "https://api.clevertap.com/1/counts/trends.json"
    # Body is built by hand to match the CleverTap trends schema exactly;
    # uniqueFlag/trendType are lower-cased to form valid JSON literals.
    payload = "{\"event_name\":\"%s\",\"from\":%s,\"to\":%s,\"unique\":%s,\"groups\":{\"foo\":{\"trend_type\":\"%s\"}}}" % (
        eventName, dfrom, dto, uniqueFlag.lower(), trendType.lower())
    headers = {
        'X-CleverTap-Account-Id': accid,
        'X-CleverTap-Passcode': accpc,
        'Content-Type': "application/json",
        'cache-control': "no-cache",
        'Postman-Token': "687db015-6665-4a5b-9706-69f78b7a03e5"
    }
    # Python 2 print statement — this module targets Python 2.
    print "Making the Trends Api Call for Event: " + eventName
    response = requests.request("POST", url, data=payload, headers=headers)
    if response.status_code != 200:
        return 0
    else:
        res = response.json()
        if res["status"] == "partial":
            reqid = res["req_id"]
            return reqid
        else:
            return 0
def partialCall(reqid, accid, accpc):
    """Poll a pending trends request by id; return the JSON result, or 0.

    Returns 0 on HTTP failure or while the request is still "partial",
    so the caller can sleep and retry.
    """
    url = "https://api.clevertap.com/1/counts/trends.json"
    querystring = {"req_id": "%s" % reqid}
    payload = ""
    headers = {
        'X-CleverTap-Account-Id': accid,
        'X-CleverTap-Passcode': accpc,
        'Content-Type': "application/json",
        'cache-control': "no-cache",
        'Postman-Token': "21b84377-9fc4-4709-9836-65f13452160d"
    }
    # Python 2 print statement — this module targets Python 2.
    print "Making the Trends Api Request ID Call"
    response = requests.request("GET", url, data=payload, headers=headers, params=querystring)
    if response.status_code != 200:
        return 0
    else:
        res = response.json()
        if res["status"] == "partial":
            return 0
        return res
def fetchData(CTAPIObject):
eventName = getEventName(CTAPIObject)
uniqueFlag = getUniqueFlag()
accid = getAccountID(CTAPIObject)
accpcode = getAccountPasscode(CTAPIObject)
dfrom = getFromDate(CTAPIObject)
dto = getToDate(CTAPIObject)
trendType = getTrendType()
if uniqueFlag == "E":
uniqueFlag = "False"
else:
uniqueFlag = "True"
print "Doing for Event: " + eventName
# get req id
reqID = queryCall(eventName, accid, accpcode, dfrom, dto, trendType, uniqueFlag)
while reqID == 0:
time.sleep(5)
reqID = queryCall(eventName, accid, accpcode, dfrom, dto, trendType, uniqueFlag)
retryFlag = True
retryCount = 0
while retryFlag and retryCount <= 10:
res = partialCall(reqID, accid, accpcode)
if res == 0:
retryFlag = True
time.sleep(5)
retryCount = retryCount + 1
else:
retryFlag = False
if retryCount <= 10:
if res["status"] == "success":
data = res["foo"]
else:
print "Non - Success Status returned "
print res
else:
print "Too Many retries for Event: " + eventName
print "Done for Event: " + eventName
return data
def getEventName(data):
    """Return the event name from a CT API object dict."""
    return data[Constants.event_name]
def getFromDate(data):
    """Return the from-date (YYYYMMDD) from a CT API object dict."""
    return data[Constants.from_date]
def getToDate(data):
    """Return the to-date (YYYYMMDD) from a CT API object dict."""
    return data[Constants.to_date]
def getAccountID(data):
    """Return the CleverTap account id from a CT API object dict."""
    return data[Constants.account_id]
def getAccountPasscode(data):
    """Return the CleverTap account passcode from a CT API object dict."""
    return data[Constants.account_passcode]
def getTrendType():
    """Trend granularity requested from the CleverTap API (fixed to daily)."""
    return "daily"
def getUniqueFlag():
    """Count mode selector: "E" = raw event counts (callers map it to unique=False)."""
    return "E"
| {"/errorHandling.py": ["/Constants.py", "/slackAlert.py"], "/commandInterpreter.py": ["/Constants.py"], "/CTAPIConverter.py": ["/Constants.py", "/errorHandling.py"], "/chartGenerater.py": ["/Constants.py"], "/lambda_function.py": ["/CTAPIConverter.py", "/Constants.py", "/chartGenerater.py", "/errorHandling.py", "/trendsAPI.py", "/slackAlert.py", "/commandInterpreter.py"]} |
64,589 | divyekant/slackCTCommands | refs/heads/master | /Constants.py | # All command Match Templates
# Command match templates. Capture groups, in order:
#   command type, event name, [event property], [days], account id, passcode.
trend_base_template_with_property3 = "Show me the (.*) of (.*) where (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_property4 = "Show me the (.*) for (.*) where (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_property5 = "Show me (.*) of (.*) where (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_property6 = "Show me (.*) for (.*) where (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_property7 = "Show (.*) of (.*) where (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_property8 = "Show (.*) for (.*) where (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_time3 = "Show me the (.*) of (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_time4 = "Show me the (.*) for (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_time5 = "Show me (.*) of (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_time6 = "Show me (.*) for (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_time7 = "Show (.*) of (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template_with_time8 = "Show (.*) for (.*) in the last (.*) days with (.*) and (.*)"
trend_base_template3 = "Show me the (.*) of (.*) with (.*) and (.*)"
trend_base_template4 = "Show me the (.*) for (.*) with (.*) and (.*)"
trend_base_template5 = "Show me (.*) of (.*) with (.*) and (.*)"
trend_base_template6 = "Show me (.*) for (.*) with (.*) and (.*)"
trend_base_template7 = "Show (.*) of (.*) with (.*) and (.*)"
trend_base_template8 = "Show (.*) for (.*) with (.*) and (.*)"
# Ordered most-specific first: the interpreter takes the first regex match.
command_templates = [trend_base_template_with_property3, trend_base_template_with_property4,
                     trend_base_template_with_property5, trend_base_template_with_property6,
                     trend_base_template_with_property7, trend_base_template_with_property8,
                     trend_base_template_with_time3,
                     trend_base_template_with_time4, trend_base_template_with_time5, trend_base_template_with_time6,
                     trend_base_template_with_time7, trend_base_template_with_time8, trend_base_template3,
                     trend_base_template4, trend_base_template5,
                     trend_base_template6, trend_base_template7, trend_base_template8]
# templatetoken constants — capture-group counts used to pick the key list.
template_replacement_token = "(.*)"
trend_base_template_token_count = trend_base_template3.count(template_replacement_token)
trend_base_template_with_time_token_count = trend_base_template_with_time3.count(template_replacement_token)
trend_base_template_with_property_token_count = trend_base_template_with_property3.count(template_replacement_token)
# ERROR CODES
error_command_not_matched = "command_not_matched"
error_command_not_matched_message = "Oops! the command message does not follow the pattern. Please try /cleverbot help " \
                                    "to know exact commands "
error_API_keys_mismatch = "error_API_keys_mismatch"
error_API_keys_mismatch_message = "Oops! Mandatory keys not provided in the command. Please try /cleverbot help " \
                                  "to know exact commands "
# CommandTypes
trend = "trend"
# Command Keys
event_name = "event_name"
from_date = "from_date"
to_date = "to_date"
user_property_name = "user_property_name"
event_property_name = "event_property_name"
account_id = "account_id"
account_passcode = "account_passcode"
command_type = "command_type"
days = "days"
# Command Structure — positional key lists zipped with regex groups.
# NOTE(review): trend_base_template3 captures only 4 groups, yet
# trend_command_base_keys lists 5 keys (including `days`);
# generateCommandObject indexes groups positionally, so base-form commands
# look like they would raise IndexError — confirm the intended pairing.
trend_command_base_keys = [command_type, event_name, days, account_id, account_passcode]
trend_command_base_keys_with_time = [command_type, event_name, days, account_id, account_passcode]
trend_command_base_keys_with_property = [command_type, event_name, event_property_name, days, account_id,
                                         account_passcode]
# Styling
BLACK_COLOUR_HEX = '#000000'
WHITE_COLOUR_HEX = '#ffffff'
# CT API Query Constants
default_lookback_days = 30
trendsAPI_mandatory_keys = [event_name, account_id, account_passcode]
# Charts defaults
chart_trend_style = {
    "fill": False,
    "borderColor": 'rgba(25, 108, 248, 1)'
}
| {"/errorHandling.py": ["/Constants.py", "/slackAlert.py"], "/commandInterpreter.py": ["/Constants.py"], "/CTAPIConverter.py": ["/Constants.py", "/errorHandling.py"], "/chartGenerater.py": ["/Constants.py"], "/lambda_function.py": ["/CTAPIConverter.py", "/Constants.py", "/chartGenerater.py", "/errorHandling.py", "/trendsAPI.py", "/slackAlert.py", "/commandInterpreter.py"]} |
64,590 | divyekant/slackCTCommands | refs/heads/master | /commandInterpreter.py | import re
import Constants
def getCommandKeys(text):
    """Match *text* against the known command templates.

    Returns a command dict built from the first matching template, or
    ``Constants.error_command_not_matched`` when none match.
    """
    for template in Constants.command_templates:
        tokens = re.match(template, text)
        if tokens is not None:
            return generateCommandObject(getCommandStructure(tokens), tokens)
    return Constants.error_command_not_matched
def getCommandStructure(tokens):
    """Pick the ordered key list matching the number of captured groups.

    Only the "trend" command type is supported; returns None otherwise,
    or when the group count matches no known template family.
    """
    if tokens.group(1).lower() != Constants.trend:
        return None
    # Token counts are distinct per template family, so a lookup table
    # replaces the original if/elif chain.
    structures = {
        Constants.trend_base_template_token_count: Constants.trend_command_base_keys,
        Constants.trend_base_template_with_property_token_count: Constants.trend_command_base_keys_with_property,
        Constants.trend_base_template_with_time_token_count: Constants.trend_command_base_keys_with_time,
    }
    return structures.get(tokens.lastindex)
def generateCommandObject(commandStructure, tokens):
    """Zip the ordered *commandStructure* keys with the regex groups of *tokens*."""
    # match.group() already yields None for an unmatched group, so the
    # original's explicit None branch needs no special case here.
    return {key: tokens.group(pos + 1) for pos, key in enumerate(commandStructure)}
| {"/errorHandling.py": ["/Constants.py", "/slackAlert.py"], "/commandInterpreter.py": ["/Constants.py"], "/CTAPIConverter.py": ["/Constants.py", "/errorHandling.py"], "/chartGenerater.py": ["/Constants.py"], "/lambda_function.py": ["/CTAPIConverter.py", "/Constants.py", "/chartGenerater.py", "/errorHandling.py", "/trendsAPI.py", "/slackAlert.py", "/commandInterpreter.py"]} |
64,591 | divyekant/slackCTCommands | refs/heads/master | /CTAPIConverter.py | import Constants
import datetime
import errorHandling
def generateObject(commandObject):
    """Convert an interpreted command dict into a CleverTap trends API dict.

    Translates the relative ``days`` value into absolute ``to_date`` /
    ``from_date`` strings, skips empty/None values, and copies the rest
    through. Missing mandatory keys are reported via errorHandling.
    """
    CTAPIObject = {}
    # Fix: list() so this also works on Python 3, where dict.keys() is a
    # view and cannot be indexed (the original did commandObjectKeys[i]).
    commandObjectKeys = list(commandObject.keys())
    hasTrendsAPIKeysCheck(commandObjectKeys)
    if commandObject[Constants.command_type].lower() == Constants.trend:
        for key in commandObjectKeys:
            value = commandObject[key]
            if key == Constants.days:
                dates = getDatesforCTAPI(value)
                CTAPIObject[Constants.to_date] = dates[0]
                CTAPIObject[Constants.from_date] = dates[1]
            elif value is None or value == "":
                continue
            else:
                CTAPIObject[key] = value
    return CTAPIObject
def getDate(diff=0):
    """Return today's date shifted *diff* days into the past."""
    return (datetime.datetime.now() - datetime.timedelta(days=diff)).date()
def getDatesforCTAPI(days):
    """Return [to_date, from_date] as YYYYMMDD strings.

    ``days`` is the lookback window; None/empty falls back to
    ``Constants.default_lookback_days``.
    """
    fmt = "%Y%m%d"
    if days is None or days == "":
        lookback = Constants.default_lookback_days
    else:
        lookback = int(days)
    return [getDate().strftime(fmt), getDate(lookback).strftime(fmt)]
def hasTrendsAPIKeysCheck(keys):
    """Report an API-keys-mismatch error when any mandatory trends key is absent."""
    missing = [item for item in Constants.trendsAPI_mandatory_keys if item not in keys]
    if missing:
        errorHandling.handleError(Constants.error_API_keys_mismatch)
| {"/errorHandling.py": ["/Constants.py", "/slackAlert.py"], "/commandInterpreter.py": ["/Constants.py"], "/CTAPIConverter.py": ["/Constants.py", "/errorHandling.py"], "/chartGenerater.py": ["/Constants.py"], "/lambda_function.py": ["/CTAPIConverter.py", "/Constants.py", "/chartGenerater.py", "/errorHandling.py", "/trendsAPI.py", "/slackAlert.py", "/commandInterpreter.py"]} |
64,592 | divyekant/slackCTCommands | refs/heads/master | /chartGenerater.py | import datetime
import Constants
def generateChart(CTData, cType):
    """Build a chart dict for *CTData* by command type; {} for unknown types."""
    if cType.lower() != Constants.trend:
        return {}
    cleaned = runDataCleaner(CTData, Constants.trend)
    return generateLineChart(cleaned)
def generateLineChart(data):
    """Build a Chart.js line-chart dict from *data* ({date label: count})."""
    labels = sorted(data)
    values = [data[key] for key in labels]
    options = {
        "scales": {
            "xAxes": [{
                "gridLines": {
                    "color": "rgba(0, 0, 0, 0)"
                },
                "ticks": {
                    "fontSize": 10
                }
            }],
            "yAxes": [{
                "scaleLabel": {
                    "display": "true",
                    "labelString": 'Event Count'
                },
                "ticks": {
                    "fontSize": 10
                }
            }]
        }
    }
    chart = {
        "type": 'line',
        "data": {
            "labels": labels,
            "datasets": [{
                "label": "Event Trend",
                "data": values
            }]
        },
        "options": options
    }
    # Apply the default trend styling before handing the chart back.
    return setChartDataSetStyle(chart, Constants.trend)
def setChartDataSetStyle(chart, cType):
    """Apply the default dataset styling for *cType* to *chart* (in place) and return it."""
    if cType.lower() == Constants.trend:
        # dict.update copies every style key onto the first dataset, exactly
        # as the original per-key loop did.
        chart["data"]["datasets"][0].update(Constants.chart_trend_style)
    return chart
def runDataCleaner(data, cType):
    """Return *data* re-keyed in chronological order (trend charts only)."""
    cleanData = {}
    if cType.lower() == Constants.trend:
        cleanData = {date: data[date] for date in sortDates(data)}
    return cleanData
def sortDates(data):
    """Return the keys of *data* (YYYYMMDD date strings) in chronological order."""
    return sorted(data, key=lambda d: datetime.datetime.strptime(d, '%Y%m%d'))
| {"/errorHandling.py": ["/Constants.py", "/slackAlert.py"], "/commandInterpreter.py": ["/Constants.py"], "/CTAPIConverter.py": ["/Constants.py", "/errorHandling.py"], "/chartGenerater.py": ["/Constants.py"], "/lambda_function.py": ["/CTAPIConverter.py", "/Constants.py", "/chartGenerater.py", "/errorHandling.py", "/trendsAPI.py", "/slackAlert.py", "/commandInterpreter.py"]} |
64,593 | divyekant/slackCTCommands | refs/heads/master | /lambda_function.py | import CTAPIConverter
import Constants
import chartGenerater
import errorHandling
import trendsAPI
import urllib
import slackAlert
import commandInterpreter
def lambda_handler(event, context):
    """AWS Lambda entry point for the Slack slash command.

    Decodes the Slack payload, acknowledges immediately, fetches CleverTap
    trend data, and posts the rendered chart back to the response URL.
    """
    slack_payload = convertBodytoJSON(event["body"])
    response_url = getURL(slack_payload, "response_url")
    errorHandling.setErrorURL(response_url)
    slackAlert.simpleMessage("Fetching data!", response_url)
    commandObject = commandInterpreter.getCommandKeys(getCommand(slack_payload))
    CTAPIObject = CTAPIConverter.generateObject(commandObject)
    CTData = trendsAPI.fetchData(CTAPIObject)
    chart = chartGenerater.generateChart(CTData, commandObject[Constants.command_type])
    slackAlert.messagewithChart(chart, response_url)
    return {'statusCode': 200}
def getCommand(data):
    """URL-decode the slash-command text and restore '+'-encoded spaces.

    Uses Python 2's ``urllib.unquote`` (this module is Python 2 throughout).
    """
    return urllib.unquote(data["text"]).replace("+", " ")
def convertBodytoJSON(body):
    """Parse an application/x-www-form-urlencoded *body* into a dict.

    Splits on '&' and '='; like the original, only the first two '='-separated
    pieces of each pair are kept.
    """
    pairs = (token.split("=") for token in body.split("&"))
    return {kv[0]: kv[1] for kv in pairs}
def getURL(payload, key):
    """Return the URL-decoded value stored under *key* in *payload* (py2 urllib.unquote)."""
    encoded = payload[key]
    return urllib.unquote(encoded)
| {"/errorHandling.py": ["/Constants.py", "/slackAlert.py"], "/commandInterpreter.py": ["/Constants.py"], "/CTAPIConverter.py": ["/Constants.py", "/errorHandling.py"], "/chartGenerater.py": ["/Constants.py"], "/lambda_function.py": ["/CTAPIConverter.py", "/Constants.py", "/chartGenerater.py", "/errorHandling.py", "/trendsAPI.py", "/slackAlert.py", "/commandInterpreter.py"]} |
64,594 | zavyalovdv/django_hospital | refs/heads/main | /patient/migrations/0007_auto_20210411_2123.py | # Generated by Django 3.0.8 on 2021-04-11 18:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.0.8): makes admitted_hospital_date nullable and
    refreshes severity_disease choices on Patient and its history table.

    NOTE: generated migration — operations must stay identical to the
    recorded migration state; only this docstring was added.
    """

    dependencies = [
        ('patient', '0006_auto_20210406_2340'),
    ]
    operations = [
        migrations.AlterField(
            model_name='historicalpatient',
            name='admitted_hospital_date',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Дата и время поступления в больницу'),
        ),
        migrations.AlterField(
            model_name='historicalpatient',
            name='severity_disease',
            field=models.CharField(blank=True, choices=[('критическое', 'Критическое'), ('Тяжелое', 'Тяжелое'), ('средней тяжести', 'Средней тяжести'), ('среднее', 'Среднее'), ('хорошее', 'Хорошее')], max_length=15, null=True, verbose_name='Текущее состояние'),
        ),
        migrations.AlterField(
            model_name='patient',
            name='admitted_hospital_date',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Дата и время поступления в больницу'),
        ),
        migrations.AlterField(
            model_name='patient',
            name='severity_disease',
            field=models.CharField(blank=True, choices=[('критическое', 'Критическое'), ('Тяжелое', 'Тяжелое'), ('средней тяжести', 'Средней тяжести'), ('среднее', 'Среднее'), ('хорошее', 'Хорошее')], max_length=15, null=True, verbose_name='Текущее состояние'),
        ),
    ]
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,595 | zavyalovdv/django_hospital | refs/heads/main | /patient/tests/models/test_api.py | from django.urls import reverse
from rest_framework.test import APITestCase
class BooksApitestCase(APITestCase):
    """Smoke test for the patients list API endpoint."""

    def test_get(self):
        url = '/api/patients/'
        response = self.client.get(url)
        # Fix: the original test only printed the response and could never
        # fail; assert success so endpoint regressions are actually caught.
        self.assertEqual(response.status_code, 200)
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,596 | zavyalovdv/django_hospital | refs/heads/main | /patient/migrations/0004_auto_20210402_0010.py | # Generated by Django 3.0.8 on 2021-04-01 21:10
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('patient', '0003_auto_20210401_2351'),
]
operations = [
migrations.AlterField(
model_name='movementhistory',
name='prev_ward_number',
field=models.CharField(help_text='В формате - 010', max_length=4, null=True, validators=[django.core.validators.RegexValidator(message='Упс... Попробуйте снова', regex='^[0-9]{3}$')], verbose_name='Предыдущая палата'),
),
]
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,597 | zavyalovdv/django_hospital | refs/heads/main | /patient/authentification.py | from django.shortcuts import render, redirect
from django.contrib.auth import login, logout
from .forms import UserLoginForm
def user_login(request):
    """Show the login form; authenticate and redirect home on a valid POST.

    An invalid POST re-renders the bound form with its errors, exactly as
    before.
    """
    if request.method != 'POST':
        return render(request, template_name='patient/login/login.html',
                      context={'form': UserLoginForm()})
    form = UserLoginForm(data=request.POST)
    if form.is_valid():
        login(request, form.get_user())
        return redirect('home')
    return render(request, template_name='patient/login/login.html', context={'form': form})
def user_logout(request):
    """Log the current user out and send them back to the login page."""
    logout(request)
    return redirect('login')
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,598 | zavyalovdv/django_hospital | refs/heads/main | /patient/admin.py | from django.contrib import admin
from .models import *
class PatientAdmin(admin.ModelAdmin):
    """Admin list view for patients: searchable by SSN/surname, with inline
    editing of status, ward, severity and discharge flag."""
    list_display = ('social_security_number', 'surname', 'name', 'second_name', 'current_status', 'ward',
                    'department', 'severity_disease', 'is_discharged')
    list_display_links = ('social_security_number', 'surname', 'name', 'second_name')
    search_fields = ('social_security_number', 'surname')
    list_editable = ('current_status', 'ward', 'severity_disease', 'is_discharged',)
    list_filter = ('current_status', 'ward', 'severity_disease', 'is_discharged',)
admin.site.register(Patient, PatientAdmin)
class DoctorAdmin(admin.ModelAdmin):
    """Admin list view for doctors."""
    list_display = ('name', 'second_name', 'surname',
                    'date_of_birth', 'experience',)
admin.site.register(Doctor, DoctorAdmin)
class DepartmentAdmin(admin.ModelAdmin):
    """Admin list view for hospital departments."""
    list_display = ('name',)
    list_display_links = ('name',)
admin.site.register(Department, DepartmentAdmin)
class WardAdmin(admin.ModelAdmin):
    """Admin list view for wards (number and phone)."""
    list_display = ('number', 'phone',)
admin.site.register(Ward, WardAdmin)
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,599 | zavyalovdv/django_hospital | refs/heads/main | /patient/const/MODELS_CONST.py | from django.core.validators import RegexValidator
""" ENUM поле для пола пациента """
# UNKNOWN_SEX = '0'
# MAN_SEX = '1'
# WOMAN_SEX = '2'
# NOT_APPLICABLE_SEX = '9'
SEX = [
('неизвестно', 'Неизвестно'),
('мужской', 'Мужской'),
('женский', 'Женский'),
('неприменимо', 'Неприменимо'),
]
""" ENUM поле для примерного возраста пациента """
# Age brackets: (stored value, human-readable label).
PRE_AGE = [
    ('менее 10', 'менее 10'),
    ('от 10 до 25', 'от 10 до 25'),
    ('от 25 до 45', 'от 25 до 45'),
    # Fix: the stored value previously duplicated 'от 25 до 45' for the
    # 'от 45 до 65' label, making the two brackets indistinguishable in
    # the database.
    ('от 45 до 65', 'от 45 до 65'),
    ('от 65 и более', 'от 65 и более'),
]
""" ENUM поле для примерного роста пациента """
PRE_HEIGHT = [
('менее 170', 'менее 170'),
('от 170 до 185', 'от 170 до 185'),
('от 185 и боле', 'от 185 и более'),
]
""" ENUM поле для цвета волос пациента """
HAIR_COLOR = [
('черные', 'Черные'),
('русые', 'Русые'),
('светлые', 'Светлые'),
('седые', 'Седые'),
('красные', 'Красные'),
('оранжевые', 'Оранжевые'),
('желтые', 'Желтые'),
('зеленые', 'Зеленые'),
('синие', 'Синие'),
('фиолетовые', 'Фиолетовые'),
('розовые', 'Розовые'),
('разноцветные', 'Разноцветные'),
('нет волос', 'Нет волос'),
]
""" Созданние ENUM поля для текущего статуса пациента """
CURRENT_STATUS = [
('в палате', 'В палате'),
('на операции', 'На операции'),
('на процедурах', 'На процедурах'),
('на приеме у врача', 'На приеме у врача'),
('неизвестно', 'Неизвестно'),
]
""" Созданние ENUM поля для способа обращения пациента """
HOW_ADMITTED = [
('направлен из поликлиники', 'Направлен из поликлиники'),
('доставлен на скорой помощи', 'Доставлен на скорой помощи'),
('обратился самостоятельно', 'Обратился самостоятельно'),
('другое', 'Другое'),
]
""" ENUM поле для оценки состояния пациента """
PATIENT_SEVERITY_DISEASE = [
('критическое', 'Критическое'),
('Тяжелое', 'Тяжелое'),
('средней тяжести', 'Средней тяжести'),
('среднее', 'Среднее'),
('хорошее', 'Хорошее'),
]
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,600 | zavyalovdv/django_hospital | refs/heads/main | /patient/migrations/0005_auto_20210402_0011.py | # Generated by Django 3.0.8 on 2021-04-01 21:11
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('patient', '0004_auto_20210402_0010'),
]
operations = [
migrations.AlterField(
model_name='movementhistory',
name='current_ward_number',
field=models.CharField(help_text='В формате - 010', max_length=3, null=True, validators=[django.core.validators.RegexValidator(message='Упс... Попробуйте снова', regex='^[0-9]{3}$')], verbose_name='Текущая палата'),
),
]
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,601 | zavyalovdv/django_hospital | refs/heads/main | /patient/templatetags/patient_tags.py | from django import template
from patient.models import *
register = template.Library()
@register.simple_tag()
def get_patients():
    """Template tag: queryset of all patients."""
    return Patient.objects.all()
@register.simple_tag()
def get_doctors():
    """Template tag: queryset of all doctors."""
    return Doctor.objects.all()
@register.simple_tag()
def get_departments():
    """Template tag: queryset of all departments."""
    return Department.objects.all()
@register.simple_tag()
def get_wards():
    """Template tag: queryset of all wards."""
    return Ward.objects.all()
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,602 | zavyalovdv/django_hospital | refs/heads/main | /patient/signals.py | from hospital.models import Patient
from django.dispatch import receiver
from django.signals import pre_save, post_save
@receiver(models.signals.pre_save)
def pre_add_movement_history(instance, sender, *args, **kwargs):
print('PRE_SAVE:')
print(sender)
print(instance)
@receiver(models.signals.post_save)
def post_add_movement_history(instance, sender, *args, **kwargs):
print('POST_SAVE:')
print(sender)
print(instance)
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,603 | zavyalovdv/django_hospital | refs/heads/main | /patient/forms.py | from django import forms
from .models import *
from django.core.exceptions import ValidationError
from django.contrib.auth.forms import AuthenticationForm
import re
class PatientForm(forms.ModelForm):
    """Model form for creating/editing a Patient.

    The two datetime fields are redeclared so they render as HTML5
    ``datetime-local`` inputs and parse the matching '%Y-%m-%dT%H:%M'
    format; everything else uses Bootstrap-styled default widgets.
    """
    admitted_hospital_date = forms.DateTimeField(
        label='Дата поступления в больницу', input_formats=['%Y-%m-%dT%H:%M'], help_text='в формате d.m.Y, H:M',
        widget=forms.DateTimeInput(format='%Y-%m-%dT%H:%M', attrs={'class': 'form-control', 'type': 'datetime-local'}))
    change_ward_date = forms.DateTimeField(
        label='Дата назначения в палату', input_formats=['%Y-%m-%dT%H:%M'], help_text='в формате d.m.Y, H:M',
        widget=forms.DateTimeInput(format='%Y-%m-%dT%H:%M', attrs={'class': 'form-control', 'type': 'datetime-local'}))
    class Meta:
        model = Patient
        fields = [
            'social_security_number', 'surname', 'name', 'second_name', 'sex', 'pre_age', 'height', 'hair_color',
            'special_signs', 'admitted_hospital_date', 'severity_disease', 'provisional_diagnosis', 'medical_history',
            'department', 'doctor', 'ward', 'change_ward_date', 'current_status', 'how_admitted', 'is_discharged',
            'discharged_hospital_date', 'cause_discharged']
        # Bootstrap 'form-control' styling for every widget.
        widgets = {
            'social_security_number': forms.TextInput(attrs={'class': 'form-control'}),
            'surname': forms.TextInput(attrs={'class': 'form-control'}),
            'name': forms.TextInput(attrs={'class': 'form-control'}),
            'second_name': forms.TextInput(attrs={'class': 'form-control'}),
            'sex': forms.Select(attrs={'class': 'form-control'}),
            'pre_age': forms.Select(attrs={'class': 'form-control'}),
            'height': forms.Select(attrs={'class': 'form-control'}),
            'hair_color': forms.Select(attrs={'class': 'form-control'}),
            'special_signs': forms.TextInput(attrs={'class': 'form-control'}),
            'admitted_hospital_date': forms.DateTimeInput(attrs={'class': 'form-control', 'type': 'date'}),
            'severity_disease': forms.Select(attrs={'class': 'form-control'}),
            'provisional_diagnosis': forms.TextInput(attrs={'class': 'form-control'}),
            'medical_history': forms.Textarea(attrs={'class': 'form-control'}),
            'department': forms.Select(attrs={'class': 'form-control'}),
            'doctor': forms.Select(attrs={'class': 'form-control'}),
            'ward': forms.Select(attrs={'class': 'form-control'}),
            'change_ward_date': forms.DateTimeInput(attrs={'class': 'form-control', 'type': 'date'}),
            'current_status': forms.Select(attrs={'class': 'form-control'}),
            'how_admitted': forms.Select(attrs={'class': 'form-control'}),
            'is_discharged': forms.CheckboxInput(attrs={'class': 'required checkbox'}),
            'discharged_hospital_date': forms.DateInput(attrs={'class': 'form-control', 'type': 'date'}),
            'cause_discharged': forms.TextInput(attrs={'class': 'form-control'}),
        }
    def clean_surname(self):
        """Reject surnames whose first character is a digit."""
        surname = self.cleaned_data['surname']
        # re.match anchors at the start, so this only checks the first char.
        if re.match(r'\d', surname):
            raise ValidationError('Фамилия не может начинаться с цифры')
        return surname
class UserLoginForm(AuthenticationForm):
    """Bootstrap-styled username/password login form."""
    username = forms.CharField(label='Имя пользователя', widget=forms.TextInput(attrs={'class': 'form-control'}))
    password = forms.CharField(label='Пароль', widget=forms.PasswordInput(attrs={'class': 'form-control'}))
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,604 | zavyalovdv/django_hospital | refs/heads/main | /patient/migrations/0001_initial.py | # Generated by Django 3.0.8 on 2021-03-31 20:12
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
# Auto-generated initial migration (Django 3.0.8). Applied migrations must not
# be hand-edited: schema fixes belong in the models (and their constants) plus
# a follow-up migration.
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
# Lookup table of hospital departments.
migrations.CreateModel(
name='Department',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, unique=True, verbose_name='Отделение')),
],
options={
'verbose_name': 'Отделение',
'verbose_name_plural': 'Отделения',
},
),
# Physicians; department FK is nullable and PROTECTed against deletion.
migrations.CreateModel(
name='Doctor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Имя')),
('second_name', models.CharField(max_length=100, verbose_name='Отчество')),
('surname', models.CharField(max_length=100, verbose_name='Фамилия')),
('date_of_birth', models.DateField(verbose_name='Дата рождения')),
('experience', models.PositiveSmallIntegerField(verbose_name='Стаж')),
('department', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='department_to_doctor', to='patient.Department', verbose_name='Отделение')),
],
options={
'verbose_name': 'Врач',
'verbose_name_plural': 'Врачи',
'ordering': ['surname'],
},
),
# Hospital wards; 3-digit ward number and 4-digit phone, both unique.
migrations.CreateModel(
name='Ward',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('number', models.CharField(help_text='В формате - 010', max_length=3, unique=True, validators=[django.core.validators.RegexValidator(message='Упс... Попробуйте снова', regex='^[0-9]{3}$')], verbose_name='Номер палаты')),
('phone', models.CharField(help_text='В формате - 0010', max_length=4, unique=True, validators=[django.core.validators.RegexValidator(message='Упс... Попробуйте снова', regex='^[0-9]{4}$')], verbose_name='Номер телефона палаты')),
('department', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='patient.Department', verbose_name='Закрепленное отделение')),
],
options={
'verbose_name': 'Палата',
'verbose_name_plural': 'Палаты',
'ordering': ['number'],
},
),
# Main patient card.
migrations.CreateModel(
name='Patient',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
# NOTE(review): max_length=12 but the validator accepts exactly 6 digits --
# confirm the intended policy in patient/const/MODELS_CONST.py.
('social_security_number', models.CharField(max_length=12, unique=True, validators=[django.core.validators.RegexValidator(regex='^[0-9]{6}$')], verbose_name='Номер страхового полиса')),
('surname', models.CharField(blank=True, max_length=100, verbose_name='Фамилия')),
('name', models.CharField(blank=True, max_length=100, verbose_name='Имя')),
('second_name', models.CharField(blank=True, max_length=100, verbose_name='Отчество')),
('sex', models.CharField(choices=[('неизвестно', 'Неизвестно'), ('мужской', 'Мужской'), ('женский', 'Женский'), ('неприменимо', 'Неприменимо')], max_length=15, verbose_name='Пол')),
# NOTE(review): duplicated stored value 'от 25 до 45' (second one labelled
# 'от 45 до 65') -- fix PRE_AGE in MODELS_CONST.py and add a new migration.
('pre_age', models.CharField(choices=[('менее 10', 'менее 10'), ('от 10 до 25', 'от 10 до 25'), ('от 25 до 45', 'от 25 до 45'), ('от 25 до 45', 'от 45 до 65'), ('от 65 и более', 'от 65 и более')], max_length=15, verbose_name='Примерный возраст в годах')),
('height', models.CharField(choices=[('менее 170', 'менее 170'), ('от 170 до 185', 'от 170 до 185'), ('от 185 и боле', 'от 185 и более')], max_length=15, verbose_name='Примерный рост, (см)')),
('hair_color', models.CharField(choices=[('черные', 'Черные'), ('русые', 'Русые'), ('светлые', 'Светлые'), ('седые', 'Седые'), ('красные', 'Красные'), ('оранжевые', 'Оранжевые'), ('желтые', 'Желтые'), ('зеленые', 'Зеленые'), ('синие', 'Синие'), ('фиолетовые', 'Фиолетовые'), ('розовые', 'Розовые'), ('разноцветные', 'Разноцветные'), ('нет волос', 'Нет волос')], max_length=15, verbose_name='Цвет волос')),
('special_signs', models.CharField(max_length=255, verbose_name='Особые приметы')),
('admitted_hospital_date', models.DateTimeField(verbose_name='Дата и время поступления в больницу')),
('severity_disease', models.CharField(choices=[('критическое', 'Критическое'), ('Тяжелое', 'Тяжелое'), ('средней тяжести', 'Средней тяжести'), ('среднее', 'Среднее'), ('хорошее', 'Хорошее')], max_length=15, verbose_name='Текущее состояние')),
('provisional_diagnosis', models.CharField(max_length=255, verbose_name='Предварительный диагноз')),
('medical_history', models.TextField(blank=True, verbose_name='История болезни')),
('change_ward_date', models.DateTimeField(verbose_name='Дата назнечения в палату')),
('current_status', models.CharField(choices=[('в палате', 'В палате'), ('на операции', 'На операции'), ('на процедурах', 'На процедурах'), ('на приеме у врача', 'На приеме у врача'), ('неизвестно', 'Неизвестно')], max_length=30, verbose_name='Текущий статус')),
('how_admitted', models.CharField(choices=[('направлен из поликлиники', 'Направлен из поликлиники'), ('доставлен на скорой помощи', 'Доставлен на скорой помощи'), ('обратился самостоятельно', 'Обратился самостоятельно'), ('другое', 'Другое')], max_length=30, verbose_name='Способ обращения')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания профиля')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Последняя дата редактирования')),
('is_discharged', models.BooleanField(default=False, verbose_name='Пациент выписан')),
('discharged_hospital_date', models.DateField(blank=True, null=True, verbose_name='Дата выписки из больницы')),
('cause_discharged', models.CharField(blank=True, max_length=100, verbose_name='Основание для выписки')),
('department', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='department_to_patient', to='patient.Department', verbose_name='Отделение')),
('doctor', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='patient_to_doctor', to='patient.Doctor', verbose_name='Лечаший врач')),
('ward', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='ward_number_to_patient', to='patient.Ward', verbose_name='Номер палаты')),
],
options={
'verbose_name': 'карточку пациента',
'verbose_name_plural': 'Карточки пациентов',
'ordering': ['-created_at'],
},
),
# Ward-transfer log.
# NOTE(review): the current MovementHistory model (patient/models.py) has
# prev_ward_number instead of next_ward_number and no unique constraints --
# presumably later migrations changed this; verify the migration chain.
migrations.CreateModel(
name='MovementHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('current_ward_number', models.CharField(help_text='В формате - 010', max_length=4, unique=True, validators=[django.core.validators.RegexValidator(message='Упс... Попробуйте снова', regex='^[0-9]{3}$')], verbose_name='Текущаяя палата')),
('next_ward_number', models.CharField(help_text='В формате - 010', max_length=3, unique=True, validators=[django.core.validators.RegexValidator(message='Упс... Попробуйте снова', regex='^[0-9]{3}$')], verbose_name='Номер будущей палаты')),
('ward_movement_date', models.DateTimeField(auto_now=True, verbose_name='Дата перемещения')),
('patient', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='patient.Patient', verbose_name='Пациент')),
],
),
# Audit copy of Patient maintained by django-simple-history.
migrations.CreateModel(
name='HistoricalPatient',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('social_security_number', models.CharField(db_index=True, max_length=12, validators=[django.core.validators.RegexValidator(regex='^[0-9]{6}$')], verbose_name='Номер страхового полиса')),
('surname', models.CharField(blank=True, max_length=100, verbose_name='Фамилия')),
('name', models.CharField(blank=True, max_length=100, verbose_name='Имя')),
('second_name', models.CharField(blank=True, max_length=100, verbose_name='Отчество')),
('sex', models.CharField(choices=[('неизвестно', 'Неизвестно'), ('мужской', 'Мужской'), ('женский', 'Женский'), ('неприменимо', 'Неприменимо')], max_length=15, verbose_name='Пол')),
# Same duplicated 'от 25 до 45' value as in Patient above.
('pre_age', models.CharField(choices=[('менее 10', 'менее 10'), ('от 10 до 25', 'от 10 до 25'), ('от 25 до 45', 'от 25 до 45'), ('от 25 до 45', 'от 45 до 65'), ('от 65 и более', 'от 65 и более')], max_length=15, verbose_name='Примерный возраст в годах')),
('height', models.CharField(choices=[('менее 170', 'менее 170'), ('от 170 до 185', 'от 170 до 185'), ('от 185 и боле', 'от 185 и более')], max_length=15, verbose_name='Примерный рост, (см)')),
('hair_color', models.CharField(choices=[('черные', 'Черные'), ('русые', 'Русые'), ('светлые', 'Светлые'), ('седые', 'Седые'), ('красные', 'Красные'), ('оранжевые', 'Оранжевые'), ('желтые', 'Желтые'), ('зеленые', 'Зеленые'), ('синие', 'Синие'), ('фиолетовые', 'Фиолетовые'), ('розовые', 'Розовые'), ('разноцветные', 'Разноцветные'), ('нет волос', 'Нет волос')], max_length=15, verbose_name='Цвет волос')),
('special_signs', models.CharField(max_length=255, verbose_name='Особые приметы')),
('admitted_hospital_date', models.DateTimeField(verbose_name='Дата и время поступления в больницу')),
('severity_disease', models.CharField(choices=[('критическое', 'Критическое'), ('Тяжелое', 'Тяжелое'), ('средней тяжести', 'Средней тяжести'), ('среднее', 'Среднее'), ('хорошее', 'Хорошее')], max_length=15, verbose_name='Текущее состояние')),
('provisional_diagnosis', models.CharField(max_length=255, verbose_name='Предварительный диагноз')),
('medical_history', models.TextField(blank=True, verbose_name='История болезни')),
('change_ward_date', models.DateTimeField(verbose_name='Дата назнечения в палату')),
('current_status', models.CharField(choices=[('в палате', 'В палате'), ('на операции', 'На операции'), ('на процедурах', 'На процедурах'), ('на приеме у врача', 'На приеме у врача'), ('неизвестно', 'Неизвестно')], max_length=30, verbose_name='Текущий статус')),
('how_admitted', models.CharField(choices=[('направлен из поликлиники', 'Направлен из поликлиники'), ('доставлен на скорой помощи', 'Доставлен на скорой помощи'), ('обратился самостоятельно', 'Обратился самостоятельно'), ('другое', 'Другое')], max_length=30, verbose_name='Способ обращения')),
('created_at', models.DateTimeField(blank=True, editable=False, verbose_name='Дата создания профиля')),
('updated_at', models.DateTimeField(blank=True, editable=False, verbose_name='Последняя дата редактирования')),
('is_discharged', models.BooleanField(default=False, verbose_name='Пациент выписан')),
('discharged_hospital_date', models.DateField(blank=True, null=True, verbose_name='Дата выписки из больницы')),
('cause_discharged', models.CharField(blank=True, max_length=100, verbose_name='Основание для выписки')),
# Bookkeeping columns added by simple_history.
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('department', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='patient.Department', verbose_name='Отделение')),
('doctor', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='patient.Doctor', verbose_name='Лечаший врач')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('ward', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='patient.Ward', verbose_name='Номер палаты')),
],
options={
'verbose_name': 'historical карточку пациента',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
]
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,605 | zavyalovdv/django_hospital | refs/heads/main | /patient/tests/urls/tests.py | import unittest
from django.test import Client
class SimpleLoginTest(unittest.TestCase):
    """Smoke tests: each public URL should answer a plain GET with HTTP 200.

    NOTE(review): for an anonymous client, paths like '/admin', '/patients'
    or '/doctors' typically answer with a 301/302 redirect (APPEND_SLASH or
    a login redirect) rather than 200 -- confirm the expected status codes
    against the project's URL conf before relying on these tests.
    """

    def _assert_get_ok(self, path):
        # Shared helper replacing six identical copies of client/get/assert.
        client = Client()
        response = client.get(path)
        self.assertEqual(response.status_code, 200)

    def test_home(self):
        self._assert_get_ok('/')

    def test_admin(self):
        self._assert_get_ok('/admin')

    def test_login(self):
        self._assert_get_ok('/login')

    def test_api_patients(self):
        self._assert_get_ok('/api/patients')

    def test_patients(self):
        self._assert_get_ok('/patients')

    def test_doctors(self):
        self._assert_get_ok('/doctors')
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,606 | zavyalovdv/django_hospital | refs/heads/main | /patient/api/urls.py | from django.urls import path
from .views import *
# REST-style routes for the patient API; views come from patient/api/views.py.
# NOTE(review): trailing slashes are inconsistent ('doctor/<int:pk>' and
# 'department/<int:pk>' have none, the other detail routes do) -- URLs are a
# public interface, so confirm the intended form before normalizing.
urlpatterns = [
path('patients/', APIPatientsListView.as_view(), name='api_patients'),
path('patient/<int:pk>/', APIPatientDetailView.as_view(), name='api_patient'),
path('doctors/', APIDoctorsListView.as_view(), name='api_doctors'),
path('doctor/<int:pk>', APIDoctorDetailView.as_view(), name='api_doctor'),
path('departments/', APIDepartmentsListView.as_view(), name='api_departments'),
path('department/<int:pk>', APIDepartmentDetailView.as_view(), name='api_department'),
path('wards/', APIWardsListView.as_view(), name='api_wards'),
path('ward/<int:pk>/', APIWardDetailView.as_view(), name='api_ward'),
# NOTE(review): 'create-patient/' reuses APIPatientsListView; presumably the
# list view also handles POST for creation -- verify against the view class.
path('create-patient/', APIPatientsListView.as_view(), name='api_create_patient'),
] | {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,607 | zavyalovdv/django_hospital | refs/heads/main | /patient/views.py | from django.shortcuts import render, redirect, HttpResponse, get_object_or_404
from django.views.generic.edit import CreateView, UpdateView
from django.views.generic import ListView, DetailView, DeleteView, FormView
from django.views.generic.detail import SingleObjectMixin
from django.dispatch import receiver
from django.contrib.auth.mixins import LoginRequiredMixin
from patient.models import *
from .forms import PatientForm, UserLoginForm
# Landing page. Uses ListView only as a template-rendering shell.
class HomePage(ListView):
template_name = 'patient/home/home.html'
#Required method
# NOTE(review): get_queryset is normally expected to return a QuerySet or
# iterable; returning an HttpResponse is unconventional and presumably only
# works because the home template never iterates the object list -- confirm
# before refactoring (e.g. to TemplateView).
def get_queryset(self):
return HttpResponse('')
class PatientsList(LoginRequiredMixin, ListView):
    """Paginated list of patients who are still in the hospital.

    The previous ``get_context_data`` override was removed: it executed an
    unused ``Patient.history.all()`` query on every request and re-set
    ``context['title']`` to the same value already supplied by
    ``extra_context``.
    """
    login_url = '/login/'
    redirect_field_name = 'redirect_to'
    paginate_by = 20
    model = Patient
    template_name = 'patient/patient/patients_list.html'
    extra_context = {
        'title': 'Список пациентов',
    }
    allow_empty = False
    context_object_name = 'object'

    def get_queryset(self):
        # select_related avoids one extra query per row for the three FKs.
        return Patient.objects.filter(is_discharged=False).select_related('ward', 'doctor', 'department')
class PatientDetail(LoginRequiredMixin, DetailView):
    """Patient card page with the patient's ward-movement log.

    Cleanup: the old ``get_context_data`` re-fetched the patient that
    DetailView had already loaded (``self.object``) and contained a leftover
    debug ``print`` comparing ``ward`` with ``was_ward``; both removed.
    """
    login_url = '/login/'
    redirect_field_name = 'redirect_to'
    model = Patient
    form_class = PatientForm
    template_name = 'patient/patient/patient_detail.html'
    context_object_name = 'object'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # DetailView has already fetched the object -- no second query needed.
        context['patient'] = self.object
        context['movement'] = MovementHistory.objects.filter(patient=self.kwargs['pk'])
        return context
class PatientUpdate(LoginRequiredMixin, UpdateView):
    """Edit an existing patient card via PatientForm.

    The previous ``get_context_data`` override only called ``super()`` and
    returned the result unchanged, so it was removed.
    """
    login_url = '/login/'
    redirect_field_name = 'redirect_to'
    model = Patient
    form_class = PatientForm
    template_name = 'patient/patient/patient_update.html'
# Creation page for a new patient card, driven entirely by PatientForm;
# all behavior comes from CreateView defaults.
class PatientCreate(LoginRequiredMixin, CreateView):
login_url = '/login/'
redirect_field_name = 'redirect_to'
model = Patient
form_class = PatientForm
template_name = 'patient/patient/add_patient.html'
# List of all doctors; login required, empty list renders a 404 (allow_empty).
class DoctorsList(LoginRequiredMixin, ListView):
login_url = '/login/'
redirect_field_name = 'redirect_to'
model = Doctor
template_name = 'patient/doctor/doctors_list.html'
extra_context = {
'title': 'Список врачей',
}
allow_empty = False
def get_queryset(self):
# select_related pulls each doctor's department in the same query.
return Doctor.objects.all().select_related('department')
class DoctorDetail(LoginRequiredMixin, DetailView):
    """Doctor profile page, including the patients assigned to this doctor."""
    login_url = '/login/'
    redirect_field_name = 'redirect_to'
    model = Doctor
    template_name = 'patient/doctor/doctor.html'
    context_object_name = 'doctor'

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(**kwargs)
        # Patients whose 'doctor' FK points at the doctor being displayed.
        context['patient'] = Patient.objects.filter(doctor=self.kwargs['pk'])
        return context
# List of all hospital departments; login required.
class DepartmentsList(LoginRequiredMixin, ListView):
login_url = '/login/'
redirect_field_name = 'redirect_to'
model = Department
template_name = 'patient/department/departments_list.html'
extra_context = {
'title': 'Список отделений',
}
allow_empty = False
class DepartmentDetail(LoginRequiredMixin, DetailView):
    """Department page listing its doctors and patients.

    Consistency fix: ``login_url``/``redirect_field_name`` added to match
    every other protected view in this module (the class previously relied
    on the project-wide LOGIN_URL default).
    """
    login_url = '/login/'
    redirect_field_name = 'redirect_to'
    model = Department
    template_name = 'patient/department/department.html'

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(**kwargs)
        # Doctors and patients attached to this department.
        context['doctor'] = Doctor.objects.filter(department=self.kwargs['pk'])
        context['patient'] = Patient.objects.filter(department=self.kwargs['pk'])
        return context
# List of all wards; login required.
class WardsList(LoginRequiredMixin, ListView):
login_url = '/login/'
redirect_field_name = 'redirect_to'
model = Ward
template_name = 'patient/ward/wards_list.html'
extra_context = {
'title': 'Список палат',
}
allow_empty = False
def get_queryset(self):
# select_related pulls each ward's department in the same query.
return Ward.objects.all().select_related('department')
class WardDetail(LoginRequiredMixin, DetailView):
    """Ward page listing the patients currently assigned to the ward.

    Consistency fix: ``login_url``/``redirect_field_name`` added to match
    every other protected view in this module.
    """
    login_url = '/login/'
    redirect_field_name = 'redirect_to'
    model = Ward
    template_name = 'patient/ward/ward.html'
    context_object_name = 'ward'

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(**kwargs)
        context['patient'] = Patient.objects.filter(ward=self.kwargs['pk'])
        return context
class HistoryDetail(LoginRequiredMixin, DetailView):
    """Read-only page showing a patient's ward-movement history.

    BUGFIX: this view previously subclassed ``DeleteView``, so a POST to the
    page would attempt to delete the patient -- and then crash with
    ImproperlyConfigured, since no ``success_url`` was defined. The view only
    ever renders history, so ``DetailView`` is the correct base class.
    """
    login_url = '/login/'
    redirect_field_name = 'redirect_to'
    model = Patient
    template_name = 'patient/history/history.html'
    context_object_name = 'patient'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['patient_history'] = MovementHistory.objects.filter(patient=self.kwargs['pk'])
        return context
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,608 | zavyalovdv/django_hospital | refs/heads/main | /patient/models.py | import logging
from django.db import models
from .const.MODELS_CONST import *
from django.core.validators import RegexValidator
from django.urls import reverse
from simple_history.models import HistoricalRecords
from django.utils import timezone
from django.core.exceptions import ObjectDoesNotExist
# Module-level logger for model-layer warnings (see Patient.__init__/save).
logger = logging.getLogger(__name__)
# One row per ward-to-ward transfer; rows are created by Patient.save()
# whenever the patient's ward differs from the ward the instance was loaded
# with. All fields are nullable, so partial records are possible.
class MovementHistory(models.Model):
prev_ward_number = models.CharField(verbose_name='Предыдущая палата', max_length=4, validators=[
RegexValidator(regex='^[0-9]{3}$', message='Упс... Попробуйте снова')], help_text='В формате - 010', null=True)
current_ward_number = models.CharField(verbose_name='Текущая палата', max_length=3, validators=[
RegexValidator(regex='^[0-9]{3}$', message='Упс... Попробуйте снова')], help_text='В формате - 010', null=True)
patient = models.ForeignKey('Patient', verbose_name='Пациент', on_delete=models.CASCADE, null=True)
ward_movement_date = models.DateTimeField(verbose_name='Дата перемещения', null=True)
def get_absolute_url(self):
# NOTE(review): assumes a URL pattern named 'movementhistory' exists;
# it is not visible from this file -- verify against patient/urls.py.
return reverse('movementhistory', kwargs={'pk': self.pk})
# Hospital ward: unique 3-digit number, unique 4-digit phone, and the
# department it belongs to (PROTECTed against department deletion).
class Ward(models.Model):
number = models.CharField('Номер палаты', max_length=3, unique=True, validators=[
RegexValidator(regex='^[0-9]{3}$', message='Упс... Попробуйте снова')], help_text='В формате - 010')
phone = models.CharField('Номер телефона палаты', max_length=4, unique=True, validators=[
RegexValidator(regex='^[0-9]{4}$', message='Упс... Попробуйте снова')], help_text='В формате - 0010')
department = models.ForeignKey('Department', verbose_name='Закрепленное отделение', on_delete=models.PROTECT)
class Meta:
verbose_name = 'Палата'
verbose_name_plural = 'Палаты'
ordering = ['number']
def get_absolute_url(self):
return reverse('ward', kwargs={'pk': self.pk})
def __str__(self):
return self.number
# Hospital department; referenced by Ward, Doctor and Patient.
class Department(models.Model):
name = models.CharField('Отделение', max_length=100, unique=True)
class Meta:
verbose_name = 'Отделение'
verbose_name_plural = 'Отделения'
def get_absolute_url(self):
return reverse('department', kwargs={'pk': self.pk})
def __str__(self):
return self.name
class Doctor(models.Model):
    """A physician, optionally attached to a department."""
    name = models.CharField('Имя', max_length=100, )
    second_name = models.CharField(
        'Отчество', max_length=100, )
    surname = models.CharField('Фамилия', max_length=100)
    date_of_birth = models.DateField('Дата рождения')
    experience = models.PositiveSmallIntegerField('Стаж')
    # PROTECT: a department with doctors cannot be deleted.
    department = models.ForeignKey(
        Department, verbose_name='Отделение', on_delete=models.PROTECT, null=True, related_name='department_to_doctor')

    class Meta:
        verbose_name = 'Врач'
        verbose_name_plural = 'Врачи'
        ordering = ['surname']

    def get_absolute_url(self):
        return reverse('doctor', kwargs={'pk': self.pk})

    def __str__(self):
        # f-string instead of str.format, for consistency with Patient.__str__.
        return f'{self.surname} {self.name} {self.second_name}'
class Patient(models.Model):
    """Patient card; ward changes are mirrored into MovementHistory on save.

    Change tracking works by remembering the ward the instance was loaded
    with (``was_ward``, set in ``__init__``) and comparing it against
    ``self.ward`` in ``save()``.
    """
    social_security_number = models.CharField(
        'Номер страхового полиса', max_length=12, unique=True, validators=[RegexValidator(regex='^[0-9]{6}$')], )
    surname = models.CharField('Фамилия', max_length=100, blank=True)
    name = models.CharField('Имя', max_length=100, blank=True)
    second_name = models.CharField('Отчество', max_length=100, blank=True)
    sex = models.CharField('Пол', max_length=15, choices=SEX)
    pre_age = models.CharField('Примерный возраст в годах', max_length=15, choices=PRE_AGE)
    height = models.CharField('Примерный рост, (см)', max_length=15, choices=PRE_HEIGHT)
    hair_color = models.CharField('Цвет волос', max_length=15, choices=HAIR_COLOR)
    special_signs = models.CharField('Особые приметы', max_length=255, )
    admitted_hospital_date = models.DateTimeField('Дата и время поступления в больницу', blank=True, null=True)
    severity_disease = models.CharField('Текущее состояние', max_length=15, choices=PATIENT_SEVERITY_DISEASE, blank=True, null=True)
    provisional_diagnosis = models.CharField('Предварительный диагноз', max_length=255)
    medical_history = models.TextField('История болезни', blank=True)
    department = models.ForeignKey(
        Department, verbose_name='Отделение', on_delete=models.PROTECT, related_name='department_to_patient')
    doctor = models.ForeignKey(
        Doctor, verbose_name='Лечаший врач', on_delete=models.PROTECT, related_name='patient_to_doctor')
    ward = models.ForeignKey(
        Ward, verbose_name='Номер палаты', on_delete=models.PROTECT, related_name='ward_number_to_patient')
    change_ward_date = models.DateTimeField(verbose_name='Дата назнечения в палату')
    current_status = models.CharField('Текущий статус', max_length=30, choices=CURRENT_STATUS, )
    how_admitted = models.CharField('Способ обращения', max_length=30, choices=HOW_ADMITTED)
    created_at = models.DateTimeField('Дата создания профиля', auto_now_add=True)
    updated_at = models.DateTimeField('Последняя дата редактирования', auto_now=True)
    is_discharged = models.BooleanField('Пациент выписан', default=False)
    discharged_hospital_date = models.DateField('Дата выписки из больницы', blank=True, null=True)
    cause_discharged = models.CharField('Основание для выписки', max_length=100, blank=True)
    movement_date = models.DateTimeField('Дата перемещения', blank=True, null=True)
    # Full audit trail via django-simple-history.
    history = HistoricalRecords()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remember the ward the instance was created/loaded with. On a fresh
        # instance the FK is unset and reading self.ward raises a subclass of
        # ObjectDoesNotExist; `was_ward` is then deliberately left unset and
        # save() skips the transfer check via hasattr.
        try:
            self.was_ward = self.ward
        except ObjectDoesNotExist:
            logger.warning('Patient, atribute "was_ward" does_not_exist')
            logger.warning(self.__dict__)

    def get_absolute_url(self):
        return reverse('patient', kwargs={'pk': self.pk})

    def __str__(self):
        return f'{self.surname} {self.name} {self.second_name}'

    def save(self, *args, **kwargs):
        """Persist the patient, logging a MovementHistory row on ward change.

        BUGFIX: the previous implementation used ``return super().save(...)``
        inside a ``finally`` block, which silently discarded ANY exception
        raised while detecting the transfer (including the AttributeError
        from a missing ``was_ward`` on new instances). The explicit
        ``hasattr`` guard below keeps the same net behaviour -- new instances
        save without a movement record -- without suppressing unrelated
        errors.
        """
        try:
            if hasattr(self, 'was_ward') and self.ward != self.was_ward:
                self.movement_date = timezone.now()
                MovementHistory.objects.create(prev_ward_number=self.was_ward, current_ward_number=self.ward,
                                               patient=self, ward_movement_date=self.movement_date)
                self.change_ward_date = timezone.now()
        except ObjectDoesNotExist:
            # self.ward can still raise if the FK is unset; log and save
            # anyway, matching the old behaviour as closely as possible.
            logger.warning('Patient, atribute "was_ward" does_not_exist')
            logger.warning(self.__dict__)
        return super().save(*args, **kwargs)

    class Meta:
        verbose_name = 'Карточка пациента'
        verbose_name_plural = 'Карточки пациентов'
        ordering = ['-created_at']
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,609 | zavyalovdv/django_hospital | refs/heads/main | /hospital/settings.py | import os
# --- Core project configuration --------------------------------------------
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Secret key is read from the environment, never hard-coded.
SECRET_KEY = os.getenv('django_hospital_secret_key')
# NOTE(review): DEBUG=True combined with ALLOWED_HOSTS=['*'] must not reach
# production -- confirm per-environment overrides.
DEBUG = True
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Project app plus third-party: model audit trail, debug toolbar, DRF.
'patient.apps.PatientConfig',
'simple_history',
'debug_toolbar',
'rest_framework',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# Attaches request.user to simple_history records.
'simple_history.middleware.HistoryRequestMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ROOT_URLCONF = 'hospital.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'hospital.wsgi.application'
# PostgreSQL backend; credentials come from the environment.
# NOTE(review): the DB host is hard-coded -- consider moving it to env too.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django_hospital',
'USER': os.getenv('db_username'),
'PASSWORD': os.getenv('db_password'),
'HOST': 'devsrv1.zvproject.ru',
'PORT': '5432',
}
}
# Standard Django password strength validators.
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Accepted datetime input formats; the first entry is the Russian-style
# 'dd.mm.yyyy, HH:MM' used by the project's forms.
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y, %H:%M', # '01.01.2020, 04:33'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
]
# Localization: Russian UI, Moscow time, timezone-aware datetimes.
LANGUAGE_CODE = 'ru'
TIME_ZONE = 'Europe/Moscow'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
#STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Regex lists consumed by patient.access.RequireLoginMiddleware: everything
# requires login except /login*.
LOGIN_REQUIRED_URLS = (
r'/(.*)$',
)
LOGIN_REQUIRED_URLS_EXCEPTIONS = (
r'/login(.*)$',
)
# Root logger at WARNING, written to warning.log (console handler kept
# commented out for reference).
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
# 'console': {
# 'format': '%(name)-12s %(levelname)-8s %(message)s'
# },
'file': {
'format': '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
}
},
'handlers': {
# 'console': {
# 'class': 'logging.StreamHandler',
# 'formatter': 'console'
# },
'file': {
'level': 'WARNING',
'class': 'logging.FileHandler',
'formatter': 'file',
'filename': 'warning.log'
}
},
'loggers': {
'': {
'level': 'WARNING',
# 'handlers': ['console', 'file']
'handlers': ['file']
}
}
}
# Hosts allowed to see the debug toolbar.
INTERNAL_IPS = [
'127.0.0.1',
]
# Redis/Celery wiring.
# NOTE(review): REDIS_HOST is 'cache' here, but hospital/celery.py passes an
# explicit broker 'redis://redis:6379/0' -- confirm which hostname is correct
# for the deployment (docker-compose service name?).
REDIS_HOST = 'cache'
REDIS_PORT = '6379'
CELERY_BROKER_URL = 'redis://' + REDIS_HOST + ':' + REDIS_PORT + '/0'
BROKER_URL = 'redis://' + REDIS_HOST + ':' + REDIS_PORT + '/0'
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600}
CELERY_RESULT_BACKEND = 'redis://' + REDIS_HOST + ':' + REDIS_PORT + '/0'
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,610 | zavyalovdv/django_hospital | refs/heads/main | /hospital/celery.py | import os
import subprocess
from celery import Celery
from celery.schedules import crontab
# Point Celery at the Django settings module before creating the app.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hospital.settings')
# NOTE(review): the explicit broker host 'redis' differs from
# settings.REDIS_HOST ('cache'), and config_from_object below also loads
# CELERY_* settings -- confirm which broker URL actually wins.
app = Celery("hospital", broker='redis://redis:6379/0')
app.config_from_object('django.conf:settings', namespace='CELERY')
# Discover tasks.py modules in all installed Django apps.
app.autodiscover_tasks()
@app.task
def test():
    """Smoke-test task: emits a fixed message so a running worker can be verified."""
    message = 'Celery testing...'
    print(message)
@app.task
def run_weekly_db_backup():
    """Run the database backup shell script as a Celery task.

    Fixes: the original plain function was referenced from the beat
    schedule but never registered with Celery (no ``@app.task``), so it
    could not be dispatched by the scheduler. Also uses ``subprocess.run``
    with ``check=True`` so a failing backup raises ``CalledProcessError``
    and shows up in worker logs instead of being silently ignored.

    Raises:
        subprocess.CalledProcessError: if the backup script exits non-zero.
    """
    # List argv, shell=False (default): no shell interpolation of the path.
    subprocess.run(["patient/tasks/backup/db_backup.sh"], check=True)
# Celery Beat periodic schedule.
# Tasks registered with a plain ``@app.task`` decorator get auto-generated
# names of the form '<module>.<function>', i.e. 'hospital.celery.test' —
# the previous 'tasks.*' entries did not match any registered task, so Beat
# could never dispatch them (NotRegistered errors at runtime).
app.conf.beat_schedule = {
    # Saturday 23:00: worker liveness smoke test.
    'add-test-night-weekly': {
        'task': 'hospital.celery.test',
        'schedule': crontab(hour=23, minute=0, day_of_week=6),
        'args': (),
    },
    # Saturday 00:00: weekly database backup.
    'add-backup-night-weekly': {
        'task': 'hospital.celery.run_weekly_db_backup',
        'schedule': crontab(hour=0, minute=0, day_of_week=6),
        'args': (),
    },
}
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
64,611 | zavyalovdv/django_hospital | refs/heads/main | /patient/access.py | import re
from django.conf import settings
from django.contrib.auth.decorators import login_required
class RequireLoginMiddleware:
    """Force authentication on configured URL patterns.

    Any request whose path matches a regex in
    ``settings.LOGIN_REQUIRED_URLS`` requires a logged-in user, unless the
    path also matches one of ``settings.LOGIN_REQUIRED_URLS_EXCEPTIONS``
    (e.g. the login page itself, per the settings in this project).

    Fixes relative to the original:
    - ``request.user.is_authenticated`` is a property since Django 1.10;
      calling it as a method raises TypeError on Django 3.0+.
    - Accepts ``get_response`` and implements ``__call__`` so the class
      works with the new-style ``MIDDLEWARE`` setting; ``get_response``
      defaults to ``None`` for backward compatibility with old-style
      instantiation.
    """

    def __init__(self, get_response=None):
        self.get_response = get_response
        # Compile the URL patterns once at startup; matched on every request.
        self.required = tuple(re.compile(url) for url in settings.LOGIN_REQUIRED_URLS)
        self.exceptions = tuple(re.compile(url) for url in settings.LOGIN_REQUIRED_URLS_EXCEPTIONS)

    def __call__(self, request):
        # New-style middleware entry point: pass the request down the chain.
        return self.get_response(request)

    def process_view(self, request, view_func, view_args, view_kwargs):
        """Gate view execution behind authentication for protected URLs.

        Returns ``None`` to continue normal processing (authenticated user,
        exempt URL, or unprotected URL); otherwise invokes the view wrapped
        in ``login_required``, which redirects anonymous users to login.
        """
        if request.user.is_authenticated:
            return None
        # Exceptions win over required patterns: exempt URLs pass through.
        for url in self.exceptions:
            if url.match(request.path):
                return None
        for url in self.required:
            if url.match(request.path):
                return login_required(view_func)(request, *view_args, **view_kwargs)
        return None
| {"/patient/authentification.py": ["/patient/forms.py"], "/patient/admin.py": ["/patient/models.py"], "/patient/templatetags/patient_tags.py": ["/patient/models.py"], "/patient/forms.py": ["/patient/models.py"], "/patient/api/urls.py": ["/patient/api/views.py"], "/patient/views.py": ["/patient/models.py", "/patient/forms.py"], "/patient/models.py": ["/patient/const/MODELS_CONST.py"], "/patient/urls.py": ["/patient/views.py", "/patient/authentification.py"], "/patient/api/serializers.py": ["/patient/models.py"], "/patient/api/views.py": ["/patient/models.py", "/patient/api/serializers.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.