index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
16,800 | 044ca78bdc03c5e5553f12af8249c71c04d7a9a7 | #! python3
from pyforms.gui.appmanager import start_app
from pyforms.gui.basewidget import BaseWidget
from pyforms.gui.controls.ControlButton import ControlButton
from pyforms.gui.controls.ControlDir import ControlDir
from pyforms.gui.controls.ControlFile import ControlFile
from pyforms.gui.controls.ControlList import ControlList
from pyforms.gui.controls.ControlText import ControlText
from pyforms.gui.controls.ControlTextArea import ControlTextArea
from git import Repo, InvalidGitRepositoryError, GitCommandError
from AnyQt import QtCore
from AnyQt.QtWidgets import QFileDialog
import os
# Point git at the askpass helper script.
# Raw string avoids invalid-escape DeprecationWarnings for the backslashed
# Windows path (value is byte-identical to the old literal).
# NOTE(review): hard-coded per-user path — consider making this configurable.
os.environ['GIT_ASKPASS'] = \
    r'D:\Andy\Documents\PyGitLatex\pygitlatex_gitaskpass.py'
class MessageWindow(BaseWidget):
    """Minimal pop-up that shows a message with a single Okay button."""

    def __init__(self, msg, title='PyGitLatex Message'):
        super(MessageWindow, self).__init__(title)
        self.set_margin(10)
        # One dismiss button; clicking it simply closes the window.
        self.btnOk = ControlButton('Okay')
        self.btnOk.value = self.close
        # Blank spacer cells centre the button beneath the message text.
        self.formset = [msg, (' ', 'btnOk', ' ')]
class YesNoDialog(BaseWidget):
    """Yes/No pop-up: runs the matching optional callback, then closes."""

    def __init__(self, msg, yes_action=None, no_action=None,
                 title='PyGitLatex Dialog'):
        super(YesNoDialog, self).__init__(title)
        # Optional callables invoked before the dialog closes.
        self.yes_action = yes_action
        self.no_action = no_action
        self.set_margin(10)
        self.btnYes = ControlButton('Yes')
        self.btnNo = ControlButton('No')
        self.btnYes.value = self.yes_clicked
        self.btnNo.value = self.no_clicked
        self.formset = [msg, (' ', 'btnYes', 'btnNo', ' ')]

    def yes_clicked(self):
        """Run the yes callback (when provided) and dismiss the dialog."""
        action = self.yes_action
        if action:
            action()
        self.close()

    def no_clicked(self):
        """Run the no callback (when provided) and dismiss the dialog."""
        action = self.no_action
        if action:
            action()
        self.close()
class GitAddFiles(BaseWidget):
    """Window listing modified/untracked files so the user can `git add` them."""

    def __init__(self, repo):
        super(GitAddFiles, self).__init__('Git: Add Files')
        self.repo = repo
        # Tracked files with unstaged modifications (diff of index vs working tree).
        self.lstModFiles = ControlList('Modified Files:')
        self.lstModFiles.readonly = True
        for diff_item in self.repo.index.diff(None):
            self.lstModFiles += [diff_item.a_path]
        # Files git does not know about yet.
        self.lstNewFiles = ControlList('Untracked Files:')
        self.lstNewFiles.readonly = True
        for path in self.repo.untracked_files:
            self.lstNewFiles += [path]
        self.btnAddFiles = ControlButton('Add Files')
        self.btnAddFiles.value = self.add_files
        self.btnAddAllFiles = ControlButton('Add All Files')
        self.btnAddAllFiles.value = self.add_all_files
        self.btnCancel = ControlButton('Cancel')
        self.btnCancel.value = self.close
        self.set_margin(10)
        self.formset = ['info:Select files to add to the project.',
                        'lstModFiles',
                        'lstNewFiles',
                        ('btnAddAllFiles', 'btnAddFiles'),
                        'btnCancel']

    def add_files(self):
        """Stage only the rows the user selected in either list."""
        selected = [self.lstModFiles.value[i]
                    for i in self.lstModFiles.selected_rows_indexes]
        selected.extend(self.lstNewFiles.value[i]
                        for i in self.lstNewFiles.selected_rows_indexes)
        for path in selected:
            self.repo.git.add(path)
        self.close()

    def add_all_files(self):
        """Stage every untracked file and every modified tracked file."""
        for path in self.repo.untracked_files:
            self.repo.git.add(path)
        for diff_item in self.repo.index.diff(None):
            self.repo.git.add(diff_item.a_path)
        self.close()
class GitCommit(BaseWidget):
    """Window that collects a commit message and runs `git commit -m`."""

    def __init__(self, repo):
        super(GitCommit, self).__init__('Git: Commit message')
        self.repo = repo
        self.txtCommitMsg = ControlText()
        self.btnCommit = ControlButton('Commit')
        self.btnCommit.value = self.git_commit
        self.btnCancel = ControlButton('Cancel')
        self.btnCancel.value = self.close
        self.set_margin(10)
        self.formset = ['info:Input a commit message',
                        'txtCommitMsg', ('btnCommit', 'btnCancel')]

    def git_commit(self):
        """Commit with the entered message; refuse an empty message."""
        message = self.txtCommitMsg.value
        if message:
            self.repo.git.commit('-m', message)
            self.close()
        else:
            # Window stays open so the user can retype the message.
            MessageWindow('Error: Cannot have an empty commit '
                          'message. Please try again.').show()
class PyGitLatex(BaseWidget):
    """Main window: pairs a project directory's git repository with
    (planned) latex build tooling.

    Fixes in this revision:
      * ``create_project_remote_rpo`` typo in :meth:`check_repo` raised
        AttributeError when offering to create a remote.
      * :meth:`parse_git_command` no longer crashes on an empty command
        and no longer swallows SystemExit/KeyboardInterrupt (bare except).

    The Latex tab's buttons are created and laid out but not yet wired
    to handlers.
    """

    def __init__(self):
        super(PyGitLatex, self).__init__('PyGitLatex')
        # basic data attributes
        self.repo = None              # git.Repo for the open project
        self.rgit = None              # repo.git command wrapper
        self.remote_name = 'origin'
        self.branch_name = 'master'
        self.local_proj_name = None   # last path component of the project dir
        # define controls
        self.dirProjectDir = ControlDir('Project Directory')
        self.dirProjectDir.click = self.set_project_dir
        self.btnGitStatus = ControlButton('Status')
        self.btnGitStatus.value = self.git_status
        self.btnGitAdd = ControlButton('Add')
        self.btnGitAdd.value = self.git_add
        self.btnGitCommit = ControlButton('Commit')
        self.btnGitCommit.value = self.git_commit
        self.btnGitLog = ControlButton('Log')
        self.btnGitLog.value = self.git_log
        self.btnGitPull = ControlButton('Pull')
        self.btnGitPull.value = self.git_pull
        self.btnGitPush = ControlButton('Push')
        self.btnGitPush.value = self.git_push
        self.txaGitConsole = ControlTextArea('Git Output')
        self.txtGitCommand = ControlText('Git Command')
        self.txtGitCommand.key_pressed_event = self.check_git_command_event
        self.btnGitRun = ControlButton('Run Command')
        self.btnGitRun.value = self.parse_git_command
        # NOTE(review): 'Ouput' label typo kept as-is (user-facing string).
        self.btnGitClear = ControlButton('Clear Ouput')
        self.btnGitClear.value = self.clear_git_console
        self.filTexFile = ControlFile('Latex File')
        self.btnTexCompile = ControlButton('Compile')
        self.btnTexView = ControlButton('View')
        self.btnTexEdit = ControlButton('Edit')
        self.btnTexBlame = ControlButton('Blame')
        self.btnTexSrcDiff = ControlButton('Source Diff')
        self.btnTexPdfDiff = ControlButton('PDF Diff')
        self.txaTexConsole = ControlTextArea('Latex Output')
        self.txtTexCommand = ControlText('Latex Command')
        self.btnTexRun = ControlButton('Run Command')
        self.btnTexClear = ControlButton('Clear Ouput')
        # set up the layout of the GUI (two tabs: 'a:Git' and 'b:Latex')
        self.set_margin(10)
        self.formset = [
            'dirProjectDir',
            {'a:Git':
                [('btnGitStatus', 'btnGitAdd', 'btnGitCommit'),
                 ('btnGitLog', 'btnGitPull', 'btnGitPush'),
                 'txaGitConsole',
                 'txtGitCommand',
                 (' ', 'btnGitClear', 'btnGitRun')],
             'b:Latex':
                ['filTexFile',
                 ('btnTexCompile', 'btnTexView', 'btnTexEdit'),
                 ('btnTexBlame', 'btnTexSrcDiff', 'btnTexPdfDiff'),
                 'txaTexConsole',
                 'txtTexCommand',
                 (' ', 'btnTexClear', 'btnTexRun')]
             }
        ]
        self.mainmenu = [
            {'File': [{'Initialize Project': self.init_project},
                      {'Clone Project': self.clone_project},
                      '-',
                      {'Exit': self.exit_app}]
             }
        ]

    def set_project_dir(self):
        """Open the directory picker, then bind the chosen git repository."""
        ControlDir.click(self.dirProjectDir)
        try:
            self.repo = Repo(self.dirProjectDir.value)
            self.rgit = self.repo.git
            self.local_proj_name = self.dirProjectDir.value.split(os.sep)[-1]
        except InvalidGitRepositoryError:
            msg = 'No git repository was detected in this directory, ' \
                  + 'would you like to initialize one here?'
            title = 'Warning: No git repository detected'
            ynwin = YesNoDialog(msg,
                                lambda: self.init_project(self.dirProjectDir.value),
                                self.no_git_repo_detected, title)
            ynwin.show()

    def init_project(self, directory=None):
        """``git init`` a repository in *directory*, prompting if None."""
        if not directory:
            directory = str(QFileDialog.getExistingDirectory())
        self.dirProjectDir.value = directory
        self.repo = Repo.init(directory)
        self.rgit = self.repo.git
        self.local_proj_name = directory.split(os.sep)[-1]

    def no_git_repo_detected(self):
        """Reset the directory field and explain the options to the user."""
        self.dirProjectDir.value = ''
        self.update_git_console(output='Please select a directory with an '
                                + 'existing project git repository or initialize a new one.'
                                + 'In order to clone an existing project, please use the '
                                + '"File->Clone Project" command.')

    def check_repo(self, level=3):
        """Validate prerequisites before running a git action.

        level >= 1: a repository must be open.
        level >= 2: the expected remote must exist (offers to create it).
        Returns True when all requested checks pass.
        """
        check = True
        # level one check: do we have a project open, if not ask the user to
        # open one
        if level >= 1 and check:
            if self.repo:
                check = True
            else:
                check = False
                self.update_git_console(output='Please open a project.')
        # level two check: is there a correctly named remote in the repo?
        # If not, ask if they want to create one
        if level >= 2 and check:
            try:
                self.repo.remotes[self.remote_name]
                check = True
            except IndexError:
                check = False
                ynwin = YesNoDialog(['No project remote was detected.',
                                     'Currently looking for a remote called: '
                                     + self.remote_name,
                                     'Would you like to create a new remote repository?',
                                     'Select no if you want to try and fix the problem '
                                     + ' a different way.'],
                                    # BUG FIX: was self.create_project_remote_rpo
                                    # (typo) which raised AttributeError here.
                                    yes_action=self.create_project_remote_repo,
                                    title='No Remote Detected')
                ynwin.show()
        return check

    def check_git_failure(self, err):
        """Try to diagnose a failed pull/push before surfacing the raw error."""
        # check for polished remote url fix... (at the least) this can happen
        # on windows when the python and git installations are mixed between
        # native windows and cygwin
        curr_url = self.repo.remotes[self.remote_name].url
        polished_url = self.rgit.polish_url(curr_url)
        if curr_url != polished_url:
            YesNoDialog('There seems to be a problem with the git repo \n'
                        + 'git repo set up. Remote URL polishing has been \n'
                        + 'detected as a possible fix. Would you like to \n'
                        + 'try? The current remote URL will be replaced by \n'
                        + 'the polished one. Selecte No to view the git \n'
                        + 'error instead.\n\n'
                        + 'Current URL: ' + curr_url + '\n'
                        + 'Polished URL: ' + polished_url,
                        yes_action=self.polish_remote_url,
                        no_action=MessageWindow(err.stderr).show).show()
            return
        # unhandled problems
        MessageWindow('Unhandled git error detected, details below. \n\n'
                      + err.stderr,
                      title='PyGitLatex: Git Error').show()

    def clone_project(self):
        # TODO: not implemented yet (menu entry exists but does nothing)
        pass

    def create_project_remote_repo(self):
        """Create a bare repository and register it as the project remote."""
        out = QFileDialog.getSaveFileName(self,
                                          'Choose a bare git repository',
                                          self.local_proj_name + '.git',
                                          'Bare git repo (*.git)')
        remote_loc = self.rgit.polish_url(out[0])
        self.repo.clone(remote_loc, bare=True)
        self.rgit.remote('add', self.remote_name, remote_loc)

    def polish_remote_url(self):
        """Replace the remote URL with GitPython's polished form."""
        curr_url = self.repo.remotes[self.remote_name].url
        polished_url = self.rgit.polish_url(curr_url)
        self.rgit.remote('set-url', self.remote_name, polished_url)

    def update_git_console(self, command=None, output=None):
        """Append an echoed command and/or its output to the git console."""
        if command is not None:
            self.txaGitConsole += '>> ' + command
        if output is not None:
            self.txaGitConsole += output + '\n'

    def clear_git_console(self):
        self.txaGitConsole.value = ''

    def check_git_command_event(self, event):
        """Run the typed command when Return/Enter is pressed."""
        if event.key() == QtCore.Qt.Key_Return \
                or event.key() == QtCore.Qt.Key_Enter:
            self.parse_git_command()

    def parse_git_command(self):
        """Run the free-form command from the text box via GitPython."""
        if not self.check_repo(1):
            return
        command = self.txtGitCommand.value
        parts = command.split()
        if not parts:
            # BUG FIX: empty input used to raise IndexError on parts[0].
            return
        if parts[0] != 'git':
            self.update_git_console(command,
                                    'Error: Git command must start with "git".')
        else:
            try:
                out = getattr(self.rgit, parts[1])(*parts[2:])
                self.update_git_console(command, out)
            except Exception:
                # BUG FIX: was a bare except, which also swallowed
                # SystemExit/KeyboardInterrupt.
                self.update_git_console(command,
                                        "Error: Problem with git command.")
        self.txtGitCommand.value = ''

    def git_add(self):
        """Open the add-files window when there is anything to stage."""
        if not self.check_repo(1):
            return
        if not self.repo.untracked_files and not self.repo.is_dirty():
            self.update_git_console(command='git add',
                                    output='No files to add.')
            return
        gawin = GitAddFiles(self.repo)
        gawin.show()

    def git_commit(self):
        """Open the commit-message window when the working tree is dirty."""
        if not self.check_repo(1):
            return
        if not self.repo.is_dirty():
            self.update_git_console(command='git commit',
                                    output='No files to commit. '
                                           '(Try add first.)')
            return
        gcwin = GitCommit(self.repo)
        gcwin.show()

    def git_log(self):
        if self.check_repo(1):
            self.update_git_console('git log', self.rgit.log())

    def git_pull(self):
        """Pull the tracked branch; route failures to check_git_failure."""
        if not self.check_repo(2):
            return
        try:
            out = self.rgit.pull(self.remote_name, self.branch_name)
            self.update_git_console('git pull', out)
        except GitCommandError as err:
            self.check_git_failure(err)

    def git_push(self):
        """Push the tracked branch; route failures to check_git_failure."""
        if not self.check_repo(2):
            return
        try:
            out = self.rgit.push(self.remote_name, self.branch_name)
            self.update_git_console('git push', out)
        except GitCommandError as err:
            self.check_git_failure(err)

    def git_status(self):
        if self.check_repo(1):
            self.update_git_console(command='git status',
                                    output=self.rgit.status())

    def exit_app(self):
        self.close()
        exit()
# run the app if this is executed as a script
if __name__ == "__main__":
start_app(PyGitLatex, geometry=(100,100,620,520) ) |
16,801 | bc0ea9ae8748256fbfab6042a09a67f755addb1d | from flask import Flask, render_template, url_for
from forms import RegistrationForm, LoginForm
# Single module-level Flask application.
app = Flask(__name__)
# NOTE(review): secret key committed in source — move to env/instance config
# before deploying.
app.config['SECRET_KEY'] = '41c907f395dc1244c69ab23a40fa363b'

# Placeholder blog data rendered by the cv/blog templates.
posts = [
    {
        'author': 'Corey Schafer',
        'title': 'Blog Post 1',
        'content': 'First post content',
        'date_posted': 'April 20, 2018'
    },
    {
        'author': 'Jane Doe',
        'title': 'Blog Post 2',
        'content': 'Second post content',
        'date_posted': 'April 21, 2018'
    }
]
@app.route("/")
# cv page (also serves as the site root)
@app.route("/cv")
def cv():
    """Render the CV page; *posts* feeds the template's listing."""
    return render_template('cv.html', title="CV", posts=posts)
# project page
@app.route("/projects")
def projects():
    """Render the static projects page."""
    return render_template('projects.html', title="Projects")
# blog page
@app.route("/blog")
def blog():
    """Render the blog listing from the module-level posts data."""
    return render_template('blog.html', posts=posts, title="Blog")
# register and login
@app.route("/register")
def register():
    """Render the registration form (display only; no POST handling yet)."""
    form = RegistrationForm()
    return render_template('register.html', form = form, title="Register")
@app.route("/login")
def login():
    """Render the login form.

    Fixes two defects: the view was named ``register``, colliding with the
    /register view (Flask raises "overwriting an existing endpoint function"
    at import time), and ``LoginForm`` was referenced without parentheses,
    passing the class instead of an instance to the template.
    """
    form = LoginForm()
    return render_template('login.html', form=form, title="Login")
#if __name__ == '__main__':
# app.run(debug=True) |
16,802 | 3f70d7a3d253f4c6fdf992544b82df44fe622345 | from django.apps import AppConfig
class CassetteDjangoConfig(AppConfig):
    """Django application configuration for the ``cassette_django`` app."""
    name = 'cassette_django'
|
16,803 | 92e77c0d3ce942662092d4edb3237220058f068a | import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
from model import createNetwork, createDeconvNetwork, createLeanNetwork
from mesh_optimizer import Optimizer
from prepare_data import prepareData, createBatches, prepare_mask
import tensorflow as tf
import numpy as np
import time
# ==============================================================================
def prepareTrainData(path=None, ratio=0):
    """Load meshes/images and optionally keep a random subsample.

    Args:
        path: list of dataset directories. Defaults to the airplane test
            set. (Fixed: previously a mutable list literal was used as the
            default argument.)
        ratio: if strictly between 0 and 1, keep that fraction of samples,
            chosen uniformly at random without replacement.

    Returns:
        (X, Y, faces) with X a tf constant of shape [N, 1, 400, 400].
    """
    if path is None:
        path = ['./data/content/objnet/airplane/test']
    X, Y, faces = prepareData(path, lean=True, fetch_faces=True)
    if 0 < ratio < 1:
        keep = np.random.choice(len(X), size=int(ratio * len(X)),
                                replace=False)
        X = [X[i] for i in keep]
        Y = [Y[i] for i in keep]
        faces = [faces[i] for i in keep]
    X = tf.constant(X, shape=[len(X), 1, 400, 400])
    return X, Y, faces
# ==============================================================================
def prepareNN(hidden_size=1024, out_verts=162,
              learning_rate=0.001,
              targ_obj_path='./models/ico_162.obj',
              norm_weight=0.1):
    """Build the lean network and its optimizer.

    Bug fix: ``norm_weight`` was previously hard-coded to 0.1 inside the
    Optimizer call, silently ignoring the caller's argument; it is now
    passed through (default unchanged, so existing callers behave the same).
    """
    net = createLeanNetwork(hidden_size, out_verts)
    faces, mask = prepare_mask(targ_obj_path)
    optim = Optimizer(net, faces=faces, mask=mask,
                      learning_rate=learning_rate, norm_weight=norm_weight)
    return net, optim
# ==============================================================================
def runTraining(optim, X, Y, Y_normals,
                batch_size=16, min_error=1e-3, min_step=1e-3,
                checkpoint_callback=None,
                num_epochs=10,
                max_repets=10):
    """Thin wrapper: batch the dataset and delegate to ``optim.train``.

    Returns whatever loss history ``optim.train`` reports.
    """
    batches = createBatches(len(X), batch_size=batch_size)
    losses = optim.train(X, Y, Y_normals, batches,
                         min_error=min_error, min_step=min_step, plot=True,
                         checkpoint_callback=checkpoint_callback,
                         num_epochs=num_epochs,
                         max_repets=max_repets)
    return losses
# ==============================================================================
def saveCheckpoint(path='./checkpoints/check', download_callback=None):
    """Return a checkpoint callback that saves *model*'s weights to *path*.

    Bug fix: the original returned a lambda that built a set literal of the
    two calls, which raised ``TypeError: 'NoneType' object is not callable``
    whenever the default ``download_callback=None`` was used. The callback
    is now invoked only when one is supplied.
    """
    def _checkpoint(model):
        saveModel(model, path)
        if download_callback is not None:
            download_callback(path)
    return _checkpoint


def saveModel(model, path):
    """Persist the model weights to *path*."""
    model.save_weights(path)
# ==============================================================================
def updateModel(model, checkpoint_path):
model.load_weights(checkpoint_path) |
16,804 | 27d190eaf8c5427b176e1ffd8da01069e9bea7b8 | from rest_framework import serializers
from main.order.models import Company
from main.api.serializers.reseller.product import ProductSerializer
class CompanySerializer(serializers.ModelSerializer):
    """Full company payload plus owner username and a products link."""
    # Read-only derived field resolved by get_username below.
    username = serializers.SerializerMethodField()
    # Link to this company's product list endpoint, keyed by pk.
    products = serializers.HyperlinkedIdentityField(
        view_name = 'api:product_for_company',
        lookup_field = 'pk'
    )
    class Meta:
        model = Company
        fields = [
            'pk',
            'products',
            'name',
            'body',
            'logo',
            'username',
            'size',
            'category',
            'location',
            'created',
            'is_deleted',
        ]
    def get_username(self, obj):
        # The owning user's login name.
        return str(obj.user.username)
class CompanyViewSerializer(serializers.ModelSerializer):
    """Company list/detail payload with a self link and readable location."""
    # Self link to the company detail endpoint.
    url = serializers.HyperlinkedIdentityField(
        view_name = 'api:company_detail_view',
        lookup_field = 'pk'
    )
    username = serializers.SerializerMethodField()
    # NOTE(review): capitalised name is unconventional but the 'Location'
    # key is part of the public payload — renaming would break clients.
    Location = serializers.SerializerMethodField()
    class Meta:
        model = Company
        fields = [
            'url',
            'pk',
            'name',
            'body',
            'logo',
            'user',
            'username',
            'size',
            'category',
            'Location',
            'created',
            'is_deleted',
        ]
    def get_username(self, obj):
        # NOTE(review): sibling CompanySerializer uses obj.user.username;
        # str(obj.user) matches only for the default User model — confirm.
        return str(obj.user)
    def get_Location(self, obj):
        # Human-readable address of the related location.
        return str(obj.location.address)
class CompanyUCViewSerializer(serializers.ModelSerializer):
    """Company payload without owner fields (update/create views)."""
    # Link to this company's product list endpoint, keyed by pk.
    products = serializers.HyperlinkedIdentityField(
        view_name = 'api:product_for_company',
        lookup_field = 'pk'
    )
    class Meta:
        model = Company
        fields = [
            'pk',
            'products',
            'name',
            'body',
            'logo',
            'size',
            'category',
            'location',
            'created',
            'is_deleted',
        ]
|
16,805 | 93fbedc1d37b35d28f4d9529dc9e56cab2dcd02f | import pygame, sys
import numpy as np
#initialize pygame
pygame.init()
# --- board / drawing constants ---
width = 600
height = 600
line_width = 10
crimson = (220, 20, 60)          # background colour
line_color = (52, 235, 232)      # grid line colour
board_rows = 3
board_cols = 3
circle_radius = 50
circle_width = 15
circle_color = (153, 102, 255)   # player 1 (circle) marker colour
square_color = (153, 102, 255)   # player 2 (cross) marker colour
square_width = 15
space = 55                       # inset of the cross strokes within a cell
player = 1                       # whose turn it is (1 or 2)
game_over = False
#display parameters
screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('TIC_TAC_TOE ')
screen.fill(crimson)
# 3x3 grid; 0 = empty, otherwise the number of the player who claimed it
board = np.zeros((board_rows, board_cols))
# draw circles and squares
def draw_figures():
    """Redraw every placed marker from the board state (200px cells)."""
    cell = 200
    for r in range(board_rows):
        for c in range(board_cols):
            mark = board[r][c]
            if mark == 1:
                center = (int(c * cell + 100), int(r * cell + 100))
                pygame.draw.circle(screen, circle_color, center,
                                   circle_radius, circle_width)
            elif mark == 2:
                left, right = c * cell + space, c * cell + cell - space
                top, bottom = r * cell + space, r * cell + cell - space
                pygame.draw.line(screen, square_color, (left, bottom),
                                 (right, top), square_width)
                pygame.draw.line(screen, square_color, (left, top),
                                 (right, bottom), square_width)
# draw the board grid
def line():
    """Draw the four grid lines separating the 3x3 cells."""
    for x in (400, 200):       # vertical separators
        pygame.draw.line(screen, line_color, (x, 0), (x, 600), line_width)
    for y in (200, 400):       # horizontal separators
        pygame.draw.line(screen, line_color, (0, y), (600, y), line_width)

line()
#mark squares
def mark_squares(rows, cols, player):
    """Claim cell (rows, cols) for *player* in the global board."""
    board[rows][cols] = player
#check if squares are available
def available_square(row, col):
    """Return True when cell (row, col) is still empty."""
    return board[row][col] == 0
# check if board is full
def isboardfull(grid=None):
    """Return True when the grid has no empty (0) cells left.

    Generalized: *grid* defaults to the module-level ``board`` so existing
    callers are unchanged, but any 2-D array-like board can be passed in
    (which also makes the function testable in isolation).
    """
    cells = board if grid is None else grid
    for row in cells:
        for value in row:
            if value == 0:
                return False
    return True
# check win conditions
def check_win(player):
    """Return True (drawing the winning strike) if *player* has 3 in a row.

    Now explicitly returns False when there is no win; the original fell
    off the end and returned None, which only worked because callers used
    the result in a boolean context.
    """
    # columns
    for col in range(board_cols):
        if board[0][col] == player and board[1][col] == player \
                and board[2][col] == player:
            vertical_win_line(col, player)
            return True
    # rows
    for row in range(board_rows):
        if board[row][0] == player and board[row][1] == player \
                and board[row][2] == player:
            horizontal_win_line(row, player)
            return True
    # ascending diagonal (bottom-left to top-right)
    if board[2][0] == player and board[1][1] == player and board[0][2] == player:
        diagonal_asc_line(player)
        return True
    # descending diagonal (top-left to bottom-right)
    if board[0][0] == player and board[1][1] == player and board[2][2] == player:
        diagonal_desc_line(player)
        return True
    return False
#the winning lines
def vertical_win_line(col, player):
    """Draw a vertical strike through column *col* in the winner's colour."""
    x = col * 200 + 100
    color = circle_color if player == 1 else square_color
    pygame.draw.line(screen, color, (x, 15), (x, height - 15), 15)
def horizontal_win_line(row, player):
    """Draw a horizontal strike through row *row* in the winner's colour."""
    y = row * 200 + 100
    color = circle_color if player == 1 else square_color
    pygame.draw.line(screen, color, (15, y), (width - 15, y), 15)
def diagonal_asc_line(player):
    """Strike from bottom-left to top-right in the winner's colour."""
    color = circle_color if player == 1 else square_color
    pygame.draw.line(screen, color, (15, height - 15), (width - 15, 15), 15)
def diagonal_desc_line(player):
    """Strike from top-left to bottom-right in the winner's colour."""
    color = circle_color if player == 1 else square_color
    pygame.draw.line(screen, color, (15, 15), (width - 15, height - 15), 15)
# press R to restart
def restart():
    """Clear the display and board and give the first turn back to player 1.

    Bug fix: ``player = 1`` previously created a *local* variable, so the
    module-level turn indicator was never actually reset; ``global`` makes
    the reset take effect.
    """
    global player
    screen.fill(crimson)
    line()
    player = 1
    for row in range(board_rows):
        for col in range(board_cols):
            board[row][col] = 0
#function body for tic tac toe operation
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()
        if event.type == pygame.MOUSEBUTTONDOWN:
            # translate the click position to a board cell (cells are 200px)
            mouseX = event.pos[0]
            mouseY = event.pos[1]
            clicked_row = int(mouseY // 200)
            clicked_col = int(mouseX // 200)
            if available_square(clicked_row, clicked_col) and not game_over:
                # place the current player's mark, check for a win, then
                # hand the turn to the other player and repaint
                if player == 1:
                    mark_squares(clicked_row, clicked_col, player)
                    if check_win(player):
                        game_over = True
                    player = 2
                    draw_figures()
                elif player == 2:
                    mark_squares(clicked_row, clicked_col, player)
                    if check_win(player):
                        game_over = True
                    player = 1
                    draw_figures()
        if event.type == pygame.KEYDOWN:
            # R restarts the game at any time
            if event.key == pygame.K_r:
                restart()
                game_over = False
    pygame.display.update()
|
16,806 | 1272fc6f4f24ad69ddcc8be9c7749005009c159f | # Copyright (c) Alibaba, Inc. and its affiliates.
import datetime
import os
import os.path as osp
from collections import OrderedDict
import json
import torch
from torch import distributed as dist
from modelscope.metainfo import Hooks
from modelscope.trainers.hooks.builder import HOOKS
from modelscope.trainers.hooks.logger.base import LoggerHook
from modelscope.utils.constant import LogKeys, ModeKeys
from modelscope.utils.json_utils import EnhancedEncoder
from modelscope.utils.torch_utils import get_dist_info, is_master
@HOOKS.register_module(module_name=Hooks.TextLoggerHook)
class TextLoggerHook(LoggerHook):
    """Logger hook in text, Output log to both console and local json file.
    Args:
        by_epoch (bool, optional): Whether EpochBasedtrainer is used.
            Default: True.
        interval (int, optional): Logging interval (every k iterations).
            It is interval of iterations even by_epoch is true. Default: 10.
        ignore_last (bool, optional): Ignore the log of last iterations in each
            epoch if less than :attr:`interval`. Default: True.
        reset_flag (bool, optional): Whether to clear the output buffer after
            logging. Default: False.
        out_dir (str): The directory to save log. If is None, use `trainer.work_dir`
    """
    def __init__(self,
                 by_epoch=True,
                 interval=10,
                 ignore_last=True,
                 reset_flag=False,
                 out_dir=None):
        super(TextLoggerHook, self).__init__(interval, ignore_last, reset_flag,
                                             by_epoch)
        self.by_epoch = by_epoch
        # cumulative training time; feeds the running-average ETA estimate
        self.time_sec_tot = 0
        self.out_dir = out_dir
        self._logged_keys = []  # store the key has been logged
    def before_run(self, trainer):
        """Resolve the output dir and json log path before training starts."""
        super(TextLoggerHook, self).before_run(trainer)
        if self.out_dir is None:
            self.out_dir = trainer.work_dir
        # only the master rank creates directories / writes files
        if not osp.exists(self.out_dir) and is_master():
            os.makedirs(self.out_dir)
        trainer.logger.info('Text logs will be saved to {}'.format(
            self.out_dir))
        self.start_iter = trainer.iter
        self.json_log_path = osp.join(self.out_dir,
                                      '{}.log.json'.format(trainer.timestamp))
        if hasattr(trainer, 'meta') and trainer.meta is not None:
            self._dump_log(trainer.meta)
    def _get_max_memory(self, trainer):
        """Return peak CUDA memory (MB), reduced to the max across ranks."""
        device = getattr(trainer.model, 'output_device', None)
        mem = torch.cuda.max_memory_allocated(device=device)
        # NOTE(review): dtype=torch.int truncates the fractional MB value.
        mem_mb = torch.tensor([mem / (1024 * 1024)],
                              dtype=torch.int,
                              device=device)
        _, world_size = get_dist_info()
        if world_size > 1:
            dist.reduce(mem_mb, 0, op=dist.ReduceOp.MAX)
        return mem_mb.item()
    def _log_info(self, log_dict, trainer):
        """Format and emit one console line (train vs val/test layouts)."""
        lr_key = LogKeys.LR
        epoch_key = LogKeys.EPOCH
        iter_key = LogKeys.ITER
        mode_key = LogKeys.MODE
        iter_time_key = LogKeys.ITER_TIME
        data_load_time_key = LogKeys.DATA_LOAD_TIME
        eta_key = LogKeys.ETA
        if log_dict[mode_key] == ModeKeys.TRAIN:
            # the learning rate may be a scalar or a dict of per-group values
            if isinstance(log_dict[lr_key], dict):
                lr_str = []
                for k, val in log_dict[lr_key].items():
                    lr_str.append(f'{lr_key}_{k}: {val:.3e}')
                lr_str = ' '.join(lr_str)
            else:
                lr_str = f'{lr_key}: {log_dict[lr_key]:.3e}'
            if self.by_epoch:
                log_str = f'{epoch_key} [{log_dict[epoch_key]}][{log_dict[iter_key]}/{trainer.iters_per_epoch}]\t'
            else:
                log_str = f'{iter_key} [{log_dict[iter_key]}/{trainer.max_iters}]\t'
            log_str += f'{lr_str}, '
            self._logged_keys.extend([lr_key, mode_key, iter_key, epoch_key])
            if iter_time_key in log_dict.keys():
                # running average of the iteration time drives the ETA estimate
                self.time_sec_tot += (log_dict[iter_time_key] * self.interval)
                time_sec_avg = self.time_sec_tot / (
                    trainer.iter - self.start_iter + 1)
                eta_sec = time_sec_avg * (trainer.max_iters - trainer.iter - 1)
                eta_str = str(datetime.timedelta(seconds=int(eta_sec)))
                log_str += f'{eta_key}: {eta_str}, '
                log_str += f'{iter_time_key}: {log_dict[iter_time_key]:.3f}, '
                log_str += f'{data_load_time_key}: {log_dict[data_load_time_key]:.3f}, '
                self._logged_keys.extend([
                    iter_time_key,
                    data_load_time_key,
                ])
        else:
            # val/test time
            # here 1000 is the length of the val dataloader
            # by epoch: epoch[val] [4][1000]
            # by iter: iter[val] [1000]
            if self.by_epoch:
                log_str = f'{epoch_key}({log_dict[mode_key]}) [{log_dict[epoch_key]}][{log_dict[iter_key]}]\t'
            else:
                log_str = f'{iter_key}({log_dict[mode_key]}) [{log_dict[iter_key]}]\t'
            self._logged_keys.extend([mode_key, iter_key, epoch_key])
        # append every metric that has not already been logged above
        log_items = []
        for name, val in log_dict.items():
            if name in self._logged_keys:
                continue
            if isinstance(val, float):
                val = f'{val:.4f}'
            log_items.append(f'{name}: {val}')
        log_str += ', '.join(log_items)
        if is_master():
            trainer.logger.info(log_str)
    def _dump_log(self, log_dict):
        # dump log in json format (one JSON object per line, appended)
        json_log = OrderedDict()
        for k, v in log_dict.items():
            json_log[k] = self._round_float(v)
        if is_master():
            with open(self.json_log_path, 'a+') as f:
                json.dump(json_log, f, cls=EnhancedEncoder)
                f.write('\n')
    def _round_float(self, items, ndigits=5):
        """Round floats (recursively through lists) for compact json output."""
        if isinstance(items, list):
            return [self._round_float(item) for item in items]
        elif isinstance(items, float):
            return round(items, ndigits)
        else:
            return items
    def log(self, trainer):
        """Assemble the log dict for this step, print it and dump it to json."""
        cur_iter = self.get_iter(trainer, inner_iter=True)
        log_dict = OrderedDict(
            mode=trainer.mode, epoch=self.get_epoch(trainer), iter=cur_iter)
        # statistic memory
        if torch.cuda.is_available():
            log_dict[LogKeys.MEMORY] = self._get_max_memory(trainer)
        log_dict = dict(log_dict, **trainer.log_buffer.output)
        self._log_info(log_dict, trainer)
        self._dump_log(log_dict)
        return log_dict
|
16,807 | 1d0c3600974853e81eb3795c0475572beda01572 | #################
## imports
import os
import json
import time
from flask import Flask, render_template, redirect, url_for, request, flash
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
################
## config
# instance_relative_config lets local_settings.py override config per-deployment
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('local_settings.py', silent=True)
# NOTE(review): literal placeholder secret key — load a real secret from config.
app.secret_key = 'SECRET_KEY'
Bootstrap(app)
app.config['BOOTSTRAP_SERVE_LOCAL'] = True
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
@app.route('/')
def index():
    """Landing page."""
    return render_template("index.html")
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page with the proper status code."""
    return render_template('404.html'), 404
import project.commutetime
@app.route('/map', methods=['GET', 'POST'])
def map():
    """Show commute-time results for the submitted address/price range.

    Expects the 'address' form field as a '|'-separated string whose first
    segment is the work address and whose last two are the price bounds.
    NOTE(review): shadows the builtin ``map``; renaming would change the
    Flask endpoint name used by url_for — verify before renaming.
    """
    userinput = request.form['address'].split('|')
    print('userinput = {}'.format(userinput))
    work_address = userinput[0]
    # last two segments are the low/high price bounds
    lowprice, highprice = userinput[-2], userinput[-1]
    print(work_address, lowprice, highprice)
    records = project.commutetime.comtime(work_address, lowprice, highprice)
    print(records)
    # comtime signals invalid input with -1 rather than raising
    if records == -1:
        flash('invalid input!', 'warning')
        return redirect(url_for('index'))
    # presumably record[5]/record[6] are house lat/lng and records[0][2]/[3]
    # the work location — TODO confirm against comtime's row schema
    house_locations = [[record[5], record[6]] for record in records]
    print(house_locations)
    work_location = [records[0][2], records[0][3]]
    return render_template('map.html', work_location=work_location, house_locations=house_locations, rows=records)
@app.route("/about")
def about():
    """Static about page."""
    return render_template('about.html')
if __name__ == '__main__':
    # development server only; use a proper WSGI server in production
    app.run(host='0.0.0.0', port=8000)
|
16,808 | 4da0ad39685cb4af0f0a564e5db959a088fe8a5c | from browser import alert, window, ajax
from javascript import JSON
class MapRenderer:
    """Brython/Leaflet map widget.

    Sets up switchable base tile layers, geolocates the user, and places
    clickable camera markers fetched (as JSON) from ``get_location_url``.
    Clicking a marker navigates to ``camera_view_url`` for that camera.
    """
    def __init__(self, center, zoom, get_location_url, camera_view_url):
        self.center = center
        self.zoom = zoom
        # get data
        self.leaflet = window.L
        self.shapes = {}
        self.markers = None
        self.user_coord = None   # (lat, lng) once geolocation succeeds
        self.user_mark = []
        self.get_location_url = get_location_url
        self.camera_view_url = camera_view_url
        # --- base tile layers ---
        self.openstreet = self.leaflet.tileLayer(
            "https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",
            {
                "maxZoom": 19,
                "attribution": """©
                <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors""",
            },
        )
        self.positron = self.leaflet.tileLayer(
            "https://{s}.basemaps.cartocdn.com/rastertiles/light_all/{z}/{x}/{y}.png",
            {
                "attribution": """© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors,
                © <a href="http://cartodb.com/attributions">CartoDB</a>""",
                "subdomains": "abcd",
                "maxZoom": 19,
            },
        )
        self.world_imagery = self.leaflet.tileLayer(
            "https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}",
            {
                "maxZoom": 17,
                "attribution": """Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye,
                Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community""",
            },
        )
        self.world_topo = self.leaflet.tileLayer(
            """https://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}""",
            {
                "attribution": """Tiles © Esri — Esri, DeLorme, NAVTEQ, TomTom, Intermap,
                iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI,
                Esri China (Hong Kong), and the GIS User Community"""
            },
        )
        # Labels shown in the layer-switcher control.
        self.base_maps = {
            "<b><span style='color: grey'>Positron</span></b>": self.positron,
            "<b><span style='color: blue'>OpenStreet Map</span></b>": self.openstreet,
            "<b><span style='color: green'>World Map</span></b>": self.world_imagery,
            "<b><span style='color: teal'>World Topo Map</span></b>": self.world_topo,
        }
        self.map = self.leaflet.map(
            "mapbrython",
            {
                "center": self.center,
                "zoom": self.zoom,
                "layers": self.positron,
                "renderer": self.leaflet.canvas({"padding": 0.5}),
                "smoothWheelZoom": True,  # enable smooth zoom
                "smoothSensitivity": 1,  # zoom speed. default is 1
            },
        )
        def navi(pos):
            # geolocation success: drop a marker at the user's position
            self.user_coord = (pos.coords.latitude, pos.coords.longitude)
            self.user_mark = (
                self.leaflet.marker(
                    self.user_coord,
                    {"icon": self.get_icon("my_location"), "zIndexOffset": 1000},
                )
                .addTo(self.map)
                # NOTE(review): popup text below looks mojibake (Thai text
                # mis-decoded as UTF-8) — verify the source file's encoding.
                .bindPopup("เธเธณเนเธซเธเนเธเธเธญเธเธเธธเธ")
            )
        def nonavi(error):
            # geolocation failure / unsupported browser
            alert("Your browser doesn't support geolocation")
        window.navigator.geolocation.getCurrentPosition(
            navi, nonavi
        )  # set user's current location on map(success, error)
        self.leaflet.control.layers(self.base_maps).addTo(self.map)
    def fly_to_user(self):
        """Animate the view to the user's geolocated position."""
        self.map.flyTo(self.user_coord, 16)
        self.user_mark.openPopup()
    def zoom_out(self):
        self.map.zoomOut(1)
    def zoom_in(self):
        self.map.zoomIn(1)
    def on_each_feature(self, feature, layer):  # feature = layer.feature
        """Wire hover-highlight and click-to-zoom handlers onto *layer*."""
        def zoom_to_feature(e):
            if self.map.getZoom() < 11:
                self.map.fitBounds(e.target.getBounds())
        def reset_highlight(e):
            # restore the style via whichever shape group owns this layer
            for key in self.shapes:
                if self.shapes[key].hasLayer(layer):
                    self.shapes[key].resetStyle(e.target)
        def highlight_feature(e):
            layer = e.target
            layer.setStyle(
                {"weight": 2, "color": "black", "dashArray": "", "fillOpacity": 0.8}
            )
        layer.on(
            {
                "mouseover": highlight_feature,
                "mouseout": reset_highlight,
                "click": zoom_to_feature,
            }
        )
    def get_icon(self, type="mark_white", size=[35, 35]):
        """Build a Leaflet icon from the named SVG in the static marks dir."""
        return self.leaflet.icon(
            dict(
                iconUrl=f"/static/brython/maps/resources/marks/{type}.svg",
                iconSize=size,
                iconAnchor=[22, 40],
                popupAnchor=[0, -30],
            )
        )
    def set_camera_marker(self, data):
        """Place one clickable marker per camera record in *data*."""
        layers = []
        def on_click_device(camera_id):
            window.location.href = f"{self.camera_view_url}&camera_id={camera_id}"
        camera_icon = self.get_icon(type="camera_marker", size=[80, 80])
        for camera_data in data:
            tooltip_detail = f"""<div align="left" style="font-size: 15px;"> <b>{camera_data["name"]}</b>"""
            print(camera_data["location"][1])
            # location appears to be [lng, lat]; Leaflet expects [lat, lng]
            marker = (
                self.leaflet.marker(
                    [camera_data["location"][1], camera_data["location"][0]],
                    {
                        "customId": camera_data["camera_id"],
                        "icon": camera_icon,
                    },
                )
                .bindTooltip(
                    tooltip_detail,
                    {"offset": (0, 30), "className": "tooltip-marker"},
                )
                .addTo(self.map)
                .on("click", lambda e: on_click_device(e.sourceTarget.options.customId))
            )
            layers.append(marker)
        self.leaflet.layerGroup(layers).addTo(self.map)
    def on_get_data_complete(self, res):
        """Ajax completion handler: parse the camera list and render markers."""
        data = JSON.parse(res.text)
        self.set_camera_marker(data)
        # print(data)
    def start(self):
        """Kick off the async fetch of camera locations."""
        ajax.get(self.get_location_url, oncomplete=self.on_get_data_complete)
        # print(self.get_location_url)
|
16,809 | 7233a337ba4237dfe08d47d72a9a3ba9cde36a5e | #Copyright 2010 Thomas A Caswell
#tcaswell@uchicago.edu
#http://jfi.uchicago.edu/~tcaswell
#
#This program is free software; you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation; either version 3 of the License, or (at
#your option) any later version.
#
#This program is distributed in the hope that it will be useful, but
#WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, see <http://www.gnu.org/licenses>.
from util import cord_pairs
import scipy.odr as sodr
import numpy as np
import scipy.optimize
import general
import plots
def fun_flipper(fun):
    """Return a wrapper that calls *fun* with its two arguments swapped."""
    def flipped(a, b):
        return fun(b, a)
    return flipped
###################
# functional forms#
###################
# for g(r)
def fun_decay_exp_inv_gen(r0):
    """Return a fit function  C/(r - r_c) * exp(-(r - r_c)/a) * sin(K (r - r_c)) + 1.

    Parameters of the returned closure: p = (a, K, C, r_c), evaluated at r.

    NOTE(review): despite the signature, *r0* is unused — the offset is the
    fitted parameter p[3]; and the oscillation is a sine, not the cosine the
    old docstring claimed.
    """
    def fit_fun(p,r):
        return (p[2] / (r-p[3])) * np.exp(-(r-p[3])/p[0] ) * np.sin(p[1]*(r- p[3])) +1
    return fit_fun
def fun_decay_exp_inv_dr_gen(r0):
    """Return d/dr of the fun_decay_exp_inv_gen form (without the +1).

    Parameters of the returned closure: p = (a, K, C, r_c), evaluated at r.
    The 1/tan term is the cotangent from differentiating the sine factor.

    NOTE(review): *r0* is unused (p[3] is the offset), matching the
    companion generator above.
    """
    def ret_fun(p,r):
        return (p[2] /( r-p[3])) * np.exp(-(r-p[3])/p[0]) * np.sin(p[1]*(r - p[3]))*\
            -1*(1/p[0] + 1/r + p[1]/ np.tan(p[1] * (r - p[3])))
    return ret_fun
def fun_decay_exp_gen(r0):
    """Return  C exp(-(r-r0)/a) cos(K r + phi_0) + m r + b  evaluated at r.

    Parameters of the returned closure: p = (a, K, C, phi_0, m, b).
    The old docstring omitted the linear background terms p[4] (slope)
    and p[5] (offset).
    """
    def fit_fun(p,r):
        return (p[2] ) * np.exp(-(r-r0)/p[0] ) * np.cos(p[1]*r + p[3]) + (r)*p[4] + p[5]
    return fit_fun
def fun_decay_inv(p, r):
    """Return (C / r) * cos(K r + phi_0) evaluated at r.

    p = (K, C, phi_0).

    Bug fix: the body referenced an undefined name ``r0`` (NameError on
    every call); the docstring's C/r form has no such factor, so it is
    dropped.
    """
    return (p[1] / r) * np.cos(p[0]*r + p[2])
# for van Hove
def fun_lorentzian(p, r):
    """Lorentzian  C / ((r/a)^2 + 1)  with p = (a, C)."""
    width, amplitude = p[0], p[1]
    scaled = r / width
    return amplitude / (scaled * scaled + 1)
def fun_lorentzian_p_gauss(p,r):
    """Lorentzian plus Gaussian sharing ONE width:
    C/((r/a)^2 + 1) + C2 exp(-(r/a)^2 / 2), with p = (a, C, C2).

    NOTE(review): both terms use p[0]; the old docstring implied an
    independent second width a_2 that the code does not have.
    """
    return p[1] / ((r/p[0])**2 + 1) + p[2] * np.exp(-((r/p[0])**2)/2)
def fun_lorentzian_t_gauss(p,r):
    """Lorentzian TIMES Gaussian:
    C/((r/a)^2 + 1) * exp(-(r/b)^2 / 2), with p = (a, b, C)."""
    return p[2] / ((r/p[0])**2 + 1) * np.exp(-((r/p[1])**2)/2)
def fun_gauss(p, r):
    """Gaussian  C * exp(-(r/a)^2)  with p = (a, C)."""
    width, amplitude = p[0], p[1]
    return amplitude * np.exp(-(r / width) ** 2)
def fun_exp_p_gauss(p,r):
    """Gaussian plus exponential: C exp(-r^2/a) + C2 exp(-|r|/a2),
    with p = (a, C, a2, C2).

    NOTE(review): r^2 is divided by p[0] directly (not (r/a)^2), so p[0]
    carries units of length squared.
    """
    return p[1] * np.exp(-((r**2/p[0]))) + p[3] * np.exp(-((np.abs(r)/p[2])))
def fun_exp_p_exp(p,r):
    """Sum of two exponentials: C exp(-|r|/a) + C2 exp(-|r|/a2),
    with p = (a, C, a2, C2)."""
    return p[1] * np.exp(-((np.abs(r)/p[0]))) + p[3] * np.exp(-((np.abs(r)/p[2])))
def fun_exp_t_gauss(p,r):
    """Gaussian times exponential: C exp(-r^2/a - |r|/b), p = (a, b, C).

    NOTE(review): as in fun_exp_p_gauss, p[0] divides r^2 directly and is
    therefore a squared length scale.
    """
    return p[2] * np.exp(-((r**2/p[0]))-((np.abs(r)/p[1])))
def fun_gauss_gauss(p,r):
    """Sum of two Gaussians: C exp(-(r/a)^2) + C2 exp(-(r/a2)^2),
    with p = (a, C, a2, C2)."""
    return p[1] * np.exp(-((r/p[0])**2)) + p[3] * np.exp(-((r/p[2])**2))
####################
# fitting functions#
####################
def fit_curve(x,y,p0,func):
    """Fit y = func(p, x) with scipy ODR, starting from parameters p0.

    x is held fixed (ifixx all zeros), so this behaves as ordinary least
    squares on y.  Returns the scipy.odr Output of the restarted run.

    NOTE(review): the result of the first run() is discarded and the
    solver restarted once to keep iterating from that solution — confirm
    the double pass is intentional.
    """
    ifixx = np.zeros(np.array(x).shape)
    data = sodr.Data(x,y)
    model = sodr.Model(func)
    worker = sodr.ODR(data,model,p0,ifixx=ifixx,maxit=500)
    out = worker.run()
    out = worker.restart()
    return out
def display_fit(x,y,p,func,fig=None):
    """Plot log(y) and log(func(p, x)) on a plots.tac_figure.

    Creates a new figure when *fig* is None; the fit's legend entry embeds
    the function name and the fitted parameter values.  Returns the figure
    so calls can be layered.
    """
    if fig is None:
        fig = plots.tac_figure('x','y','fitting')
    fig.plot(x,np.log(y),label='data')
    fig.plot(x,np.log(func(p,x)),'--x',label=func.__name__ + '('+
             ','.join(['%.1e'%k for k in p])+ ')')
    return fig
#########################
# functions to be killed#
#########################
def fit_tmp_series_gofr2D(res,conn):
    '''Fit every 2D g(r) referenced by *res* and pair each fit with the
    row's temperature (defaulting to 25 when missing/unparsable).

    NOTE(review): fit_gofr2 and fun_decay_exp are not defined in this
    module (only fun_decay_exp_gen is) — this sits in the "functions to
    be killed" section and likely no longer runs as written.
    '''
    temps = []
    fits = []
    for r in res:
        gofr = general.get_gofr2D(r[0],conn)
        fits.append(fit_gofr2(gofr,2,fun_decay_exp,(2,7.35,1.5,0,0,1)))
        try:
            temps.append(float(r[2]))
        except (ValueError,TypeError ) :
            # fall back to room temperature when the column is absent
            temps.append(25)
    return zip(fits,temps)
def fit_quad_to_peak(x,y):
    """Fit y = B0*(x - B1)**2 + B2 with ODR and return the Output.

    The initial guess centres the parabola at mean(x) with zero curvature.

    NOTE(review): relies on ``val_to_indx``, which is neither defined nor
    imported in this module — confirm where it comes from.
    """
    def quad(B,x):
        # vertex-form quadratic; B = (curvature, centre, offset)
        return B[0] *(x -B[1]) ** 2 + B[2]
    beta = (0,np.mean(x),y[val_to_indx(x,np.mean(x))])
    data = sodr.Data(x,y)
    model = sodr.Model(quad)
    worker = sodr.ODR(data,model,beta)
    out = worker.run()
##     plts.figure()
##     plts.plot(x,y)
##     plts.plot(x,quad(out.beta,x))
##     plts.title(out.beta[1])
    return out
def try_fits(dset_key,conn):
    """Fit a dataset's 3D g(r) with two candidate forms and plot both.

    Python 2 code (print statements).  NOTE(review): depends on ``gen``,
    ``fitting`` and ``plt``, none of which are imported here, and
    ``raise "die"`` (a string exception) is invalid in modern Python —
    this function appears to be legacy.
    """
    # get out the computation number
    res = conn.execute("select comp_key from comps where function = 'gofr3D' and dset_key = ?",(dset_key,)).fetchall()
    if not len(res) == 1:
        raise "die"
    # get gofr
    gofr = gen.get_gofr3D(res[0][0],conn)
    gofr = fitting._trim_gofr(gofr,.2)
    # fits
    (p_out1_2,cov1_2,err1_2) = fitting.fit_gofr(gofr,2,fitting.fun_decay_exp_inv,(2,7.35,1.5,0,0,0))
    (p_out2_2,cov2_2,err2_2) = fitting.fit_gofr(gofr,2,fitting.fun_decay_exp,(1.5,7.35,1.5,0,0,0))
    # plots
    # check interactive plotting and turn it off while drawing
    istatus = plt.isinteractive();
    print istatus
    if istatus:plt.ioff()
    leg_hands = []
    leg_str = []
    fig = plt.figure()
    ax = fig.add_axes([.1,.1,.8,.8])
    ax.hold(True)
    ax.grid(True)
    #ax.set_aspect('equal')
    # raw data (shifted down by 1) plus both fitted forms
    leg_hands.append(ax.step(gofr.x,gofr.y-1))
    leg_str.append("g(r)")
    leg_hands.append(ax.step(gofr.x,fitting.fun_decay_exp_inv(p_out1_2,gofr.x)))
    leg_str.append("exp inv 2")
    leg_hands.append(ax.step(gofr.x,fitting.fun_decay_exp(p_out2_2,gofr.x)))
    leg_str.append("exp 2")
    print p_out1_2
    print "exp inv 2 err: " + str(err1_2)
    print p_out2_2
    print "exp 2 err: " + str(err2_2)
    ax.legend(leg_hands,leg_str)
    ax.set_title('g(r) fitting')
    ax.set_xlabel(r' r [$\mu$m]')
    ax.set_ylabel('g(r)')
    # restore the interactive state we found
    if istatus:
        print "displaying figure"
        plt.ion()
        plt.show()
    else:
        print "closing figure"
        plt.close(fig)
|
16,810 | 239ab87d3065e6d7280a89d313d0ea3f6f81d19c | import random
import numpy as np
from sklearn.metrics import roc_auc_score
import pandas
from tqdm import tqdm
# from tut
import ujson as json
import node2vec
import networkx as nx
from gensim.models import Word2Vec
import logging
import random
import numpy as np
from sklearn.metrics import roc_auc_score
# from tut
def divide_data(input_list, group_number):
    """Shuffle *input_list* in place, then split it into *group_number*
    nearly equal contiguous slices (returned as a list of lists)."""
    per_group = len(input_list) / float(group_number)
    random.shuffle(input_list)
    slices = []
    for g in range(group_number):
        start = int(round(per_group * g))
        stop = int(round(per_group * (g + 1)))
        slices.append(input_list[start:stop])
    return slices
def get_G_from_edges(edges):
    """Build a weighted nx.DiGraph from (head, tail) pairs.

    Each directed edge's 'weight' attribute is its multiplicity in
    *edges*; endpoints are stringified to form the node labels.
    """
    multiplicity = dict()
    for edge in edges:
        edge_key = str(edge[0]) + '_' + str(edge[1])
        multiplicity[edge_key] = multiplicity.get(edge_key, 0) + 1
    graph = nx.DiGraph()
    for edge_key, weight in multiplicity.items():
        parts = edge_key.split('_')
        graph.add_edge(parts[0], parts[1])
        graph[parts[0]][parts[1]]['weight'] = weight
    return graph
def randomly_choose_false_edges(nodes, true_edges):
    """Return every ordered non-self node pair whose undirected
    counterpart is absent from *true_edges*, in random order."""
    index_pairs = [(i, j) for i in range(len(nodes)) for j in range(len(nodes))]
    random.shuffle(index_pairs)
    false_edges = []
    for i, j in index_pairs:
        if i == j:
            continue
        forward = (nodes[i], nodes[j])
        backward = (nodes[j], nodes[i])
        if forward not in true_edges and backward not in true_edges:
            false_edges.append(forward)
    return false_edges
def get_neighbourhood_score(local_model, node1, node2):
    """Cosine similarity between the embeddings of node1 and node2.

    Falls back to a uniform random score in [0, 1) when either node is
    missing from the vocabulary (or the lookup fails for another
    reason), so evaluation can proceed over unseen nodes.

    Fix: narrowed the bare ``except:`` so KeyboardInterrupt/SystemExit
    are no longer swallowed.
    """
    try:
        vector1 = local_model.wv.syn0[local_model.wv.index2word.index(node1)]
        vector2 = local_model.wv.syn0[local_model.wv.index2word.index(node2)]
        return np.dot(vector1, vector2) / (np.linalg.norm(vector1) * np.linalg.norm(vector2))
    except Exception:
        return random.random()
def get_AUC(model, true_edges, false_edges):
    """ROC-AUC of the model's similarity scores, with *true_edges* as
    positives and *false_edges* as negatives."""
    labels = []
    scores = []
    for positive in true_edges:
        labels.append(1)
        scores.append(get_neighbourhood_score(model, str(positive[0]), str(positive[1])))
    for negative in false_edges:
        labels.append(0)
        scores.append(get_neighbourhood_score(model, str(negative[0]), str(negative[1])))
    return roc_auc_score(np.array(labels), np.array(scores))
if __name__ == "__main__":
    # node2vec hyper-parameters (Grover & Leskovec style random walks).
    directed = True
    p = 1
    q = 1
    num_walks = 10
    walk_length = 80
    dimension = 128
    window_size = 50
    num_workers = 8
    iterations = 20
    # number_of_groups = 2
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
    # Start to load the train data
    train_edges = list()
    raw_train_data = pandas.read_csv('train.csv')
    for i, record in raw_train_data.iterrows():
        train_edges.append((str(record['head']), str(record['tail'])))
    train_edges = list(set(train_edges))
    # from tut
    # edges_by_group = divide_data(train_edges, number_of_groups)
    print('finish loading the train data.')
    # Start to load the valid/test data; 'label' marks positive pairs.
    valid_positive_edges = list()
    valid_negative_edges = list()
    raw_valid_data = pandas.read_csv('valid.csv')
    for i, record in raw_valid_data.iterrows():
        if record['label']:
            valid_positive_edges.append((str(record['head']), str(record['tail'])))
        else:
            valid_negative_edges.append((str(record['head']), str(record['tail'])))
    valid_positive_edges = list(set(valid_positive_edges))
    valid_negative_edges = list(set(valid_negative_edges))
    # Start to load the test data (ids are numeric, hence int() first)
    test_edges = list()
    raw_test_data = pandas.read_csv('test.csv')
    for i, record in raw_test_data.iterrows():
        test_edges.append((str(int(record['head'])), str(int(record['tail']))))
    test_edges = list(set(test_edges))
    print( test_edges)
    print('finish loading the valid/test data.')
    # sanity overlap report between the three splits
    print("train: ",len(train_edges),"test: ",len(test_edges),"difference: ",len(set(train_edges).difference(set(test_edges))))
    print("train: ",len(train_edges),"valid: ",len(valid_positive_edges+valid_negative_edges),"difference: ",len(set(train_edges).difference(set(valid_positive_edges+valid_negative_edges))))
    print("test: ",len(test_edges),"valid: ",len(valid_positive_edges+valid_negative_edges),"difference: ",len(set(test_edges).difference(set(valid_positive_edges+valid_negative_edges))))
    cycle=0
    # NOTE(review): deliberately endless — each pass retrains, scores the
    # validation AUC and writes a new test_<cycle>.csv; stop it manually.
    while True:
        # write code to train the model here
        G = node2vec.Graph(get_G_from_edges(train_edges), directed, p, q)
        # Calculate the probability for the random walk process
        G.preprocess_transition_probs()
        # Conduct the random walk process
        walks = G.simulate_walks(num_walks, walk_length)
        # Train the node embeddings with gensim word2vec package
        model = Word2Vec(walks, size=dimension, window=window_size, min_count=0, sg=1, workers=num_workers, iter=iterations)
        # Save the resulted embeddings (you can use any format you like)
        resulted_embeddings = dict()
        # for i, w in enumerate(model.wv.index2word):
        #     resulted_embeddings[w] = model.wv.syn0[i]
        # replace 'your_model' with your own model and use the provided evaluation code to evaluate.
        tmp_AUC_score = get_AUC(model, valid_positive_edges, valid_negative_edges)
        print('tmp_accuracy:', tmp_AUC_score)
        print('end')
        # predicting: cosine similarity thresholded at 0.5 for the label
        prediction_list = list()
        label_list=list()
        for edge in test_edges:
            tmp_score = get_neighbourhood_score(model, str(edge[0]), str(edge[1]))
            prediction_list.append(tmp_score)
            label_list.append(("True","False")[tmp_score<=0.5])
        df = raw_test_data
        df["score"] = prediction_list
        df["label"]= label_list
        filename="test_"+str(cycle)+".csv"
        df.to_csv(filename, index=False)
        with open("acc_record_for_test.txt", "a") as myfile:
            myfile.write("acc for "+filename+" is "+str(tmp_AUC_score)+"\n")
        cycle+=1
16,811 | 71d4a9dc57be7fc542311a0810c631ba7b51ea6d | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import re
import string
# Read the saved wallpaper-listing page into memory.
# NOTE(review): this rebinds the name ``string``, shadowing the stdlib
# module imported above.
with open('1.txt','r',encoding='utf-8') as f:
    string = f.read()
def Find(string):
    """Return the unique wallpaper path fragments found in *string*.

    findall() lazily captures everything between the fixed prefix
    ``https://ftopx.com/pic/1920x1080`` and the next double quote;
    duplicates are dropped and the survivors newline-joined (set order,
    so the line order is not deterministic).

    Bug fix: a mojibake comment fragment had leaked into the body as a
    bare statement, raising NameError on every call; it is removed.
    """
    url = re.findall(r"https://ftopx.com/pic/1920x1080(.+?)\"", string)
    new_li1 = list(set(url))
    str1 = '\n'.join(new_li1)
    return str1
# Extract the unique paths and persist them for the second stage below.
str123 = ( Find(string))
fo = open("666.txt", "w")
fo.write(str123)
fo.close()
# In[ ]:
# Re-attach the URL prefix to each stored path and print the full URLs.
# Bug fix: the old readline() loop discarded the first line and, at EOF,
# never terminated because prefix + '' is still truthy.
with open('666.txt', 'r', encoding='utf-8') as f:
    for line in f:
        print('https://ftopx.com/pic/1920x1080' + line, end='')
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
|
16,812 | ae95a0bbe55f3f3ce5441437d34dc6be1d8041bb | import pytest
import time
import math
from selenium import webdriver
@pytest.fixture(scope="function")
def browser():
    """Yield a fresh Chrome WebDriver per test and quit it afterwards."""
    print("\nstart browser for test..")
    browser = webdriver.Chrome()
    yield browser
    # teardown runs after the test body finishes
    print("\nquit browser..")
    browser.quit()
@pytest.mark.parametrize('numbers', ["236895", "236896", "236897", "236898", "236899", "236903", "236904", "236905"])
def test_guest_should_see_login_link(browser, numbers):
    """Submit log(current unix time) on each Stepik lesson step and expect
    the smart hint to read "Correct!".

    NOTE(review): the function name is legacy — it no longer checks a
    login link (kept so test selection/reporting stays stable).
    """
    browser.implicitly_wait(8)
    link = f"https://stepik.org/lesson/{numbers}/step/1"
    browser.get(link)
    time.sleep(2)
    answer_field = browser.find_element_by_css_selector(".textarea")
    answer_field.send_keys(str(math.log(int(time.time()))))
    submit = browser.find_element_by_class_name("submit-submission")
    submit.click()
    time.sleep(1)
    message = browser.find_element_by_css_selector(".smart-hints__hint")
    time.sleep(1)
    print(message.text)
    # Bug fix: the assert message is shown on FAILURE; the old text
    # ("Everything ok!") was misleading.
    assert "Correct!" == message.text, f"Unexpected hint text: {message.text!r}"
|
16,813 | 20b4a709a1ea17515562a3fd0b74859653390a77 | import rospy
from colab_reachy_ros.srv import Speak
def say_something(sentence: str):
    """Speak *sentence* through the /speak ROS service.

    Blocks until the service is advertised, then performs one synchronous
    service call.
    """
    rospy.wait_for_service("/speak")
    speech_service = rospy.ServiceProxy("/speak", Speak)
    speech_service(sentence)
16,814 | e89cde63736d073818bf363950de6a3a77341c86 |
# Interactive beginner exercises (input/print/if-elif-else).
# NOTE(review): the Chinese text in this file is mojibake from a bad
# encoding conversion; literals are kept as-is but rejoined where the
# extraction split them mid-string.  Two genuine SyntaxErrors are fixed
# below (a full-width comma and an ``else`` carrying a condition).

# double a number
s = input('่ฏท่พๅฅๆฐๅญ๏ผ')
x = int(s)
print("็ปๆๆฏ:", x * 2)

# print() separator / terminator demos
print(1, 2, 3, 4)
print(1, 2, 3, 4, sep=' ')
print(1, 2, 3, 4, sep='#')
print(1, 2, 3, 4, sep=' ', end='\n')
print(5, 6, 7, 8, end='')
print("AAAAAA", end='')

# sum and product of two numbers
s = input('่ฏท่พๅฅ็ฌฌไธไธชๆฐๅญ๏ผ')
x = int(s)
y = int(input('่ฏท่พๅฅ็ฌฌไบไธชๆฐๅญ๏ผ'))
print('ๅไธบ๏ผ', x + y)
# BUG FIX: this comma was a full-width '๏ผ' (SyntaxError).
print('็งฏไธบ๏ผ', x * y)

# odd / even
x = int(input('่ฏท่พๅฅไธไธชๆดๆฐ๏ผ'))
if x % 2 == 1:
    print(x, "ๆฏๅฅๆฐ")
else:
    print(x, "ๆฏๅถๆฐ")

# range check (the original repeats this exercise twice; kept)
x = int(input('่ฏท่พๅฅไธไธชๆดๆฐ๏ผ'))
if 50 <= x <= 100:
    print(x, "ๅจ50ไธ100ไน้ด")
else:
    print(x, "ไธๅจ50~100ไน้ด")
if x < 0:
    print("ๅฐไบ0")
else:
    print("ไธๅฐไบ0")

x = int(input('่ฏท่พๅฅไธไธชๆดๆฐ๏ผ'))
if 50 <= x <= 100:
    print(x, "ๅจ50ไธ100ไน้ด")
else:
    print(x, "ไธๅจ50~100ไน้ด")
if x < 0:
    print("ๅฐไบ0")
else:
    print("ไธๅฐไบ0")

# sign of a number
x = int(input('่ฏท่พๅฅไธไธชๆฐๅญ๏ผ'))
if x > 0:
    print(x, 'ๆฏๆญฃๆฐ')
elif x < 0:
    print(x, 'ๆฏ่ดๆฐ')
else:
    print(x, 'ๆฏ้ถ')

# season by quarter number
season = int(input('่ฏทๆจ่พๅฅๅญฃๅบฆ(1~4)๏ผ'))
if season == 1:
    print("ๆฅๅญฃๆไธ๏ผไบ๏ผไธๆ")
elif season == 2:
    print("ๅคๅญฃๆๅ๏ผไบ๏ผๅญๆ")
elif season == 3:
    print("็งๅญฃๆไธ๏ผๅซ๏ผไนๆ")
elif season == 4:
    print('ๅฌๅญฃๆๅ๏ผๅไธ๏ผๅไบๆ')
else:
    print('ๆจ่พ้ไบ')

# season by month, flat chain
month = int(input('่ฏทๆจ่พๅฅๆไปฝ(1~12)๏ผ'))
if 1 <= month <= 3:
    print("ๆฅๅญฃ")
elif month <= 6:
    print('ๅคๅญฃ')
elif month <= 9:
    print('็งๅญฃ')
elif month <= 12:
    print('ๅฌๅญฃ')
else:
    print('ๆจ่พ้ไบ')

# season by month, nested validation variant
month = int(input('่ฏท่พๅฅๆไปฝ๏ผ๏ผ๏ฝ๏ผ๏ผ๏ผ๏ผ'))
if 1 <= month <= 12:
    print('ๅๆณ็ๆไปฝ')
    if month <= 3:
        print('ๆฅๅญฃ')
    elif month <= 6:
        print('ๅคๅญฃ')
    elif month <= 9:
        print('็งๅญฃ')
    # BUG FIX: original read ``else month <= 12:`` (SyntaxError).
    elif month <= 12:
        print('ๅฌๅญฃ')
else:
    print('ๆจ้ไบ')

# discount: 20 off when the total reaches 100
money = int(input('่ฏท่พๅฅๅๅๆป้้ข๏ผ'))
pay = money - 20 if money >= 100 else money
print("้ๆฏไป๏ผ", pay, "ๅ")

# absolute value: statement form, then conditional-expression form
x = int(input('่ฏท่พๅฅไธไธชๆฐ๏ผ'))
if x < 0:
    result = -x
else:
    result = x
print(x, '็็ปๅฏนๅผ๏ผ', result)
result = -x if x < 0 else x
|
16,815 | b707c5bfc810611a5c7cb7fa8e3296937c179ae5 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from rest_framework import status
from django.test import TestCase, Client
from django.urls import reverse
from FHLBuilder import models as db
from FHLBuilder import serializers as ss
from FHLBuilder import collection, choices
# Create your tests here.
# initialize the APIClient app
client = Client()
class SongTestAdd(TestCase):
    """Database-only checks: importing a directory of songs through
    CollectionMixins.add_members populates Collection and Song rows.

    Fix: removed a leftover debug ``print(self.user)`` from setUp.
    """
    def setUp(self):
        # A staff user is required by the collection-building code path.
        self.user = db.User.objects.create(username='testb',
            password='nothing',is_staff=True)
        client.login(username='testb',password='nothing')
        a = collection.CollectionMixins()
        kind = choices.SONG
        tag = 'abc'
        path = 'mp3s/Duffy'
        album, artist = a.add_members(path,2,kind,tag)
    def test1(self):
        # the fixture album is present with its canonical title
        rf = db.Collection.objects.get(title__icontains='rockferry')
        self.assertEqual(rf.title, 'Rockferry')
    def test2(self):
        # track metadata survives the import
        rf = db.Song.objects.get(title__icontains='mercy')
        self.assertEqual(rf.track,7)
    def test3(self):
        # all ten songs in the fixture directory were imported
        self.assertEqual(db.Song.objects.count(),10)
class SongTestGet(TestCase):
    """GET endpoints: the song list and detail views must mirror the
    serialized database contents, and unknown slugs must 404."""
    def setUp(self):
        # build the song fixtures via the collection mixin
        self.user = db.User.objects.create(username='testb',
            password='nothing',is_staff=True)
        client.login(username='testb',password='nothing')
        a = collection.CollectionMixins()
        kind = choices.SONG
        tag = 'abc'
        path = 'mp3s/Duffy'
        album, artist = a.add_members(path,2,kind,tag)
    def test_get_all_songs(self):
        response = client.get(reverse('builder_song_list'))
        # get data from db and compare against the API payload
        songs = db.Song.objects.all()
        serializer = ss.SongSerializer(songs,many=True)
        self.assertEqual(response.data,serializer.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_get_valid_single_song(self):
        # detail endpoint is addressed by slug
        rf = db.Song.objects.get(title__icontains='mercy')
        self.assertEqual(rf.track,7)
        response = client.get(reverse('builder_song_detail',
            kwargs={'slug': rf.slug }))
        serializer=ss.SongSerializer(rf)
        self.assertEqual(response.data,serializer.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_get_invalid_single_song(self):
        # unknown slug yields 404, not an empty 200
        response = client.get(reverse('builder_song_detail',
            kwargs={'slug': 'no_song_like_this' }))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class SongTestCreate(TestCase):
    """POST to the song list endpoint: a valid payload creates (201);
    a non-integer track or a duplicate slug is rejected (400)."""
    def setUp(self):
        self.user = db.User.objects.create(username='testb',
            password='nothing',is_staff=True)
        client.login(username='testb',password='nothing')
        a = collection.CollectionMixins()
        kind = choices.SONG
        tag = 'abc'
        path = 'mp3s/Duffy'
        album, artist = a.add_members(path,2,kind,tag)
        rf = db.Song.objects.get(title__icontains='mercy')
        # clone an existing song with a fresh slug -> should be accepted
        self.valid_payload = {
            'year': rf.year,
            'title': rf.title,
            'slug': 'a_slug',
            'fileName': rf.fileName,
            'track': rf.track,
            #'collection': album # todo figure out nested
        }
        # 'track' is not an integer -> serializer validation error
        self.invalid_payload = {
            'year': rf.year,
            'title': rf.title,
            'slug': 'b_slug',
            'fileName': rf.fileName,
            'track': 'a__a__a', # not valid integer
            #'collection': album # todo figure out nested
        }
        # re-uses an existing slug -> uniqueness violation
        self.invalid_payload1 = {
            'year': rf.year,
            'title': 'title',
            'slug': rf.slug, # error not unique
            'fileName': rf.fileName,
            'track': rf.track,
            #'collection': album # todo figure out nested
        }
    def test_create_valid_song(self):
        response = client.post(
            reverse('builder_song_list'),
            data=json.dumps(self.valid_payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_create_invalid_song(self):
        response = client.post(
            reverse('builder_song_list'),
            data=json.dumps(self.invalid_payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_create_invalid_song1(self):
        response = client.post(
            reverse('builder_song_list'),
            data=json.dumps(self.invalid_payload1),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class SongTestUpdate(TestCase):
    """PUT to the song detail endpoint: a full valid update succeeds
    (200); a non-integer track is rejected (400)."""
    def setUp(self):
        self.user = db.User.objects.create(username='testb',
            password='nothing',is_staff=True)
        client.login(username='testb',password='nothing')
        a = collection.CollectionMixins()
        kind = choices.SONG
        tag = 'abc'
        path = 'mp3s/Duffy'
        album, artist = a.add_members(path,2,kind,tag)
        self.rf = db.Song.objects.get(title__icontains='mercy')
        # same song with a changed year and fresh slug
        self.valid_payload = {
            'year': '1994',
            'title': self.rf.title,
            'slug': 'a_slug',
            'fileName': self.rf.fileName,
            'track': self.rf.track,
        }
        # 'track' is not an integer -> serializer validation error
        self.invalid_payload = {
            'year': '1938',
            'title': self.rf.title,
            'slug': 'b_slug',
            'fileName': self.rf.fileName,
            'track': 'a__a__a', # not valid integer
        }
    def test_update_valid_song(self):
        response = client.put(
            reverse('builder_song_detail',kwargs={'slug':self.rf.slug}),
            data=json.dumps(self.valid_payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_update_invalid_song(self):
        response = client.put(
            reverse('builder_song_detail',kwargs={'slug':self.rf.slug}),
            data=json.dumps(self.invalid_payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class SongTestDelete(TestCase):
    """DELETE on the song detail endpoint: an existing slug returns
    204 No Content; an unknown slug returns 404."""
    def setUp(self):
        self.user = db.User.objects.create(username='testb',
            password='nothing',is_staff=True)
        client.login(username='testb',password='nothing')
        a = collection.CollectionMixins()
        kind = choices.SONG
        tag = 'abc'
        path = 'mp3s/Duffy'
        album, artist = a.add_members(path,2,kind,tag)
    def test_delete_valid_single_song(self):
        rf = db.Song.objects.get(title__icontains='mercy')
        self.assertEqual(rf.track,7)
        response = client.delete(reverse('builder_song_detail',
            kwargs={'slug': rf.slug }))
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
    def test_delete_invalid_single_song(self):
        response = client.delete(reverse('builder_song_detail',
            kwargs={'slug': 'no_song_like_this' }))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
|
16,816 | 3e4e1499350cbc156640fc63b55a47f459b95060 | import numpy as np
import cv2 as cv
import matplotlib.pyplot as plt
import random
def sp_noise(image, prob):
    """Return a copy of *image* with salt-and-pepper noise.

    Each pixel independently becomes 0 with probability *prob*, 255 with
    probability *prob*, and is copied unchanged otherwise.
    """
    noisy = np.zeros(image.shape, np.uint8)
    salt_threshold = 1 - prob
    for row in range(image.shape[0]):
        for col in range(image.shape[1]):
            draw = random.random()
            if draw < prob:
                noisy[row][col] = 0      # pepper
            elif draw > salt_threshold:
                noisy[row][col] = 255    # salt
            else:
                noisy[row][col] = image[row][col]
    return noisy
# Demo: add 20% salt & pepper noise to a grayscale and a colour copy of
# the same image, show the noisy histogram, and denoise the colour one.
image = cv.imread('./images/kakashi.jpeg',0)
imag= cv.imread('./images/kakashi.jpeg')
noise_img=sp_noise(image,0.2)
n_img=sp_noise(imag,0.2)
plt.subplot(141),plt.imshow(image,cmap="gray")
plt.title("original Image"),plt.xticks([]),plt.yticks([])
plt.subplot(142),plt.imshow(noise_img,cmap="gray")
plt.title("S&P noise Image"),plt.xticks([]),plt.yticks([])
plt.subplot(143),plt.hist(noise_img.ravel(),256,[0,256])
plt.title("Histogram"),plt.xticks([]),plt.yticks([])
# non-local-means denoising: h=10, hColor=10, template 7, search 21
dst = cv.fastNlMeansDenoisingColored(n_img,None,10,10,7,21)
plt.subplot(144), plt.imshow(dst)
plt.show()
|
16,817 | 0084c18006ab4ff8c1f8e33cbc2e67c35482d72c | import os
import pickle
from random import shuffle
from argparse import ArgumentParser
from collections import namedtuple
Data = namedtuple('Data', ['x', 'y', 'pos', 'total'])
def process_command():
    """Parse command-line options for the bin-combining tool.

    Returns:
        argparse.Namespace with src, dst, bin_name and shuffle_data.
    """
    parser = ArgumentParser(description="Combine small bins into a large bin.")
    parser.add_argument(
        '--src', type=str, default=os.path.join(os.curdir, "all_bins"),
        help="Path to directory that stores small bins. (default: %(default)s)"
    )
    parser.add_argument(
        '--dst', type=str, default=os.curdir,
        help="Path of the output folder. (default: %(default)s)"
    )
    parser.add_argument(
        '--bin_name', type=str, default="tensor.bin",
        help="Name of the large bin. (default: %(default)s)"
    )
    parser.add_argument(
        # BUG FIX: ``type=bool`` treats every non-empty string (even
        # "False") as True; parse truthy strings explicitly instead.
        '--shuffle_data',
        type=lambda s: s.lower() in ("1", "true", "yes"),
        default=False,
        help="Shuffle data after loaded all data. (default: %(default)s)"
    )
    return parser.parse_args()
def load_data_from_one_file_path(file_path):
    """Read one small bin: pickled total, X, Y, pos — in that order."""
    with open(file_path, "rb") as f:
        total = int(pickle.load(f))
        x_items = pickle.load(f)
        y_items = pickle.load(f)
        positions = pickle.load(f)
    return Data(x=x_items, y=y_items, pos=positions, total=total)
def load_data_from(directory_path, need_shuffle_file_paths=False):
    """Concatenate every small bin found in *directory_path*.

    Files are handled in sorted name order unless shuffling is requested.
    Returns one Data with summed total and concatenated x/y/pos lists.
    """
    file_names = sorted(os.listdir(directory_path))
    if need_shuffle_file_paths:
        shuffle(file_names)
    xs, ys, positions = [], [], []
    grand_total = 0
    for name in file_names:
        full_path = os.path.abspath(os.path.join(directory_path, name))
        part = load_data_from_one_file_path(full_path)
        grand_total += part.total
        xs += part.x
        ys += part.y
        positions += part.pos
        print("[INFO] Data loaded: {}".format(full_path))
    return Data(x=xs, y=ys, pos=positions, total=grand_total)
def pickle_dump(obj, file):
    """pickle.dump with the highest protocol pinned in one place."""
    return pickle.dump(obj, file, protocol=pickle.HIGHEST_PROTOCOL)
def output_data(dst, data):
    """Write *data* (a Data tuple) to *dst* in the small-bin layout:
    total, x, y, pos pickled in sequence."""
    print("[INFO] Output: {}".format(os.path.abspath(dst)))
    with open(dst, "wb") as sink:
        for part in (data.total, data.x, data.y, data.pos):
            pickle_dump(part, sink)
def main():
    """Entry point: parse args, merge all bins from src, write one bin."""
    args = process_command()
    merged = load_data_from(
        directory_path=args.src,
        need_shuffle_file_paths=args.shuffle_data,
    )
    output_data(dst=os.path.join(args.dst, args.bin_name), data=merged)
if __name__ == "__main__":
main()
|
16,818 | bdea0dafdf9a9e828ee73ed046bb0782ddccd267 | from __future__ import division
import random
from deap import creator, base, tools, algorithms
import os
import time
import config
import melody
import write
import miditotxt
import discriminator
def evaluate(individual):
    """DEAP fitness function: average discriminator score over several
    melodies generated from the individual's configuration.

    Returns a 1-tuple, as DEAP requires.

    NOTE(review): the global ``tim`` accumulates process time with a
    += before and -= after the generation steps, effectively excluding
    generation work from the external timer — confirm this is intended.
    """
    global tim
    config.listToConfig(individual)
    config.init()
    x = 0
    for i in range(config.evaluationToAverage):
        tim += time.process_time()
        melody.generate()
        write.write([melody.melody], [melody.rhythm])
        write.save()
        track = miditotxt.rewrite('output.mid')
        tim -= time.process_time()
        x += discriminator.evaluate(track)
    return x/config.evaluationToAverage,
# --- DEAP scaffolding -------------------------------------------------
# Single-objective maximisation; individuals are flat lists of floats
# in [0, 1] (see the clamping step in evolve()).
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)
toolbox = base.Toolbox()
toolbox.register("attribute", random.random)
toolbox.register("individual", tools.initRepeat, creator.Individual,
                 toolbox.attribute, n=config.IND_SIZE)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=0.2, indpb=0.1)
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("evaluate", evaluate)
# each genetic run is persisted as a numbered file under geneticresults/
if not os.path.exists("geneticresults/"):
    os.makedirs("geneticresults/")
def evolve():
    """Run the canonical DEAP generational loop and return the final
    population.

    Tournament selection, two-point crossover with probability CXPB,
    Gaussian mutation with probability MUTPB, for NGEN generations;
    genes are clamped to [0, 1] after variation.
    """
    pop = toolbox.population(n=config.geneticPopulationSize)
    CXPB, MUTPB, NGEN = config.geneticCXPB, config.geneticMUTPB, config.geneticNGEN
    # Evaluate the entire population
    fitnesses = map(toolbox.evaluate, pop)
    for ind, fit in zip(pop, fitnesses):
        ind.fitness.values = fit
    for g in range(NGEN):
        # Select the next generation individuals
        offspring = toolbox.select(pop, len(pop))
        # Clone the selected individuals
        offspring = list(map(toolbox.clone, offspring))
        # Apply crossover and mutation on the offspring; deleting the
        # fitness marks a modified individual for re-evaluation below
        for child1, child2 in zip(offspring[::2], offspring[1::2]):
            if random.random() < CXPB:
                toolbox.mate(child1, child2)
                del child1.fitness.values
                del child2.fitness.values
        for mutant in offspring:
            if random.random() < MUTPB:
                toolbox.mutate(mutant)
                del mutant.fitness.values
        # Make sure everyone is in boundaries
        for i in range(len(offspring)):
            for j in range(len(offspring[i])):
                offspring[i][j] = min(1.0, max(0.0, offspring[i][j]))
        # Evaluate the individuals with an invalid fitness
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit
        # The population is entirely replaced by the offspring
        pop[:] = offspring
    return pop
def autorun():
    """Evolve once, persist the best genome under geneticresults/, and
    load it into the live config.

    Output files are numbered sequentially by counting existing files.
    """
    global tim
    tim -= time.process_time()
    pop = evolve()
    maximum = -1
    maxIndividual = []
    for individual in pop:
        # BUG FIX: evaluate() used to be called twice per individual;
        # it is expensive AND stochastic, so the compared score and the
        # recorded maximum could disagree.
        score = evaluate(individual)[0]
        if maximum < score:
            maximum = score
            maxIndividual = individual
    print(maximum)
    next_index = len([name for name in os.listdir('geneticresults/')
                      if os.path.isfile('geneticresults/' + name)])
    # context manager fixes the previously leaked file handle
    with open("geneticresults/%d.txt" % next_index, "w+") as f:
        for gene in maxIndividual:
            f.write("%f\n" % gene)
    config.listToConfig(maxIndividual)
    tim += time.process_time()
def load(i):
    """Load genome file *i* from geneticresults/ into the config.

    Negative *i* indexes from the end (-1 = most recent file).

    Fixes: file handle was leaked (now closed via ``with``) and the
    local variable typo 'indidvidual' is corrected.
    """
    if i < 0:
        count = len([name for name in os.listdir('geneticresults/')
                     if os.path.isfile('geneticresults/' + name)])
        path = "geneticresults/%d.txt" % (count + i)
    else:
        path = "geneticresults/%d.txt" % i
    with open(path, "r") as f:
        individual = list(map(float, f.read().splitlines()))
    config.listToConfig(individual)
|
16,819 | 800b3ebe53323ec3c08f0bbc4da66d9c674dea73 | from fastapi import FastAPI
from fastapi_utils.openapi import simplify_operation_ids
from pydantic import BaseModel, Field
from typing import Optional
from enum import Enum
from domain.security.command import login
app = FastAPI()
class Result(BaseModel):
    """Generic operation outcome carrying a free-form status string."""
    status: str
class Empty(BaseModel):
    """Payload-free marker model for result variants with no data."""
    pass
class AddTopicResultType(str, Enum):
    """Discriminator for the AddTopicResponse tagged union."""
    TOPIC_ADDED = "TOPIC_ADDED"
    DUPLICATE_TOPIC_ERROR = "DUPLICATE_TOPIC_ERROR"
class TopicAdded(BaseModel):
    """Successful add_topic payload: the new topic's identifier."""
    id: str
class AddTopicResponse(BaseModel):
    """Tagged-union response: only the field named by *result_type* is
    expected to be populated."""
    result_type: AddTopicResultType
    TOPIC_ADDED: Optional[TopicAdded]
    DUPLICATE_TOPIC_ERROR: Optional[Empty]
app.include_router(login.router)
#@app.post(
# "/api/inventory/command/get_available_topics",
# tags=["inventory"]
#)
#async def getAvailableTopics():
# pass
#
#@app.post(
# "/api/identity/command/authenticate",
# tags=["identity"]
#)
#async def authenticate():
# pass
#
#@app.post(
# "/api/access/query/get_actions",
# tags=["access"]
#)
#async def getActions():
# pass
#
#@app.post(
# "/api/topic/command/add_topic",
# description="".join([
# "this is a description. this is a description. this is a description. this is a description. this is a description. ",
# "<br />",
# "this is a description. this is a description. this is a description. this is a description. this is a description. ",
# "<br />",
# "this is a description. this is a description. this is a description. this is a description. this is a description. "
# ]),
# tags=["topic"],
# response_model=AddTopicResponse
#)
#async def addTopic():
# pass
#
#@app.post(
# "/api/topic/command/rename_topic",
# tags=["topic"]
#)
#async def renameTopic():
# pass
#
#@app.post(
# "/api/topic/command/remove_topic",
# tags=["topic"]
#)
#async def removeTopic():
# pass
simplify_operation_ids(app)
|
16,820 | f8bcd663b293776f61809a9c704317590c741468 | # Given an array of integers nums sorted in ascending order, find the starting and ending position of a given target value.
# Your algorithm's runtime complexity must be in the order of O(log n).
# If the target is not found in the array, return [-1, -1].
# Example 1:
# Input: nums = [5,7,7,8,8,10], target = 8
# Output: [3,4]
# Example 2:
# Input: nums = [5,7,7,8,8,10], target = 6
# Output: [-1,-1]
class Solution(object):
    def searchRange(self, nums, target):
        """Find the first and last index of *target* in sorted *nums*.

        Two O(log n) binary searches: one biased left for the first
        occurrence, one (starting from it) biased right for the last.
        Returns [-1, -1] when the target is absent.

        :type nums: List[int]
        :type target: int
        :rtype: List[int]
        """
        length = len(nums)
        lo, hi = 0, length - 1
        left = None
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if nums[mid] < target:
                lo = mid + 1
            elif target < nums[mid]:
                hi = mid - 1
            elif mid == 0 or nums[mid - 1] != target:
                left = mid
                break
            else:
                hi = mid - 1
        # Idiom fix: compare against None with ``is``, not ``==``.
        if left is None:
            return [-1, -1]
        lo, hi = left, length - 1
        right = left  # target exists, so the result is at least [left, left]
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if target < nums[mid]:
                hi = mid - 1
            elif mid == length - 1 or nums[mid + 1] != target:
                right = mid
                break
            else:
                lo = mid + 1
        return [left, right]
# Ad-hoc smoke test: expected output [3, 5] and [0, 1].
s = Solution()
print(s.searchRange([1, 1, 1, 8, 8, 8, 9, 9], 8))
print(s.searchRange([2, 2], 2))
|
16,821 | ebbd1d1abc43ad318ed2438f653e0e1681eb3569 | # coding: utf-8
"""
Daraa
This data store is offered by CubeWerx Inc. as a demonstration of its in-progress OGC API implementation. # noqa: E501
The version of the OpenAPI document: 9.3.45
Contact: mgalluch@cubewerx.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from openapi_client.api_client import ApiClient
from openapi_client.exceptions import (
ApiTypeError,
ApiValueError
)
class MapTilesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
    def __init__(self, api_client=None):
        """Create the API facade; falls back to a default-configured
        ApiClient when none is injected."""
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
def collections_collection_id_map_tiles_get(self, collection_id, **kwargs): # noqa: E501
"""A metadata document describing the map tilesets that are available for the specified collection. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_map_tiles_get(collection_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
:param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
:return: Tilesets
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_map_tiles_get_with_http_info(collection_id, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_map_tiles_get_with_http_info(collection_id, **kwargs) # noqa: E501
return data
    def collections_collection_id_map_tiles_get_with_http_info(self, collection_id, **kwargs):  # noqa: E501
        """A metadata document describing the map tilesets that are available for the specified collection.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.collections_collection_id_map_tiles_get_with_http_info(collection_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
        :param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
        :return: Tilesets
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot of the call's arguments (self, collection_id, kwargs);
        # taken first so the mapping holds exactly the arguments.
        local_var_params = locals()
        all_params = ['collection_id', 'style_id', 'f', 'pretty']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Flatten recognised keyword arguments into local_var_params and
        # reject anything unexpected.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_map_tiles_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_map_tiles_get`")  # noqa: E501
        collection_formats = {}
        # Path-template substitutions for the request URL.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        # Optional query-string parameters; only forwarded when supplied.
        query_params = []
        if 'style_id' in local_var_params:
            query_params.append(('styleId', local_var_params['style_id']))  # noqa: E501
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        if 'pretty' in local_var_params:
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no request body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html', 'application/problem+json', 'text/xml', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/collections/{collectionId}/map/tiles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Tilesets',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collection_id_map_tiles_tile_matrix_set_id_get(self, collection_id, tile_matrix_set_id, **kwargs): # noqa: E501
"""A metadata document describing the map tiles that are available for the specified collection in the specified tile-matrix set. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_map_tiles_tile_matrix_set_id_get(collection_id, tile_matrix_set_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
:param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
:return: Tileset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_map_tiles_tile_matrix_set_id_get_with_http_info(collection_id, tile_matrix_set_id, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_map_tiles_tile_matrix_set_id_get_with_http_info(collection_id, tile_matrix_set_id, **kwargs) # noqa: E501
return data
    def collections_collection_id_map_tiles_tile_matrix_set_id_get_with_http_info(self, collection_id, tile_matrix_set_id, **kwargs):  # noqa: E501
        """A metadata document describing the map tiles that are available for the specified collection in the specified tile-matrix set.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.collections_collection_id_map_tiles_tile_matrix_set_id_get_with_http_info(collection_id, tile_matrix_set_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
        :param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
        :return: Tileset
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot of the call's arguments; taken first so the mapping
        # holds exactly the arguments.
        local_var_params = locals()
        all_params = ['collection_id', 'tile_matrix_set_id', 'style_id', 'f', 'pretty']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Flatten recognised keyword arguments and reject anything
        # unexpected.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_map_tiles_tile_matrix_set_id_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix_set_id' is set
        if ('tile_matrix_set_id' not in local_var_params or
                local_var_params['tile_matrix_set_id'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix_set_id` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_get`")  # noqa: E501
        collection_formats = {}
        # Path-template substitutions for the request URL.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        if 'tile_matrix_set_id' in local_var_params:
            path_params['tileMatrixSetId'] = local_var_params['tile_matrix_set_id']  # noqa: E501
        # Optional query-string parameters; only forwarded when supplied.
        query_params = []
        if 'style_id' in local_var_params:
            query_params.append(('styleId', local_var_params['style_id']))  # noqa: E501
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        if 'pretty' in local_var_params:
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no request body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html', 'application/problem+json', 'text/xml', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/collections/{collectionId}/map/tiles/{tileMatrixSetId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Tileset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get(self, collection_id, tile_matrix_set_id, tile_matrix, **kwargs): # noqa: E501
"""A legend image showing a graphical representation of the specified collection as it would appear in the corresponding map tiles of the specified zoom level. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get(collection_id, tile_matrix_set_id, tile_matrix, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
:param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
:param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
:return: Tileset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(collection_id, tile_matrix_set_id, tile_matrix, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(collection_id, tile_matrix_set_id, tile_matrix, **kwargs) # noqa: E501
return data
    def collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(self, collection_id, tile_matrix_set_id, tile_matrix, **kwargs):  # noqa: E501
        """A legend image showing a graphical representation of the specified collection as it would appear in the corresponding map tiles of the specified zoom level.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(collection_id, tile_matrix_set_id, tile_matrix, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
        :param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
        :param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
        :return: Tileset
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot of the call's arguments; taken first so the mapping
        # holds exactly the arguments.
        local_var_params = locals()
        all_params = ['collection_id', 'tile_matrix_set_id', 'tile_matrix', 'style_id', 'f', 'pretty']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Flatten recognised keyword arguments and reject anything
        # unexpected.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix_set_id' is set
        if ('tile_matrix_set_id' not in local_var_params or
                local_var_params['tile_matrix_set_id'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix_set_id` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix' is set
        if ('tile_matrix' not in local_var_params or
                local_var_params['tile_matrix'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get`")  # noqa: E501
        collection_formats = {}
        # Path-template substitutions for the request URL.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        if 'tile_matrix_set_id' in local_var_params:
            path_params['tileMatrixSetId'] = local_var_params['tile_matrix_set_id']  # noqa: E501
        if 'tile_matrix' in local_var_params:
            path_params['tileMatrix'] = local_var_params['tile_matrix']  # noqa: E501
        # Optional query-string parameters; only forwarded when supplied.
        query_params = []
        if 'style_id' in local_var_params:
            query_params.append(('styleId', local_var_params['style_id']))  # noqa: E501
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        if 'pretty' in local_var_params:
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no request body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html', 'application/problem+json', 'text/xml', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/collections/{collectionId}/map/tiles/{tileMatrixSetId}/{tileMatrix}/legend', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Tileset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get(self, collection_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs): # noqa: E501
"""A map tile of the specified collection. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get(collection_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
:param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
:param int tile_row: The zero-based row index of the tile in the specified tile matrix. (required)
:param int tile_col: The zero-based column index of the tile in the specified tile matrix. (required)
:param str datetime: Either a date-time or an interval, open or closed. Date and time expressions adhere to RFC 3339. Open intervals are expressed using double-dots. Examples: * A date-time: \"2018-02-12T23:20:50Z\" * A closed interval: \"2018-02-12T00:00:00Z/2018-03-18T12:31:12Z\" * Open intervals: \"2018-02-12T00:00:00Z/..\" or \"../2018-03-18T12:31:12Z\" Only elements that have a temporal property that intersects the value of `datetime` are selected.
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
:param bool transparent: Whether or not the background of the image should be transparent (when supported by the requested image format).
:param str bgcolor: Hexadecimal red-green-blue color value for the background color. If not specified, the background color specified by the style (0xFFFFFF by default) will be used.
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present. A value of \"jop\" (content type \"image/x-jpegorpng\") indicates that either JPEG or PNG should be returned, whichever the server deems to be most appropriate for this tile.
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(collection_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(collection_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs) # noqa: E501
return data
    def collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(self, collection_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs):  # noqa: E501
        """A map tile of the specified collection.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(collection_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
        :param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
        :param int tile_row: The zero-based row index of the tile in the specified tile matrix. (required)
        :param int tile_col: The zero-based column index of the tile in the specified tile matrix. (required)
        :param str datetime: Either a date-time or an interval, open or closed. Date and time expressions adhere to RFC 3339. Open intervals are expressed using double-dots. Examples: * A date-time: \"2018-02-12T23:20:50Z\" * A closed interval: \"2018-02-12T00:00:00Z/2018-03-18T12:31:12Z\" * Open intervals: \"2018-02-12T00:00:00Z/..\" or \"../2018-03-18T12:31:12Z\" Only elements that have a temporal property that intersects the value of `datetime` are selected.
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
        :param bool transparent: Whether or not the background of the image should be transparent (when supported by the requested image format).
        :param str bgcolor: Hexadecimal red-green-blue color value for the background color. If not specified, the background color specified by the style (0xFFFFFF by default) will be used.
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present. A value of \"jop\" (content type \"image/x-jpegorpng\") indicates that either JPEG or PNG should be returned, whichever the server deems to be most appropriate for this tile.
        :return: file
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot of the call's arguments; taken first so the mapping
        # holds exactly the arguments.
        local_var_params = locals()
        all_params = ['collection_id', 'tile_matrix_set_id', 'tile_matrix', 'tile_row', 'tile_col', 'datetime', 'style_id', 'transparent', 'bgcolor', 'f']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Flatten recognised keyword arguments and reject anything
        # unexpected.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix_set_id' is set
        if ('tile_matrix_set_id' not in local_var_params or
                local_var_params['tile_matrix_set_id'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix_set_id` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix' is set
        if ('tile_matrix' not in local_var_params or
                local_var_params['tile_matrix'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_row' is set
        if ('tile_row' not in local_var_params or
                local_var_params['tile_row'] is None):
            raise ApiValueError("Missing the required parameter `tile_row` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_col' is set
        if ('tile_col' not in local_var_params or
                local_var_params['tile_col'] is None):
            raise ApiValueError("Missing the required parameter `tile_col` when calling `collections_collection_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        collection_formats = {}
        # Path-template substitutions for the request URL.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        if 'tile_matrix_set_id' in local_var_params:
            path_params['tileMatrixSetId'] = local_var_params['tile_matrix_set_id']  # noqa: E501
        if 'tile_matrix' in local_var_params:
            path_params['tileMatrix'] = local_var_params['tile_matrix']  # noqa: E501
        if 'tile_row' in local_var_params:
            path_params['tileRow'] = local_var_params['tile_row']  # noqa: E501
        if 'tile_col' in local_var_params:
            path_params['tileCol'] = local_var_params['tile_col']  # noqa: E501
        # Optional query-string parameters; only forwarded when supplied.
        query_params = []
        if 'datetime' in local_var_params:
            query_params.append(('datetime', local_var_params['datetime']))  # noqa: E501
        if 'style_id' in local_var_params:
            query_params.append(('styleId', local_var_params['style_id']))  # noqa: E501
        if 'transparent' in local_var_params:
            query_params.append(('transparent', local_var_params['transparent']))  # noqa: E501
        if 'bgcolor' in local_var_params:
            query_params.append(('bgcolor', local_var_params['bgcolor']))  # noqa: E501
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no request body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['image/x-jpegorpng', 'image/jpeg', 'image/png', 'image/gif', 'application/x-cubewerx-wkb', 'application/problem+json', 'text/xml', 'text/html', 'application/json', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/collections/{collectionId}/map/tiles/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='file',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collection_id_styles_style_id_map_tiles_get(self, collection_id, style_id, **kwargs): # noqa: E501
"""A metadata document describing the map tiles that are available for the specified collection in the specified style. (Note: The output of this endpoint is likely changing.) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_styles_style_id_map_tiles_get(collection_id, style_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
:param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
:return: Tilesets
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_styles_style_id_map_tiles_get_with_http_info(collection_id, style_id, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_styles_style_id_map_tiles_get_with_http_info(collection_id, style_id, **kwargs) # noqa: E501
return data
    def collections_collection_id_styles_style_id_map_tiles_get_with_http_info(self, collection_id, style_id, **kwargs):  # noqa: E501
        """A metadata document describing the map tiles that are available for the specified collection in the specified style. (Note: The output of this endpoint is likely changing.)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.collections_collection_id_styles_style_id_map_tiles_get_with_http_info(collection_id, style_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
        :param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
        :return: Tilesets
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot of the call's arguments; taken first so the mapping
        # holds exactly the arguments.
        local_var_params = locals()
        all_params = ['collection_id', 'style_id', 'f', 'pretty']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Flatten recognised keyword arguments and reject anything
        # unexpected.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_styles_style_id_map_tiles_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_styles_style_id_map_tiles_get`")  # noqa: E501
        # verify the required parameter 'style_id' is set
        if ('style_id' not in local_var_params or
                local_var_params['style_id'] is None):
            raise ApiValueError("Missing the required parameter `style_id` when calling `collections_collection_id_styles_style_id_map_tiles_get`")  # noqa: E501
        collection_formats = {}
        # Path-template substitutions for the request URL.  Note: style_id
        # is a path parameter on this endpoint, not a query parameter.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        if 'style_id' in local_var_params:
            path_params['styleId'] = local_var_params['style_id']  # noqa: E501
        # Optional query-string parameters; only forwarded when supplied.
        query_params = []
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        if 'pretty' in local_var_params:
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no request body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html', 'application/problem+json', 'text/xml', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/collections/{collectionId}/styles/{styleId}/map/tiles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Tilesets',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get(self, collection_id, style_id, tile_matrix_set_id, **kwargs): # noqa: E501
"""A metadata document describing the map tiles that are available for the specified collection in the specified tile-matrix set in the specified style. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get(collection_id, style_id, tile_matrix_set_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
:param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
:param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
:return: Tileset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get_with_http_info(collection_id, style_id, tile_matrix_set_id, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get_with_http_info(collection_id, style_id, tile_matrix_set_id, **kwargs) # noqa: E501
return data
    def collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get_with_http_info(self, collection_id, style_id, tile_matrix_set_id, **kwargs):  # noqa: E501
        """A metadata document describing the map tiles that are available for the specified collection in the specified tile-matrix set in the specified style.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get_with_http_info(collection_id, style_id, tile_matrix_set_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
        :param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
        :param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
        :return: Tileset
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must remain the first statement so it captures exactly
        # the declared parameters (plus the 'kwargs' dict) and nothing else.
        local_var_params = locals()
        all_params = ['collection_id', 'style_id', 'tile_matrix_set_id', 'f', 'pretty']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the known ones into
        # local_var_params so every parameter can be looked up uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get`")  # noqa: E501
        # verify the required parameter 'style_id' is set
        if ('style_id' not in local_var_params or
                local_var_params['style_id'] is None):
            raise ApiValueError("Missing the required parameter `style_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix_set_id' is set
        if ('tile_matrix_set_id' not in local_var_params or
                local_var_params['tile_matrix_set_id'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix_set_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_get`")  # noqa: E501
        collection_formats = {}
        # Values substituted into the {placeholders} of the request path.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        if 'style_id' in local_var_params:
            path_params['styleId'] = local_var_params['style_id']  # noqa: E501
        if 'tile_matrix_set_id' in local_var_params:
            path_params['tileMatrixSetId'] = local_var_params['tile_matrix_set_id']  # noqa: E501
        # Optional query-string parameters (only sent when supplied).
        query_params = []
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        if 'pretty' in local_var_params:
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html', 'application/problem+json', 'text/xml', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # Delegate the actual HTTP round trip to the shared ApiClient.
        return self.api_client.call_api(
            '/collections/{collectionId}/styles/{styleId}/map/tiles/{tileMatrixSetId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Tileset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get(self, collection_id, style_id, tile_matrix_set_id, tile_matrix, **kwargs): # noqa: E501
"""A legend image showing a graphical representation of the specified collection as it would appear in the corresponding map tiles of the specified zoom level in the specified style. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get(collection_id, style_id, tile_matrix_set_id, tile_matrix, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
:param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
:param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
:param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
:return: Tileset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(collection_id, style_id, tile_matrix_set_id, tile_matrix, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(collection_id, style_id, tile_matrix_set_id, tile_matrix, **kwargs) # noqa: E501
return data
    def collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(self, collection_id, style_id, tile_matrix_set_id, tile_matrix, **kwargs):  # noqa: E501
        """A legend image showing a graphical representation of the specified collection as it would appear in the corresponding map tiles of the specified zoom level in the specified style.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get_with_http_info(collection_id, style_id, tile_matrix_set_id, tile_matrix, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
        :param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
        :param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present.
        :param bool pretty: Whether or not the output should be pretty-formatted (with whitespace, etc.).
        :return: Tileset
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must remain the first statement so it captures exactly
        # the declared parameters (plus the 'kwargs' dict) and nothing else.
        local_var_params = locals()
        all_params = ['collection_id', 'style_id', 'tile_matrix_set_id', 'tile_matrix', 'f', 'pretty']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the known ones into
        # local_var_params so every parameter can be looked up uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get`")  # noqa: E501
        # verify the required parameter 'style_id' is set
        if ('style_id' not in local_var_params or
                local_var_params['style_id'] is None):
            raise ApiValueError("Missing the required parameter `style_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix_set_id' is set
        if ('tile_matrix_set_id' not in local_var_params or
                local_var_params['tile_matrix_set_id'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix_set_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix' is set
        if ('tile_matrix' not in local_var_params or
                local_var_params['tile_matrix'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_legend_get`")  # noqa: E501
        collection_formats = {}
        # Values substituted into the {placeholders} of the request path.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        if 'style_id' in local_var_params:
            path_params['styleId'] = local_var_params['style_id']  # noqa: E501
        if 'tile_matrix_set_id' in local_var_params:
            path_params['tileMatrixSetId'] = local_var_params['tile_matrix_set_id']  # noqa: E501
        if 'tile_matrix' in local_var_params:
            path_params['tileMatrix'] = local_var_params['tile_matrix']  # noqa: E501
        # Optional query-string parameters (only sent when supplied).
        query_params = []
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        if 'pretty' in local_var_params:
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/html', 'application/problem+json', 'text/xml', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # NOTE(review): response_type 'Tileset' looks odd for a /legend image
        # endpoint — this is generator output; confirm against the OpenAPI spec.
        return self.api_client.call_api(
            '/collections/{collectionId}/styles/{styleId}/map/tiles/{tileMatrixSetId}/{tileMatrix}/legend', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Tileset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get(self, collection_id, style_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs): # noqa: E501
"""A map tile of the specified collection in the specified style. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get(collection_id, style_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collection_id: The identifier of a collection in this data store. (required)
:param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
:param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
:param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
:param int tile_row: The zero-based row index of the tile in the specified tile matrix. (required)
:param int tile_col: The zero-based column index of the tile in the specified tile matrix. (required)
:param str datetime: Either a date-time or an interval, open or closed. Date and time expressions adhere to RFC 3339. Open intervals are expressed using double-dots. Examples: * A date-time: \"2018-02-12T23:20:50Z\" * A closed interval: \"2018-02-12T00:00:00Z/2018-03-18T12:31:12Z\" * Open intervals: \"2018-02-12T00:00:00Z/..\" or \"../2018-03-18T12:31:12Z\" Only elements that have a temporal property that intersects the value of `datetime` are selected.
:param str style_id2: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
:param bool transparent: Whether or not the background of the image should be transparent (when supported by the requested image format).
:param str bgcolor: Hexadecimal red-green-blue color value for the background color. If not specified, the background color specified by the style (0xFFFFFF by default) will be used.
:param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present. A value of \"jop\" (content type \"image/x-jpegorpng\") indicates that either JPEG or PNG should be returned, whichever the server deems to be most appropriate for this tile.
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(collection_id, style_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs) # noqa: E501
else:
(data) = self.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(collection_id, style_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs) # noqa: E501
return data
    def collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(self, collection_id, style_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, **kwargs):  # noqa: E501
        """A map tile of the specified collection in the specified style.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get_with_http_info(collection_id, style_id, tile_matrix_set_id, tile_matrix, tile_row, tile_col, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str collection_id: The identifier of a collection in this data store. (required)
        :param str style_id: The identifier of a style in this data store that's compatible with the specified collection. (required)
        :param str tile_matrix_set_id: The identifier of a tile-matrix set that this collection is available in. (required)
        :param str tile_matrix: The identifier of a tile matrix (typically a zoom level) within the specified tile-matrix set. (required)
        :param int tile_row: The zero-based row index of the tile in the specified tile matrix. (required)
        :param int tile_col: The zero-based column index of the tile in the specified tile matrix. (required)
        :param str datetime: Either a date-time or an interval, open or closed. Date and time expressions adhere to RFC 3339. Open intervals are expressed using double-dots. Examples: * A date-time: \"2018-02-12T23:20:50Z\" * A closed interval: \"2018-02-12T00:00:00Z/2018-03-18T12:31:12Z\" * Open intervals: \"2018-02-12T00:00:00Z/..\" or \"../2018-03-18T12:31:12Z\" Only elements that have a temporal property that intersects the value of `datetime` are selected.
        :param str style_id2: The identifier of a style in this data store that's compatible with the specified collection. If no styleId parameter is present, the default style of the collection will be rendered.
        :param bool transparent: Whether or not the background of the image should be transparent (when supported by the requested image format).
        :param str bgcolor: Hexadecimal red-green-blue color value for the background color. If not specified, the background color specified by the style (0xFFFFFF by default) will be used.
        :param str f: A token indicating the content type to return. Overrides the HTTP \"Accept\" header if present. A value of \"jop\" (content type \"image/x-jpegorpng\") indicates that either JPEG or PNG should be returned, whichever the server deems to be most appropriate for this tile.
        :return: file
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must remain the first statement so it captures exactly
        # the declared parameters (plus the 'kwargs' dict) and nothing else.
        local_var_params = locals()
        all_params = ['collection_id', 'style_id', 'tile_matrix_set_id', 'tile_matrix', 'tile_row', 'tile_col', 'datetime', 'style_id2', 'transparent', 'bgcolor', 'f']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the known ones into
        # local_var_params so every parameter can be looked up uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'collection_id' is set
        if ('collection_id' not in local_var_params or
                local_var_params['collection_id'] is None):
            raise ApiValueError("Missing the required parameter `collection_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'style_id' is set
        if ('style_id' not in local_var_params or
                local_var_params['style_id'] is None):
            raise ApiValueError("Missing the required parameter `style_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix_set_id' is set
        if ('tile_matrix_set_id' not in local_var_params or
                local_var_params['tile_matrix_set_id'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix_set_id` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_matrix' is set
        if ('tile_matrix' not in local_var_params or
                local_var_params['tile_matrix'] is None):
            raise ApiValueError("Missing the required parameter `tile_matrix` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_row' is set
        if ('tile_row' not in local_var_params or
                local_var_params['tile_row'] is None):
            raise ApiValueError("Missing the required parameter `tile_row` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        # verify the required parameter 'tile_col' is set
        if ('tile_col' not in local_var_params or
                local_var_params['tile_col'] is None):
            raise ApiValueError("Missing the required parameter `tile_col` when calling `collections_collection_id_styles_style_id_map_tiles_tile_matrix_set_id_tile_matrix_tile_row_tile_col_get`")  # noqa: E501
        collection_formats = {}
        # Values substituted into the {placeholders} of the request path.
        path_params = {}
        if 'collection_id' in local_var_params:
            path_params['collectionId'] = local_var_params['collection_id']  # noqa: E501
        if 'style_id' in local_var_params:
            path_params['styleId'] = local_var_params['style_id']  # noqa: E501
        if 'tile_matrix_set_id' in local_var_params:
            path_params['tileMatrixSetId'] = local_var_params['tile_matrix_set_id']  # noqa: E501
        if 'tile_matrix' in local_var_params:
            path_params['tileMatrix'] = local_var_params['tile_matrix']  # noqa: E501
        if 'tile_row' in local_var_params:
            path_params['tileRow'] = local_var_params['tile_row']  # noqa: E501
        if 'tile_col' in local_var_params:
            path_params['tileCol'] = local_var_params['tile_col']  # noqa: E501
        # Optional query-string parameters (only sent when supplied).
        query_params = []
        if 'datetime' in local_var_params:
            query_params.append(('datetime', local_var_params['datetime']))  # noqa: E501
        # 'style_id2' is the generator's de-duplicated name for the *query*
        # styleId parameter (the path already consumes 'style_id' above).
        if 'style_id2' in local_var_params:
            query_params.append(('styleId', local_var_params['style_id2']))  # noqa: E501
        if 'transparent' in local_var_params:
            query_params.append(('transparent', local_var_params['transparent']))  # noqa: E501
        if 'bgcolor' in local_var_params:
            query_params.append(('bgcolor', local_var_params['bgcolor']))  # noqa: E501
        if 'f' in local_var_params:
            query_params.append(('f', local_var_params['f']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['image/x-jpegorpng', 'image/jpeg', 'image/png', 'image/gif', 'application/x-cubewerx-wkb', 'application/problem+json', 'text/xml', 'text/html', 'application/json', ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cwApiKeyHeader', 'cwApiKeyQuery', 'cwAuth', 'httpBearer', 'oauth2', 'openIdConnect', 'openIdConnect1']  # noqa: E501
        # Delegate the actual HTTP round trip to the shared ApiClient.
        return self.api_client.call_api(
            '/collections/{collectionId}/styles/{styleId}/map/tiles/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='file',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
|
16,822 | 343ca5cfe570dea95fda35e0912b4e66f46513e3 | print('hello')
def create_cars(canvas, window, scale):
    """Draw the starting 4x4 grid of cars on *canvas*.

    Rows from front to back: one green row, two yellow rows, and one red row
    pinned to the bottom edge of the window.

    :param canvas: tkinter-style canvas providing create_rectangle/create_text
    :param window: widget providing winfo_width()/winfo_height()
    :param scale: kept for interface compatibility (currently unused)
    :return: list of ``[rect_id, car_number, text_id]`` entries, numbered 0..15
    """
    window_bottom = window.winfo_height()
    window_right = window.winfo_width()

    road_center = window_right / 2
    road_width = window_right / 6

    NUM_WIDE = 4   # cars per row
    NUM_TALL = 3   # rows in the front block; also sets the car height
    car_w = road_width / NUM_WIDE
    car_h = road_width / NUM_TALL
    FRONT_SPACE = 200  # gap between the front block and the bottom row
    CAR_SPACER = 2     # pixels between adjacent cars

    # Top edge of the frontmost row.
    FRONT_START = window_bottom - (NUM_TALL * car_h + FRONT_SPACE)

    cars = []

    def _add_row(y1, y2, text_y, fill):
        """Append one row of NUM_WIDE cars: rectangles span y1..y2, label at text_y."""
        for i in range(NUM_WIDE):
            x1 = (road_center - (.5 * road_width) + i * CAR_SPACER + i * car_w
                  - (NUM_WIDE / 2 * CAR_SPACER))
            x2 = x1 + car_w
            car_number = len(cars)
            seg = canvas.create_rectangle(x1, y1, x2, y2,
                                          fill=fill, outline="black", width=1)
            carnum = canvas.create_text(x1 + car_w / 3, text_y, font="Arial",
                                        text=str(car_number))
            cars.append([seg, car_number, carnum])

    # Front row (green).
    _add_row(FRONT_START, FRONT_START + car_h, FRONT_START + car_h / 4, "green")
    # Two middle rows (yellow).
    for row in (1, 2):
        y1 = FRONT_START + row * (car_h + CAR_SPACER)
        _add_row(y1, y1 + car_h, y1 + car_h / 4, "yellow")
    # Back row (red): drawn upward from the bottom edge of the window.
    _add_row(window_bottom, window_bottom - car_h,
             window_bottom - car_h + car_h / 4, "red")

    return cars
|
16,823 | 4d46a937e9b652e36e95e01489793fd14e4c1e6e | import socket, struct
import telnetlib
def readline(sc, show = True):
    """Read one newline-terminated line from socket *sc* (Python 2).

    Returns the line without its trailing newline and echoes it via repr()
    unless *show* is False.  Exits the process if the peer disconnects
    before a full line arrives.
    """
    res = ""
    while len(res) == 0 or res[-1] != "\n":
        # recv(1): byte-at-a-time so we never read past the newline
        data = sc.recv(1)
        if len(data) == 0:
            print repr(res)
            print "Server disconnected"
            exit()
        res += data
    if show:
        print repr(res[:-1])
    return res[:-1]
def read_until(sc, s):
    """Read from socket *sc* until the buffer ends with terminator *s* (Python 2).

    Returns everything received *before* the terminator; exits the process
    if the peer disconnects first.
    """
    res = ""
    while not res.endswith(s):
        data = sc.recv(1)
        if len(data) == 0:
            print repr(res)
            print "Server disconnected"
            exit()
        res += data
    # strip the terminator itself from the returned data
    return res[:-(len(s))]
def pad(s):
    """Zero-pad the list *s* in place to a multiple of 9 elements; return *s*."""
    leftover = len(s) % 9
    if leftover == 0:
        return s
    for _ in range(9 - leftover):
        s.append(0)
    return s
def printmat(matrix):
    """Pretty-print a matrix, one row per line, followed by a blank line (Python 2)."""
    for row in matrix:
        for value in row:
            # trailing comma: space-separate values on a single line
            print value,
        print ""
    print ""
def flip(matrix):
    """Return the transpose of a 3x3 matrix as new nested lists."""
    return [[matrix[r][c] for r in range(3)] for c in range(3)]
def genBlockMatrix(s):
    """Split the flat sequence *s* into row-major 3x3 blocks.

    Only complete groups of 9 are used; any trailing remainder is ignored,
    matching the original integer-division behaviour.
    """
    block_count = len(s) // 9
    blocks = []
    for b in range(block_count):
        base = 9 * b
        blocks.append([[s[base + 3 * y + x] for x in range(3)] for y in range(3)])
    return blocks
def fixmatrix(matrixa, matrixb):
    """Combine two 3x3 matrices: out[c][r] = int(a[r][c]) XOR int(b[c][r]).

    The original expressed XOR as (x|y)&~(x&y); ^ is the identical bitwise
    operation for Python ints.
    """
    result = [[0, 0, 0] for _ in range(3)]
    for r in range(3):
        for c in range(3):
            result[c][r] = int(matrixa[r][c]) ^ int(matrixb[c][r])
    return result
def invertfix(matrixa, matrixb):
    """Return the transpose of the elementwise XOR of two 3x3 matrices."""
    xored = [[int(matrixa[r][c]) ^ int(matrixb[r][c]) for c in range(3)]
             for r in range(3)]
    return [[xored[r][c] for r in range(3)] for c in range(3)]
# --- exploit driver: solve the server's matrix puzzle and read the flag ---
sc = socket.create_connection(("vermatrix.pwn.democrat", 4201))
read_until(sc, "SEED: ")
seed = read_until(sc, "\n")
# seed characters -> zero-padded sequence of 3x3 blocks
seedmatrix = genBlockMatrix(pad([ord(c) for c in seed]))
# the server sends the challenge matrix as three whitespace-separated lines
matrix = readline(sc) + " " + readline(sc) + " " + readline(sc)
res = genBlockMatrix(matrix.split())[0]
# undo the server's transform by applying the seed blocks in reverse order
for block in seedmatrix[::-1]:
    res = invertfix(res, block)
# send the answer back as a comma-separated row-major list
s = []
for rn in xrange(3):
    for cn in xrange(3):
        s.append(str(res[rn][cn]))
sc.send(",".join(s) + "\n")
# dump whatever the server replies (should contain the flag)
while True:
    data = sc.recv(16384)
    if len(data) == 0:
        break
    for line in data.split("\n"):
        print repr(line)
# flag{IV_wh4t_y0u_DiD_Th3r3}
|
16,824 | 8da89c0eaac1021cacc29882f7e7a7c5f69423b3 | import pickle
import os
def _cache_path(cc):
    """Return the pickle cache path for scene *cc*.

    Path is <scene_dir>/output/<scene_id>_pickle with an atmosphere-source
    suffix ('_narr' or '_merra'); any other atmo_src gets no suffix.
    """
    out_file = os.path.join(cc.scene_dir, 'output', cc.scene_id + '_pickle')
    if cc.atmo_src == 'narr':
        out_file += '_narr'
    elif cc.atmo_src == 'merra':
        out_file += '_merra'
    return out_file


def output_cache(cc):
    """Serialize *cc* to its pickle cache file (see _cache_path)."""
    with open(_cache_path(cc), 'wb') as f:
        pickle.dump(cc, f)


def read_cache(cc):
    """Deserialize and return the cached object matching *cc*.

    Raises OSError if the expected pickle file does not exist.
    """
    out_file = _cache_path(cc)
    if not os.path.isfile(out_file):
        raise OSError('pickle_file is not in expected location %s' % out_file)
    with open(out_file, 'rb') as f:
        return pickle.load(f)
|
16,825 | d73ed3c3c3716f790386bf989dda084b8f29e77c | from rest_framework.viewsets import ModelViewSet
from .serializers import AnnonceSerializer
from .models import Annonce
class AnnonceViewSet(ModelViewSet):
    """REST CRUD endpoints (list/retrieve/create/update/destroy) for Annonce."""

    serializer_class = AnnonceSerializer
    # Lazy queryset: DRF re-evaluates it per request via get_queryset().
    queryset = Annonce.objects.all()
|
16,826 | 9ad45daee671bd9dee89e7ae60a18cd4e64f880e | import sys
sys.path.append('./')
import os
import numpy as np
import cv2
from config import cfg
from utils.image_reader_forward import Image_reader
class Pic_vedio():
    """Render the reader's frames into an MJPG video file (paths from cfg)."""

    def __init__(self):
        # Reader yields frames plus boxes/offsets from the configured paths.
        self.reader = Image_reader(img_path=cfg.img_path, label_path=cfg.label_path)
        self.vedio_dir = cfg.vedio_dir
        self.vedio_name = cfg.vedio_name

    def test(self):
        """Write every frame to the output video while previewing it on screen."""
        pre_box = [50., 50., 50., 50.]
        videoWriter = None
        for step in range(self.reader.img_num):
            img, box_ori, img_p, box_p, offset, ratio = self.reader.get_data(
                frame_n=step, pre_box=pre_box)
            if videoWriter is None:
                # Lazily create the writer once the first frame's size is known.
                fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
                img_h, img_w, _ = img.shape
                videoWriter = cv2.VideoWriter(
                    os.path.join(self.vedio_dir, self.vedio_name),
                    fourcc, 30, (img_w, img_h))
            # BUG FIX: the original wrote frames only for step > 0, so the
            # very first frame was dropped from the output video.
            videoWriter.write(img)
            cv2.imshow('img', img)
            cv2.waitKey(10)
        # Guard: with zero frames there is no writer to release.
        if videoWriter is not None:
            videoWriter.release()
        cv2.destroyAllWindows()
        print('vedio is saved in '+self.vedio_dir)
if __name__=='__main__':
    # Script entry point: build the converter and render the video.
    t=Pic_vedio()
    t.test()
16,827 | 139d0f53d0fa5f34b7bc10f5ef871aacbb70b2a9 | #!/usr/bin/env python2
# coding: utf-8
import re
import uuid
from .idbase import IDBase
from .idc_id import IDCID
from .idc_id import IDC_ID_LEN
SERVER_ID_LEN = 12
def _mac_addr(s):
s = str(s)
if re.match("^[0-9a-f]{12}$", s) is None:
raise ValueError('server id mac addr must be 12 char hex, but: {s}'.format(s=s))
return s
class ServerID(IDBase):
    """Identifier of a server: an IDC id followed by a 12-hex-char MAC address."""

    # (name, start, end, converter[, 'self']) slices consumed by IDBase.
    _attrs = (
        ('idc_id', 0, IDC_ID_LEN, IDCID),
        ('mac_addr', IDC_ID_LEN, IDC_ID_LEN + SERVER_ID_LEN, _mac_addr),
        ('server_id', None, None, None, 'self'),
    )

    _str_len = IDC_ID_LEN + SERVER_ID_LEN

    _tostr_fmt = '{idc_id}{mac_addr}'

    @classmethod
    def local_server_id(cls, idc_id):
        """Build the id of this machine from uuid.getnode()'s MAC address.

        Uses ``cls`` (the original named the first argument ``self`` and
        hard-coded ServerID) so subclasses construct their own type.
        """
        return cls(idc_id, '%012x' % uuid.getnode())
|
16,828 | 1e7c070c1314372a95cbbc792dba4fe7796da30b | import torch.nn as nn
class TransparentDataParallel(nn.DataParallel):
    """DataParallel wrapper that transparently forwards custom training hooks.

    nn.DataParallel only proxies ``forward``; this subclass also exposes the
    project-specific helpers of the wrapped module so callers can use a
    parallelized model exactly like the underlying one.
    """

    def train_batch(self, *args, **kwargs):
        """Forward to the wrapped module's train_batch."""
        return self.module.train_batch(*args, **kwargs)

    def eval_batch(self, *args, **kwargs):
        """Forward to the wrapped module's eval_batch."""
        return self.module.eval_batch(*args, **kwargs)

    def set_best(self, *args, **kwargs):
        """Forward to the wrapped module's set_best."""
        return self.module.set_best(*args, **kwargs)

    def recover_best(self, *args, **kwargs):
        """Forward to the wrapped module's recover_best."""
        return self.module.recover_best(*args, **kwargs)

    def save(self, *args, **kwargs):
        """Forward to the wrapped module's save."""
        return self.module.save(*args, **kwargs)
|
16,829 | 4e0bf8a5156df07ad0ac48611076b21422295964 | #!python
from __future__ import print_function
import unittest
######################################################################
# this problem is from
# https://www.interviewcake.com/question/python/stock-price
# Writing programming interview questions hasn't made me rich. Maybe
# trading Apple stocks will.
# Suppose we could access yesterday's stock prices as a list, where:
# + The indices are the time in minutes past trade opening time, which
# was 9:30am local time.
# + The values are the price in dollars of Apple stock at that time.
# So if the stock cost $500 at 10:30am, stock_prices_yesterday[60] =
# 500.
# Write an efficient function that takes stock_prices_yesterday and
# returns the best profit I could have made from 1 purchase and 1 sale
# of 1 Apple stock yesterday.
# For example:
# stock_prices_yesterday = [10, 7, 5, 8, 11, 9]
# get_max_profit(stock_prices_yesterday)
# returns 6 (buying for $5 and selling for $11)
# No "shorting" - you must buy before you sell. You may not buy and sell
# in the same time step(at least 1 minute must pass).
######################################################################
# this solution is pretty much what is on the site as I just followed
# it along as a "free question" to help determine if these examples
# were interesting.
def get_max_profit(stock_prices_yesterday):
    """Return the best profit from one buy followed by one sale.

    Single O(n) pass: track the minimum price seen so far and the best
    profit achievable by selling at the current price.

    :param stock_prices_yesterday: sequence of prices, indexed by minute.
    :raises IndexError: if fewer than 2 prices are given (kept as
        IndexError for backward compatibility with existing callers).

    Cleanup: removed the leftover debug ``print`` calls and the
    commented-out initialization remnants; the algorithm is unchanged.
    """
    if len(stock_prices_yesterday) < 2:
        raise IndexError('Getting a profit requires at least 2 prices')
    min_price = stock_prices_yesterday[0]
    # Seed with the first possible trade so an all-decreasing day is handled.
    max_profit = stock_prices_yesterday[1] - stock_prices_yesterday[0]
    for current_price in stock_prices_yesterday:
        # Lowest price seen so far (candidate buy).
        min_price = min(min_price, current_price)
        # Profit if we bought at the minimum and sold right now.
        potential_profit = current_price - min_price
        max_profit = max(max_profit, potential_profit)
    return max_profit
# Now let's test
class TestStockPrice(unittest.TestCase):
    """Unit tests for get_max_profit.

    Fixed: the original defined ``test_given_example`` twice; the second
    definition shadowed the first, so the interviewcake example was never
    actually tested. The second test is renamed.
    """

    def test_given_obvious_edge_case(self):
        """A flat price series yields zero profit."""
        stock_prices_yesterday = [10, 10, 10, 10]
        max_profit = get_max_profit(stock_prices_yesterday)
        self.assertEqual(0, max_profit)

    def test_given_example(self):
        """test the example given at interviewcake"""
        stock_prices_yesterday = [10, 7, 5, 8, 11, 9]
        max_profit = get_max_profit(stock_prices_yesterday)
        self.assertEqual(6, max_profit)

    def test_no_good_deals(self):
        """test a day where there are no good deals"""
        stock_prices_yesterday = [10, 7, 5, 4, 2, 0]
        max_profit = get_max_profit(stock_prices_yesterday)
        self.assertEqual(0, max_profit)
if __name__ == "__main__":
    # Run only the stock-price tests, with verbose per-test output.
    test_suite = unittest.TestLoader().loadTestsFromTestCase(TestStockPrice)
    unittest.TextTestRunner(verbosity=2).run(test_suite)
|
16,830 | 687355b0f637a9ebe2fce9cd5c576f4506569e2c | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import bandwidth as bw
from unittest import TestCase
import numpy as np
class TestBWCFfunction(TestCase):
    """Checks bw.get_central_nu_bandwidth against bands with known answers."""

    def test_squaredband(self):
        """Top-hat band: centre is exact, width is within grid resolution."""
        true_width = 7.0
        true_centre = 43.0
        lo = 38.0
        hi = 50.0
        n_points = 121
        freqs = np.linspace(lo, hi, n_points)
        response = np.zeros(n_points)
        # Unit response inside +/- half the bandwidth around the centre.
        response[np.abs(freqs - true_centre) <= true_width / 2] = 1.0
        centre, width = bw.get_central_nu_bandwidth(freqs, response)
        self.assertAlmostEqual(centre, true_centre)
        # Width recovery limited by the sampling step of the frequency grid.
        self.assertTrue(np.abs(width - true_width) < 2 * (hi - lo) / n_points)

    def test_triangularband(self):
        """Linearly rising band: centroid sits at (min + 2*max) / 3."""
        lo = 38.0
        hi = 50.0
        n_points = 121
        freqs = np.linspace(lo, hi, n_points)
        band = np.zeros(n_points)
        band = (freqs - lo) / (hi - lo)
        centre, width = bw.get_central_nu_bandwidth(freqs, band)
        self.assertAlmostEqual(centre, (lo + 2 * hi) / 3, places=1)
|
16,831 | 3d28e3801cf3642878e3ff403a69752d4ea34f21 | def fun():
str="hye i m just try to learn python"
print(str)
print(str.replace(' ',''))
fun();
|
16,832 | 77c000bd9c236205006b69bb50bf010359a7b077 | ''' besomebody.py (c) 2013 - 2018 Matthew J. Ernisse <matt@going-flying.com>
Impersonate a variety of folks.
Redistribution and use in source and binary forms,
with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the
above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce
the above copyright notice, this list of conditions
and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import core
import os
import random
import re
from botlogger import logException
def afraid():
    '''I am not the Kwisatz Haderach...'''
    # The Bene Gesserit Litany Against Fear, one sentence per line.
    lines = (
        'I must not fear.',
        'Fear is the mind-killer.',
        'Fear is the little-death that brings total obliteration.',
        'I will face my fear.',
        'I will permit it to pass over me and through me.',
        'And when it has gone past I will turn the inner eye to see its path.',
        'Where the fear has gone there will be nothing.',
        'Only I will remain.',
    )
    return '\n'.join(lines)
def bhanat():
    ''' /me pours a little out for his homies who are not here. '''
    # Fake a helpdesk ticket number in the 5-6 digit range.
    ticket = random.randint(10000, 999999)
    return "<postit>%s</postit>" % str(ticket)
def quote_from_disk(who, index=None):
    ''' emit a quote from nicks/who.txt

    Bug fixes versus the original:
    - ``fd.readline()`` was called once, so at most the FIRST line of the
      file was ever loaded; now every non-blank line is a candidate quote.
    - ``index`` arrives from ``privmsg`` as a regex capture (a string);
      indexing a list with a string raised TypeError. It is now converted
      to int and clamped into range.
    - An existing-but-empty file no longer crashes ``random.randint``.
    '''
    try:
        with open(os.path.join(core.dataDir, "nick/{}.txt".format(who))) as fd:
            sayings = [line for line in fd if line.strip()]
    except Exception as e:
        logException(e)
        return "I do not know of whom you speak."
    if not sayings:
        return "I do not know of whom you speak."
    if index is None:
        index = random.randint(0, len(sayings) - 1)
    else:
        # Regex guarantees digits only; clamp so out-of-range requests
        # return the last quote instead of raising IndexError.
        index = min(int(index), len(sayings) - 1)
    return sayings[index]
def privmsg(self, user, channel, msg):
    """ Look for <nick>: be <word> and dispatch <word> to the various
    handlers. If one of the handlers returns something then it will
    be sent back to the channel.
    """
    # Default reply target: the sender's nick (the part before '!').
    dst = user.split('!', 1)[0]
    if channel != self.nickname:
        # Channel message: only react if it is addressed to the bot,
        # and reply to the channel rather than the sender.
        msg = self._forMe(msg)
        if not msg:
            return
        dst = channel
    # Accept "be <name>" with an optional [N] index, e.g. "be matt[3]".
    matches = re.search(
        r'^be\s+([a-z0-9_.-]+)(?:\[(\d+)\])?\s*$',
        msg,
        re.I
    )
    if not matches:
        return
    who = matches.group(1).lower()
    # Defensive re-check of the captured name before it is used to build
    # a filesystem path in quote_from_disk.
    if not re.search(r'^[a-z0-9_.-]+$', who):
        return "Nope, not gonna do it."
    if who == "bhanat":
        self.msg(dst, bhanat(), only=True)
    elif who == "afraid":
        self.msg(dst, afraid(), only=True)
    else:
        # Any other name: read a quote from nicks/<who>.txt; the optional
        # index (may be None) selects a specific line.
        index = matches.group(2)
        self.msg(dst, quote_from_disk(who, index), only=True)
# Register this plugin with the bot core on import.
core.register_module(__name__)
|
16,833 | 7e66e26209663bd31a22614dc4fb0181dd0684f0 | from argparse import ArgumentParser, Namespace
from dataclasses import dataclass
from pathlib import PurePath
from core_get.actions.init.init_options import InitOptions
from core_get.options.options import Options
from core_get.cli.parse.parser import Parser
@dataclass
class InitOptionsParser(Parser):
    """Command-line parser for the ``init`` sub-command."""
    name: str = 'init'
    # Fixed: the usage string previously advertised '<package-names>',
    # but the only positional argument this parser accepts is a path.
    usage: str = '<path>'
    description: str = 'Initialize a repository'

    def add_arguments(self, argument_parser: ArgumentParser) -> None:
        """Register the single positional ``path`` argument."""
        argument_parser.add_argument('path', help='Path to initialize')

    def make_options(self, namespace: Namespace) -> Options:
        """Build InitOptions from the parsed namespace."""
        return InitOptions(PurePath(namespace.path))
|
16,834 | 604d9425443be25b0e0e365f454432ebbe0dee81 | from numpy import linalg
from laika.lib.coordinates import ecef2geodetic, geodetic2ecef
from laika import AstroDog
from laika.gps_time import GPSTime
import glob
import os
import numpy as np
import matplotlib.pyplot as plt
import scipy
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.python.ops.array_ops import zeros
from tensorflow.python.training.tracking import base
import tensorflow_addons as tfa
import tensorflow_datasets as tfds
import pymap3d as pm
from tqdm import tqdm
from sklearn.model_selection import train_test_split
from skimage.io import imread, imsave
import time
import pandas as pd
import loader
import read_log
import itertools
from coords_tools import *
from matplotlib import pyplot
import datetime
from slac import loadSlac
from loader import *
import tf_phone_model
os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
autotune = tf.data.experimental.AUTOTUNE
tf.keras.backend.set_floatx('float64')
def get_track_path(folder, track):
    # Fit a smoothed position track for one collection by jointly optimising
    # per-phone GNSS measurement models against a shared TF track model,
    # then export the fitted track to <folder>/<track>/track.csv.
    # NOTE(review): indentation of some nested blocks reconstructed; verify
    # against the original file.
    phone_glob = next(os.walk(folder+"/"+track))[1]
    print(folder, track, end=' ')
    phones = {}
    phone_names = []
    # Baseline positions come from the competition-provided baseline CSVs.
    if "train" in folder:
        df_baseline = pd.read_csv("data/baseline_locations_train.csv")
    else:
        df_baseline = pd.read_csv("data/baseline_locations_test.csv")
    df_baseline = df_baseline[df_baseline['collectionName'] == track]
    df_baseline.rename(columns = {'latDeg':'baseLatDeg', 'lngDeg':'baseLngDeg', 'heightAboveWgs84EllipsoidM':'baseHeightAboveWgs84EllipsoidM'}, inplace = True)
    df_baseline.set_index('millisSinceGpsEpoch', inplace = True)
    df_baseline = df_baseline[~df_baseline.index.duplicated(keep='first')]
    df_baseline.sort_index(inplace=True)
    # Train tracks: merge per-phone ground truth; test tracks: reuse the
    # baseline columns as a stand-in "ground truth" for the diagnostics.
    if "train" in folder:
        for phonepath in phone_glob:
            truepos = pd.read_csv(folder+"/" + track + "/" + phonepath + "/ground_truth.csv")
            truepos.set_index('millisSinceGpsEpoch', inplace = True)
            df_baseline = df_baseline.combine_first(truepos)
    else:
        df_baseline['latDeg'] = df_baseline['baseLatDeg']
        df_baseline['lngDeg'] = df_baseline['baseLngDeg']
        df_baseline['heightAboveWgs84EllipsoidM'] = df_baseline['baseHeightAboveWgs84EllipsoidM']
    baseline_times = []
    baseline_ecef_coords = []
    gt_ecef_coords = []
    # Convert geodetic rows to ECEF. The "- 61" on the truth height is
    # presumably a geoid/ellipsoid correction — TODO confirm.
    for timemili, row in df_baseline.iterrows():
        latbl, lonbl, altbl = float(row['baseLatDeg']),float(row['baseLngDeg']),float(row['baseHeightAboveWgs84EllipsoidM'])
        baseline_times.append(timemili)
        baseline_ecef_coords.append(np.array(pm.geodetic2ecef(latbl,lonbl,altbl, deg = True)))
        latbl, lonbl, altbl = float(row['latDeg']),float(row['lngDeg']),float(row['heightAboveWgs84EllipsoidM'] - 61)
        gt_ecef_coords.append(np.array(pm.geodetic2ecef(latbl,lonbl,altbl, deg = True)))
    #baseline_ecef_coords = gt_ecef_coords.copy()
    # Local tangent frame anchored at the first baseline point:
    # row 2 = local "up", row 0 = global Z projected orthogonal to it,
    # row 1 completes the right-handed triad; transposed for right-multiply.
    mat_local = np.zeros((3,3))
    mat_local[2] = baseline_ecef_coords[0]/np.linalg.norm(baseline_ecef_coords[0], axis = -1)
    mat_local[0] = np.array([0,0,1])
    mat_local[0] = mat_local[0] - mat_local[2]*np.sum(mat_local[2]*mat_local[0])
    mat_local[0] = mat_local[0]/np.linalg.norm(mat_local[0], axis = -1)
    mat_local[1] = np.cross(mat_local[0], mat_local[2])
    mat_local = np.transpose(mat_local)
    #mat_local = np.eye(3)
    gt_ecef_coords = np.array(gt_ecef_coords)
    baseline_times = np.array(baseline_times)
    baseline_ecef_coords = np.array(baseline_ecef_coords)
    gt_ecef_coords = np.matmul(gt_ecef_coords,mat_local)
    # 3657 days in seconds — presumably the GPS-epoch to Unix-epoch offset
    # used to turn millisSinceGpsEpoch into a UTC datetime; verify.
    timeshift = 3657*24*60*60
    datetimenow = int(baseline_times[0])//1000+timeshift
    datetimenow = datetime.datetime.utcfromtimestamp(datetimenow)
    # SLAC base-station RINEX data for differential corrections.
    slac_file = loadSlac(datetimenow)
    slac = myLoadRinexPrevdoIndexed(slac_file)
    slac_coords = np.array(myLoadRinex(slac_file).position)
    slac_times = np.array([r[0] for r in slac])
    slac_values = np.array([r[1] for r in slac])
    phone_models = []
    phone_times = []
    constellations = ['GPS', 'GLONASS', 'BEIDOU','GALILEO']
    dog = AstroDog(valid_const=constellations, pull_orbit=True)
    phones = {}
    phone_names = []
    max_time = min_time = 0
    # Running median filter (17-sample window) over the baseline coordinates.
    bufx = []
    bufy = []
    bufz = []
    window = 8
    for i in range(len(baseline_ecef_coords)):
        bufx.append(baseline_ecef_coords[i,0])
        bufy.append(baseline_ecef_coords[i,1])
        bufz.append(baseline_ecef_coords[i,2])
        if len(bufx) > window*2+1:
            bufx = bufx[1:]
            bufy = bufy[1:]
            bufz = bufz[1:]
        if i >= window:
            baseline_ecef_coords[i-window,0] = sorted(bufx)[len(bufx)//2]
            baseline_ecef_coords[i-window,1] = sorted(bufy)[len(bufy)//2]
            baseline_ecef_coords[i-window,2] = sorted(bufz)[len(bufz)//2]
    #baseline_ecef_coords = scipy.signal.medfilt(baseline_ecef_coords, [1025,1])
    # Jitter the initialisation so the optimiser does not start glued to
    # the (noisy) baseline track.
    baseline_ecef_coords += np.random.normal(0.,20.,baseline_ecef_coords.shape)
    try:
        with open(folder + "/" + track + "/export.dat", 'rb') as f:
            data_file = pickle.load(f)
    except:
        data_file = None
    # Build one TF measurement model per phone from its raw GNSS log,
    # caching the parsed log as <phone>_raw.csv.
    for phonepath in phone_glob:
        phone = phonepath
        phones[phone] = len(phones)
        phone_names.append(phone)
        print(phone, end=' ')
        if False: #data_file != None:
            model, times = tf_phone_model.createGpsPhoneModelFromDataFile(data_file,phone,{ 'times':baseline_times, 'values':baseline_ecef_coords}, mat_local)
        else:
            try:
                df_raw = pd.read_csv(folder + "/" + track + "/" + phone + "/" + phone + "_raw.csv")
            except:
                logs = read_log.gnss_log_to_dataframes(folder + "/" + track + "/" + phone + "/" + phone + "_GnssLog.txt")
                df_raw = logs['Raw']
                df_raw.to_csv(folder + "/" + track + "/" + phone + "/" + phone + "_raw.csv")
            model, times = tf_phone_model.createGpsPhoneModel(df_raw,{ 'times':baseline_times, 'values':baseline_ecef_coords},mat_local,dog, { 'times':slac_times, 'values':slac_values, 'coords':slac_coords})
        phone_models.append(model)
        phone_times.append(times)
        # Track the union of all phones' time spans.
        if min_time == 0 or min_time > times[0]:
            min_time = times[0]
        if max_time == 0 or max_time < times[-1]:
            max_time = times[-1]
    model_track, track_model_error, num_measures, start_nanos, time_tick = tf_phone_model.createTrackModel(min_time,max_time, { 'times':baseline_times, 'values':baseline_ecef_coords}, mat_local)
    # Clip the reference arrays to the span covered by the track model.
    istart = np.searchsorted(baseline_times, start_nanos*1e-6)
    iend = np.searchsorted(baseline_times, (start_nanos+time_tick*num_measures)*1e-6)
    baseline_ecef_coords = baseline_ecef_coords[istart:iend]
    baseline_times = baseline_times[istart:iend]
    gt_ecef_coords = gt_ecef_coords[istart:iend]
    track_input = np.arange(num_measures)
    track_input = np.reshape(track_input,(-1,1))
    # [-1, 1] finite-difference kernel: first derivative along time.
    def kernel_init(shape, dtype=None, partition_info=None):
        kernel = np.zeros(shape)
        kernel[:,0,0] = np.array([-1,1]).astype(np.float64)
        return kernel
    derivative = tf.keras.layers.Conv1D(1,2,use_bias=False,kernel_initializer=kernel_init, dtype = tf.float64)
    # [-1, 0, 1] central-difference kernel spanning one epoch.
    def kernel_init_epoch(shape, dtype=None, partition_info=None):
        kernel = np.zeros(shape).astype(np.float64)
        kin = np.zeros((3)).astype(np.float64)
        kin[0] = -1
        kin[-1] = 1
        kernel[:,0,0] = kin
        return kernel
    derivative_epoch = tf.keras.layers.Conv1D(1,3,use_bias=False,kernel_initializer=kernel_init_epoch, dtype = tf.float64)
    @tf.function
    def train_step_gnss(optimizer, physics):
        # One macro-step: 16 gradient updates on the combined pseudorange /
        # delta-range losses plus acceleration/speed smoothness penalties.
        for _ in range(16):
            with tf.GradientTape(persistent=True) as tape:
                total_loss_psevdo = 0
                total_loss_delta = 0
                accs_loss_large = 0
                accs_loss_small = 0
                speed_loss_small = 0
                for i in range(len(phone_models)):
                    poses = model_track(phone_times[i], training=True)
                    poses = tf.reshape(poses,(1,-1,3))
                    psevdo_loss,delta_loss,delta_dif, psev_error = phone_models[i](poses, training=True)
                    total_loss_psevdo += psevdo_loss/10
                    total_loss_delta += delta_loss*2
                total_loss = total_loss_delta +total_loss_psevdo
                poses = track_model_error(track_input, training=True)
                poses = tf.reshape(poses,(-1, 3))
                poses_batch = tf.transpose(poses)
                poses_batch = tf.expand_dims(poses_batch, axis=-1)
                speed = derivative_epoch(poses_batch)
                speed = tf.pad(speed,[[0,0],[0,1], [0,0]])
                shift1 = derivative(poses_batch)
                shift2 = speed*0.5
                shift_loss = tf.reduce_mean(tf.abs(shift1-shift2)) * 0.01
                accel = derivative(speed)
                accel = tf.squeeze(accel)
                accel = tf.transpose(accel)
                # Hard penalty above ~4 units of acceleration, soft below.
                accs_loss_large = tf.reduce_mean(tf.nn.relu(tf.abs(accel) - 4))
                accs_loss_small = tf.reduce_mean(tf.abs(accel)) * 0.01
                speed_loss_small = tf.reduce_mean(tf.abs(speed[2])) * 0.01 + shift_loss
                '''
                speed = (poses[3:] - poses[:-3])
                speed_loss_small += tf.reduce_mean(tf.abs(poses[2:-1] - poses[1:-2]-speed/3))*0.01
                accs = speed[1:] - speed[:-1]
                acs2 = tf.linalg.norm(tf.abs(accs)+1.e-7, axis = -1)
                accs_loss_small = tf.reduce_mean(acs2) / 100
                accs_loss_large = tf.reduce_mean(tf.nn.relu(acs2-5))
                '''
                total_loss += (accs_loss_small + accs_loss_large + speed_loss_small)*5
            # Apply gradients to every phone model, then to both track models.
            for i in range(len(phone_models)):
                grads = tape.gradient(total_loss, phone_models[i].trainable_weights)
                optimizer.apply_gradients(zip(grads, phone_models[i].trainable_weights))
            grads = tape.gradient(total_loss, model_track.trainable_weights)
            optimizer.apply_gradients(zip(grads, model_track.trainable_weights))
            grads = tape.gradient(total_loss, track_model_error.trainable_weights)
            optimizer.apply_gradients(zip(grads, track_model_error.trainable_weights))
            del tape
        return total_loss, accs_loss_small, accs_loss_large, speed_loss_small, total_loss_psevdo, total_loss_delta, delta_dif, poses, psev_error
    lr = 0.5
    #optimizer = keras.optimizers.SGD(learning_rate=100., nesterov=True, momentum=0.5)
    #optimizer = keras.optimizers.Adam(learning_rate=0.5)
    optimizer = keras.optimizers.Adam(learning_rate=0.01)#, epsilon= 0.0001)
    #optimizer = keras.optimizers.SGD(lr=0.01, momentum=0.9, clipvalue=100. )
    for step in range(32*60):
        total_loss, accs_loss_small, accs_loss_large, speed_loss_small = 0,0,0,0
        physics = 0
        for _ in range(32):
            total_loss, accs_loss_small, accs_loss_large, speed_loss_small, total_loss_psevdo, total_loss_delta, delta_dif, poses, psev_error = train_step_gnss(optimizer, physics)
        # Diagnostics: compare the fitted track with mean-centred truth.
        pred_pos = model_track(baseline_times*1000000).numpy()
        poses = poses.numpy()
        psev_error = psev_error.numpy()
        psev_error = psev_error[np.abs(psev_error) > 0]
        percents_good_psev = np.sum(np.abs(psev_error) < 1)*100/len(psev_error)
        shift = pred_pos - gt_ecef_coords
        meanshift = np.mean(shift,axis=0,keepdims=True)
        shift = shift - meanshift
        err3d = np.mean(np.linalg.norm(shift,axis = -1))
        dist_2d = np.linalg.norm(shift[:,:2],axis = -1)
        err2d = np.mean(dist_2d)
        dist_2d = np.sort(dist_2d)
        err50 = dist_2d[len(dist_2d)//2]
        err95 = dist_2d[len(dist_2d)*95//100]
        delta_dif = delta_dif.numpy()
        delta_dif = delta_dif[np.abs(delta_dif) > 0]
        percents_good = np.sum(np.abs(delta_dif) < 0.1)*100/len(delta_dif)
        print( "Training loss at step %d (%.2f (%.2f),%.2f,%.2f,%.2f,%.4f): %.4f (%.2f),%.4f (%.2f),%.4f,%.4f,%.4f lr %.4f" % (step, err3d, np.linalg.norm(meanshift[0,:2]), err2d, err50, err95, (err50+err95)/2, float(total_loss_psevdo), percents_good_psev, float(total_loss_delta),percents_good,float(accs_loss_large),float(accs_loss_small), float(speed_loss_small), float(lr)), end='\r')
        # Every 32 macro-steps: decay the learning rate and dump a scatter
        # plot of predicted vs. truth positions.
        if(step % 32 == 0):
            lr *= 0.90
            optimizer.learning_rate = lr
            if(step > 32):
                physics = 1.
            print()
            if True:
                plt.clf()
                plt.scatter(pred_pos[:,1], pred_pos[:,0], s=0.2)
                plt.scatter(gt_ecef_coords[:,1], gt_ecef_coords[:,0], s=0.2)
                #fig1.canvas.start_event_loop(sys.float_info.min) #workaround for Exception in Tkinter callback
                plt.savefig("fig/"+track+str(step+10000)+".png", dpi = 1000)
                plt.close()
    # Export the fitted error-model track, rotated back to ECEF, to CSV.
    poses = track_model_error(track_input)
    times = start_nanos + time_tick*track_input
    poses = np.matmul(poses, mat_local.T)
    d = {'nanos': np.reshape(times,(-1)), 'X': poses[:,0], 'Y': poses[:,1], 'Z': poses[:,2]}
    df = pd.DataFrame(data=d)
    df.to_csv(folder + "/" + track + "/track.csv")
|
16,835 | 819fe6e51c984bc83bddba9ca1c5790e79c6f063 | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
# URL routes for the gallery app. Fixed: the original repeated the login
# and register entries; since Django matches the first pattern, the later
# duplicates were dead entries and are removed.
urlpatterns = patterns(
    '',
    url(r'^$', 'gallery.views.home', name='home'),
    url(r'^login/$', 'django.contrib.auth.views.login', name="Kaleeri Login"),
    url(r'^register/$', 'gallery.views.register', name="Kaleeri Register"),
    url(r'^logout/$', 'django.contrib.auth.views.logout', {"next_page": "/"}, name="Kaleeri Logout"),
    url(r'^profile/$', 'gallery.views.user_account', name="Kaleeri Profile"),
    url(r'^album/(\d+)/([a-f0-9]{40})?/?$', 'gallery.views.show_album'),
    url(r'^album/(\d+)/edit/$', 'gallery.views.edit_album'),
    url(r'^album/(\d+)/page/(\d+)/edit/$', 'gallery.views.edit_page'),
    url(r'^album/(\d+)/page/add/(\d+)/$', 'gallery.views.add_page'),
    url(r'^album/(\d+)/page/(\d+)/remove/$', 'gallery.views.remove_page'),
    url(r'^album/(\d+)/page/(\d+)/photo/(\d+)/add/', 'gallery.views.add_photo'),
    url(r'^album/(\d+)/page/(\d+)/photo/(\d+)/remove/', 'gallery.views.remove_photo'),
    url(r'^album/(\d+)/page/(\d+)/([a-f0-9]{40})?/?$', 'gallery.views.show_page'),
    url(r'^album/(\d+)/subalbums/([a-f0-9]{40})?/?$', 'gallery.views.list_albums'),
    url(r'^album/list/$', 'gallery.views.list_albums'),
    url(r'^album/create/$', 'gallery.views.create_album'),
    url(r'^add/$', 'gallery.views.add_photo'),
    url(r'^album/(\d+)/order/$', 'gallery.views.order'),
    url(r'^order/checksum/(\d+)/(\d+)/(\d+)/$', 'gallery.views.order_checksum'),
    url(r'^order/success/$', 'gallery.views.order_success'),
    url(r'^order/cancel/$', 'gallery.views.order_cancel'),
    url(r'^order/error/$', 'gallery.views.order_error'),
    url(r'^layouts/$', 'gallery.views.list_layouts'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('allaccess.urls'))
)
16,836 | 9be31b839c153683ad366be4926f9d87ba855287 | from selenium import webdriver
# Scrape the "state and union territory capitals" table from Wikipedia
# and print it cell by cell.
browser = webdriver.Chrome(executable_path="/Users/shiv/desktop/drivers/chrome/chromedriver")
browser.get("https://en.wikipedia.org/wiki/List_of_state_and_union_territory_capitals_in_India")
browser.implicitly_wait(10)
# Count the data rows (tbody) and header columns (thead) of the table.
row = len(browser.find_elements_by_xpath("//*[@id='mw-content-text']/div[1]/table[2]/tbody/tr"))
cols = len(browser.find_elements_by_xpath("//*[@id='mw-content-text']/div[1]/table[2]/thead/tr/th"))
print(f'this table has \n{row} rows, \n{cols} columns')
# Walk every body cell; rows start at 2 to skip the header row.
for row_index in range(2, row + 1):
    for col_index in range(1, cols + 1):
        cell_xpath = ("//*[@id='mw-content-text']/div[1]/table[2]/tbody/tr["
                      + str(row_index) + "]/th[" + str(col_index) + "]")
        print(browser.find_element_by_xpath(cell_xpath).text, end=" ")
    print()
browser.quit()
|
16,837 | 646420353380fe1a3eecdb5135f4d83ab65175a3 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-06-13 05:48
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration for the fOn app.

    Creates Comment, Like, Unlike, Post and UserDetail tables, then wires
    up the foreign keys that reference Post / AUTH_USER_MODEL.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField(max_length=254)),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('user_commneted', models.CharField(blank=True, max_length=256, null=True)),
                ('approved_comment', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='Like',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('liked_date', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.TextField(max_length=100)),
                ('body', models.TextField(max_length=254)),
                ('user_posted', models.CharField(blank=True, max_length=254, null=True)),
                ('publish', models.DateTimeField(default=django.utils.timezone.now)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('userpost', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Unlike',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('unliked_date', models.DateTimeField(auto_now_add=True)),
                # NOTE(review): null=True on a ManyToManyField has no effect
                # in Django and normally triggers a warning — confirm intent.
                ('unliked_post', models.ManyToManyField(blank=True, null=True, to='fOn.Post')),
                ('userunlike', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='UserDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('title', models.CharField(choices=[('MR', 'Mr.'), ('MRS', 'Mrs.'), ('MS', 'Ms,')], max_length=3)),
                ('gender', models.CharField(choices=[('F', 'Female'), ('M', 'Male'), ('O', 'Other')], max_length=1)),
                ('mobile', models.IntegerField()),
                ('email', models.EmailField(max_length=254)),
                ('friends', models.ManyToManyField(related_name='_userdetail_friends_+', to='fOn.UserDetail')),
                ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Like/Comment foreign keys are added after Post exists.
        migrations.AddField(
            model_name='like',
            name='liked_post',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='fOn.Post'),
        ),
        migrations.AddField(
            model_name='like',
            name='userlike',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='comment',
            name='post',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='fOn.Post'),
        ),
    ]
|
16,838 | f9c4b91c1ab06ea9055f42b4fad145a5a96ba331 | # /bin/env python
# -*- coding: utf-8 -*-
#
# Copyright ยฉ 2020 Aljosha Friemann <a.friemann@automate.wtf>
#
# Distributed under terms of the 3-clause BSD license.
from typing import Dict
import ruamel.yaml as yaml
def load(path: str) -> Dict:
    """Read the YAML file at *path* and return its parsed contents."""
    with open(path, 'rb') as stream:
        raw = stream.read()
    # Decode outside the parse call; bytes are read so decoding is explicit.
    return yaml.safe_load(raw.decode())
|
16,839 | 33d01b434e52b056a752f29ccfd00ba0b7d79506 | moves = int(input())
result = 0
from_0_to_9 = 0
from_10_to_19 = 0
from_20_to_29 = 0
from_30_to_39 = 0
from_40_to_50 = 0
invalid_num = 0
for i in range(1, moves+1):
num = int(input())
if num >= 0 and num <= 9:
from_0_to_9 += 1
result += num * 0.20
elif num >= 10 and num <= 19:
from_10_to_19 += 1
result += num * 0.30
elif num >= 20 and num <= 29:
from_20_to_29 += 1
result += num * 0.40
elif num >= 30 and num <= 39:
from_30_to_39 += 1
result += 50
elif num >= 40 and num <= 50:
from_40_to_50 += 1
result += 100
elif num < 0 or num > 50:
result /= 2
invalid_num += 1
from_0_to_9_per = from_0_to_9 / moves * 100
from_10_to_19_per = from_10_to_19 / moves * 100
from_20_to_29_per = from_20_to_29 / moves * 100
from_30_to_39_per = from_30_to_39 / moves * 100
from_40_to_50_per = from_40_to_50 / moves * 100
invalid_num_per = invalid_num / moves * 100
print(f"{result:.2f}")
print(f"From 0 to 9: {from_0_to_9_per:.2f}%")
print(f"From 10 to 19: {from_10_to_19_per:.2f}%")
print(f"From 20 to 29: {from_20_to_29_per:.2f}%")
print(f"From 30 to 39: {from_30_to_39_per:.2f}%")
print(f"From 40 to 50: {from_40_to_50_per:.2f}%")
print(f"Invalid numbers: {invalid_num_per:.2f}%") |
16,840 | 54c448465184730a522dbc94231016dc5ae15f30 | # Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import falcon
from shipyard_airflow.common.notes.errors import NoteNotFoundError
from shipyard_airflow.common.notes.errors import NoteURLNotSpecifiedError
from shipyard_airflow.common.notes.errors import NoteURLRetrievalError
from shipyard_airflow.common.notes.notes_helper import NoteType
from shipyard_airflow.control.base import BaseResource
from shipyard_airflow.control.helpers.notes import NOTES as notes_helper
from shipyard_airflow.errors import ApiError
from shipyard_airflow.errors import InvalidFormatError
from shipyard_airflow import policy
NOTE_TYPE_RBAC = {
NoteType.ACTION: policy.GET_ACTION,
NoteType.STEP: policy.GET_ACTION_STEP,
# Anything else uses only the already checked GET_NOTEDETAILS
# new known note types should be added to the notes helper and also
# represented here.
NoteType.OTHER: None
}
# /api/v1.0/notedetails/{note_id}
class NoteDetailsResource(BaseResource):
    """Resource to service requests for note details"""
    @policy.ApiEnforcer(policy.GET_NOTEDETAILS)
    def on_get(self, req, resp, **kwargs):
        """Retrieves additional information for a note.
        Using the specified note_id, looks up any additional information
        for a note

        :param req: the falcon request (context carries credentials)
        :param resp: the falcon response to populate
        :param kwargs: URL parameters; must contain ``note_id``
        """
        note_id = kwargs['note_id']
        # Validate format first, then enforce per-note-type RBAC.
        self.validate_note_id(note_id)
        note = self.get_note_with_access_check(req.context, note_id)
        resp.text = self.get_note_details(note)
        resp.status = falcon.HTTP_200
    def validate_note_id(self, note_id):
        # Note ids are ULIDs, which are always 26 characters long.
        if not len(note_id) == 26:
            raise InvalidFormatError(
                title="Notes ID values are 26 character ULID values",
                description="Invalid note_id: {} in URL".format(note_id)
            )
    def get_note_with_access_check(self, context, note_id):
        """Retrieve the note and checks user access to the note
        :param context: the request context
        :param note_id: the id of the note to retrieve.
        :returns: the note
        :raises ApiError: 404 if the note does not exist, 500 if the
            note's type is unknown to the RBAC mapping
        """
        try:
            note = notes_helper.get_note(note_id)
            note_type = notes_helper.get_note_assoc_id_type(note)
            # Unknown note types indicate NOTE_TYPE_RBAC is out of sync
            # with the notes helper — fail loudly rather than skip auth.
            if note_type not in NOTE_TYPE_RBAC:
                raise ApiError(
                    title="Unable to check permission for note type",
                    description=(
                        "Shipyard is not correctly identifying note type "
                        "for note {}".format(note_id)),
                    status=falcon.HTTP_500,
                    retry=False)
            policy.check_auth(context, NOTE_TYPE_RBAC[note_type])
            return note
        except NoteNotFoundError:
            raise ApiError(
                title="No note found",
                description=("Note {} is not found".format(note_id)),
                status=falcon.HTTP_404)
    def get_note_details(self, note):
        """Retrieve the note details from the notes_helper
        :param note: the note with extended information
        :raises ApiError: 404 when the note has no URL, 500 when the URL
            cannot currently be fetched
        """
        try:
            return notes_helper.get_note_details(note)
        except NoteURLNotSpecifiedError:
            raise ApiError(
                title="No further note details are available",
                description=("Note {} has no additional information to "
                             "return".format(note.note_id)),
                status=falcon.HTTP_404)
        except NoteURLRetrievalError:
            raise ApiError(
                title="Unable to retrieve URL information for note",
                description=("Note {} has additional information, but it "
                             "cannot be accessed by Shipyard at this "
                             "time".format(note.note_id)),
                status=falcon.HTTP_500)
|
16,841 | a444db7aaa54ad3f7e1f727f587775ba1e88dd0d | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 6 15:05:49 2020
@author: blakeconrad
"""
import pandas as pd
import numpy as np
# Load the cleaned DraftKings data and set up a pyomo model with the
# GLPK solver. NOTE(review): `df` and `opt` are not used further in this
# fragment — presumably consumed later in the original script.
df = pd.read_csv("DraftKingsdata_Cleaner.csv")
import os
import pyomo.environ as pyo
from pyomo.environ import *
from pyomo.opt import SolverFactory
opt = pyo.SolverFactory('glpk')
model = ConcreteModel()
|
16,842 | 626d43c14d979001b88e50903a1b28793c6a8e9b | import os
# Strip the trailing "x1"-style scale suffix from result images,
# e.g. "name_x1.png" -> "name.png".
path = '../experiment/test/results-B100'
for filename in os.listdir(path):
    src = os.path.join(path, filename)
    if 'x1' not in filename:
        continue
    # Keep everything before the first 'x', minus the separator character.
    stem = filename.split('x')[0][:-1]
    os.rename(src, os.path.join(path, stem + ".png"))
|
16,843 | ef267dfa1b4be7817d50f6c806500f20afbd9858 | #Copyright (c) 2018-2020 William Emerison Six
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
# PURPOSE
#
# Make the rotations work correctly by thinking about the problem
# more clearly
#
# In the previous demo, The initial translate is effectively canceled out,
# leaving a rotation and then a translation.
# Translate inverse(Translate) Rotate Translate
#
# Translate inverse(Translate) = Identity. i.e. 5 * 1/5 = 1,
# so we really just need to do a rotation first, and then a translation,
# but this can be counterintuitive at first because we like to think
# in relative terms.
# To understand why the code in this demo works, you can think
# about it in one of two ways. Either there is a sequence
# of function calls, all of which happen relative to the global
# origin; or, you can read the transformations backwards,
# where instead of doing operations on points, the operations
# all modify the current axis to a new relative axis,
# and all subsequent functions move those relative axises to
# new relative axises.
# Strong suggestion for computer graphics, especially from
# modelspace to global space:
# Read the transformations in the latter.
# See the transformations below, and the associated animated gifs.
import sys
import os
import numpy as np
import math
from OpenGL.GL import *
import glfw
if not glfw.init():
sys.exit()
glfw.window_hint(glfw.CONTEXT_VERSION_MAJOR,1)
glfw.window_hint(glfw.CONTEXT_VERSION_MINOR,4)
window = glfw.create_window(500,
500,
"ModelViewProjection Demo 9",
None,
None)
if not window:
glfw.terminate()
sys.exit()
# Make the window's context current
glfw.make_context_current(window)
# Install a key handler
# GLFW key callback: request window close when Escape is pressed.
def on_key(window, key, scancode, action, mods):
    if key == glfw.KEY_ESCAPE and action == glfw.PRESS:
        glfw.set_window_should_close(window,1)
glfw.set_key_callback(window, on_key)
glClearColor(0.0,
0.0,
0.0,
1.0)
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
def draw_in_square_viewport():
    """Clear the window to gray, then carve out a centered black square
    (side = the window's smaller dimension) and set the viewport so all
    subsequent NDC drawing lands inside that square.
    """
    # clear the whole framebuffer to gray first
    glClearColor(0.2,  # r
                 0.2,  # g
                 0.2,  # b
                 1.0)  # a
    glClear(GL_COLOR_BUFFER_BIT)

    width, height = glfw.get_framebuffer_size(window)
    # side length of the largest centered square that fits the window
    # (renamed from "min", which shadowed the builtin)
    side = width if width < height else height

    # scissor so the next clear only touches the centered square
    glEnable(GL_SCISSOR_TEST)
    glScissor(int((width - side)/2.0),   # min x
              int((height - side)/2.0),  # min y
              side,                      # width
              side)                      # height
    glClearColor(0.0,  # r
                 0.0,  # g
                 0.0,  # b
                 1.0)  # a
    # gl clear will only update the square to black values.
    glClear(GL_COLOR_BUFFER_BIT)
    # disable the scissor test, so now any opengl calls will
    # happen as usual.
    glDisable(GL_SCISSOR_TEST)

    # But, we only want to draw within the black square.
    # We set the viewport, so that the NDC coordinates
    # will be mapped to the region of screen coordinates
    # that we care about, which is the black square.
    glViewport(int(0.0 + (width - side)/2.0),   # min x
               int(0.0 + (height - side)/2.0),  # min y
               side,                            # width
               side)                            # height
class Vertex:
    """A 2D point in the plane.

    Every transform is non-destructive: it leaves ``self`` untouched and
    hands back a brand-new Vertex.
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __repr__(self):
        return f"Vertex(x={repr(self.x)},y={repr(self.y)})"

    def translate(self, tx, ty):
        """Return a copy of this vertex shifted by (tx, ty)."""
        return Vertex(x=self.x + tx,
                      y=self.y + ty)

    def scale(self, scale_x, scale_y):
        """Return a copy with each coordinate scaled independently."""
        return Vertex(x=self.x * scale_x,
                      y=self.y * scale_y)

    def rotate(self, angle_in_radians):
        """Return a copy rotated counterclockwise about the origin."""
        cos_a = math.cos(angle_in_radians)
        sin_a = math.sin(angle_in_radians)
        return Vertex(x=self.x * cos_a - self.y * sin_a,
                      y=self.x * sin_a + self.y * cos_a)
# NEW
# removed rotate_around, as it was useless for our purpose
class Paddle:
    """A colored quad plus its transform state.

    ``vertices`` are model-space points; ``initial_position`` anchors the
    paddle in world space; ``rotation`` (radians) and the input offsets
    are mutated each frame by the keyboard handler.
    """

    def __init__(self, vertices, r, g, b, initial_position, rotation=0.0, input_offset_x=0.0, input_offset_y=0.0):
        self.vertices = vertices
        self.r = r  # color components in [0, 1]
        self.g = g
        self.b = b
        self.rotation = rotation
        self.input_offset_x = input_offset_x
        self.input_offset_y = input_offset_y
        self.initial_position = initial_position

    def __repr__(self):
        # BUG FIX: the original interpolated {repr({self.input_offset_y})},
        # wrapping the value in a set literal and printing e.g. "{0.0}"
        # instead of "0.0".
        return (f"Paddle(vertices={repr(self.vertices)},r={repr(self.r)},"
                f"g={repr(self.g)},b={repr(self.b)},"
                f"initial_position={repr(self.initial_position)},"
                f"rotation={repr(self.rotation)},"
                f"input_offset_x={repr(self.input_offset_x)},"
                f"input_offset_y={repr(self.input_offset_y)})")
paddle1 = Paddle(vertices=[Vertex(x=-10.0, y=-30.0),
Vertex(x= 10.0, y=-30.0),
Vertex(x= 10.0, y=30.0),
Vertex(x=-10.0, y=30.0)],
r=0.578123,
g=0.0,
b=1.0,
initial_position=Vertex(-90.0,0.0))
paddle2 = Paddle(vertices=[Vertex(x=-10.0, y=-30.0),
Vertex(x= 10.0, y=-30.0),
Vertex(x= 10.0, y=30.0),
Vertex(x=-10.0, y=30.0)],
r=1.0,
g=0.0,
b=0.0,
initial_position=Vertex(90.0,0.0))
def handle_movement_of_paddles():
    """Poll the keyboard and update both paddles' transform state.

    W/S move paddle1 up/down and I/K move paddle2 (10 world units per
    frame); A/D rotate paddle1 and J/L rotate paddle2 (0.1 rad per frame).
    """
    global paddle1, paddle2
    if glfw.get_key(window, glfw.KEY_S) == glfw.PRESS:
        paddle1.input_offset_y -= 10.0
    if glfw.get_key(window, glfw.KEY_W) == glfw.PRESS:
        paddle1.input_offset_y += 10.0
    if glfw.get_key(window, glfw.KEY_K) == glfw.PRESS:
        paddle2.input_offset_y -= 10.0
    if glfw.get_key(window, glfw.KEY_I) == glfw.PRESS:
        paddle2.input_offset_y += 10.0
    # NOTE(review): these globals are declared but never assigned here --
    # the rotations live on the Paddle objects above.
    global paddle_1_rotation, paddle_2_rotation
    if glfw.get_key(window, glfw.KEY_A) == glfw.PRESS:
        paddle1.rotation += 0.1
    if glfw.get_key(window, glfw.KEY_D) == glfw.PRESS:
        paddle1.rotation -= 0.1
    if glfw.get_key(window, glfw.KEY_J) == glfw.PRESS:
        paddle2.rotation += 0.1
    if glfw.get_key(window, glfw.KEY_L) == glfw.PRESS:
        paddle2.rotation -= 0.1
TARGET_FRAMERATE = 60 # fps
# to try to standardize on 60 fps, compare times between frames
time_at_beginning_of_previous_frame = glfw.get_time()
# Loop until the user closes the window
while not glfw.window_should_close(window):
# poll the time to try to get a constant framerate
while glfw.get_time() < time_at_beginning_of_previous_frame + 1.0/TARGET_FRAMERATE:
pass
# set for comparison on the next frame
time_at_beginning_of_previous_frame = glfw.get_time()
# Poll for and process events
glfw.poll_events()
width, height = glfw.get_framebuffer_size(window)
glViewport(0, 0, width, height)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# render scene
draw_in_square_viewport()
handle_movement_of_paddles()
# draw paddle1
glColor3f(paddle1.r,
paddle1.g,
paddle1.b)
# if you read the operations below as rotate, translate1, translate2,
# you should imagine it as follows
# eog ../images/rotation1F.gif
# if instead you read them backwards, imagine the transformations
# as follows
# eog ../images/rotation1B.gif
# side note. Typically I use a debugger as an interactive evaluator,
# in order to understand how code which I do not understand works.
# In computer graphics, the debugger is of limited help because
# the transformations on the individual points is not worth
# thinking about, and therefore the intermediat results
# are worthless for reasoning.
#
# In order to be successful, I highly recommend reading the transformations
# backwards, with a moving/rotating/scaled axises.
#
# (This advise will be modified when I introduce transformation stacks,
# but the same principle will apply. Also, on the note of transformation
# stacks, N.B. that the scaling from world space to ndc is shared
# for both paddles, and that changing the code in one place would
# required changing the code for all shapes.)
#
# I prefer to think graphically instead of symbolically.
# Another way you can think of this is to rotate the the x axis
# and y axis, create graph paper (tick marks) along those new
# axis, and then draw the geometry on that new "basis",
# instead of the natural basis. (Natural basis just means
# the normal x and y axis).
# Think of basis as an origin, a unit in various directions,
# a graph paper lines drawn. Then your geometry is drawn
# in that space.
# In revisting demo 6's space, if we read all of the transformations
# below in order, it's following the order of function application.
#
# If instead we read the transformation between spaces backwards in code,
# (and going the opposite direction of the arrows), we can view a coordinate
# system that is changing (origin can move, and axises can rotate/scale)
# eog ../images/demo06.png
# ALSO, see mvpVisualization/demo.py and mvpVisualization/demoAnimation.py.
# THESE WILL SHOW THE TRANSMORTAIONS backwards much more intuitively.
glBegin(GL_QUADS)
for model_space in paddle1.vertices:
world_space = model_space.rotate(paddle1.rotation) \
.translate(tx=paddle1.initial_position.x,
ty=paddle1.initial_position.y) \
.translate(tx=paddle1.input_offset_x,
ty=paddle1.input_offset_y)
ndc_space = world_space.scale(scale_x=1.0/100.0,
scale_y=1.0/100.0)
glVertex2f(ndc_space.x,
ndc_space.y)
glEnd()
# draw paddle2
glColor3f(paddle2.r,
paddle2.g,
paddle2.b)
# Same thing for the second paddle.
# eog ../images/rotation2F.gif
# eog ../images/rotation2B.gif
glBegin(GL_QUADS)
for model_space in paddle2.vertices:
world_space = model_space.rotate(paddle2.rotation) \
.translate(tx=paddle2.initial_position.x,
ty=paddle2.initial_position.y) \
.translate(tx=paddle2.input_offset_x,
ty=paddle2.input_offset_y)
ndc_space = world_space.scale(scale_x=1.0/100.0,
scale_y=1.0/100.0)
glVertex2f(ndc_space.x,
ndc_space.y)
glEnd()
# done with frame, flush and swap buffers
# Swap front and back buffers
glfw.swap_buffers(window)
glfw.terminate()
|
16,844 | b9235ac55e9283bd8254168786f8cd2c8cf8541f | from argparse import ArgumentTypeError
from . import assert_conv_fails
from terseparse import types
import pytest
def test_Int():
    """Bounds-checked integer conversion in decimal and hex forms."""
    t = types.Int(-0xF, 0x10)
    # BUG FIX: the original wrote bare `t('-15') == -15` comparisons with
    # no `assert`, so the results were computed and discarded and the
    # test could never fail on a wrong conversion.
    assert t('-15') == -15
    assert t('-0xF') == -0xF
    assert t('0xF') == 15
    assert t('15') == 15
    assert_conv_fails(t, '-16')
    assert_conv_fails(t, '-0x10')
    assert_conv_fails(t, '0x10')
    assert_conv_fails(t, '16')
def test_Int_leading_zero():
    """Leading zeros must parse as decimal, not octal."""
    t = types.Int()
    # BUG FIX: added the missing `assert` keywords -- the original bare
    # comparisons were no-ops.
    assert t('00') == 0
    assert t('-00') == 0
    assert t('043') == 43
    assert_conv_fails(t, 'a')
def test_Int_u32():
    """The u32 preset accepts [0, 2**32 - 1] only."""
    t = types.Int.u32
    # BUG FIX: added the missing `assert` keywords -- the original bare
    # comparisons were no-ops.
    assert t('0') == 0
    assert t('0xFFFFFFFF') == 2**32 - 1
    assert_conv_fails(t, '-1')
    assert_conv_fails(t, '0xFFFFFFFF + 1')
def test_Or():
t = types.Or(types.Int.negative, types.Int(1, 10))
assert_conv_fails(t, '0')
assert t('-10') == -10
assert t('9') == 9
def test_Dict():
t = types.Dict({'a': types.Int(0, 2), 'b': str})
assert_conv_fails(t, 'asdf')
assert_conv_fails(t, 'a:-1')
assert_conv_fails(t, 'a:2')
assert_conv_fails(t, 'a:2 b:asdf')
assert t('a:0') == {'a': 0}
assert t('b:asdf') == {'b': 'asdf'}
assert t('a:0 b:asdf') == {'a': 0, 'b': 'asdf'}
assert t('a=0,b=asdf') == {'a': 0, 'b': 'asdf'}
assert t('a:1, b=asdf') == {'a': 1, 'b': 'asdf'}
def test_Dict_default_value():
t = types.Dict({'a': types.Int() | types.Keyword('', 5)})
assert_conv_fails(t, 'a:a')
assert t('a') == {'a': 5}
assert t('a:5') == {'a': 5}
def test_Dict_punctuation():
t = types.Dict({'a': str, 'b': str})
assert t('a:/a/b/c/d,b:/e/f/g') == {'a': '/a/b/c/d', 'b': '/e/f/g'}
def test_Dict_to_Set():
dt = types.Dict({'a': types.Int(0, 2), 'b': str})
t = dt.keys_to_set_type()
assert t('a a') == {'a'}
assert t('a b') == {'a', 'b'}
assert t('b, a') == {'a', 'b'}
def test_List():
t = types.List(types.Or(types.Keyword('a'), types.Keyword('b')))
assert_conv_fails(t, 'asdf')
assert_conv_fails(t, 'bsdf')
assert t('a') == ['a']
assert t('b') == ['b']
assert t('a b') == ['a', 'b']
def test_Keyword():
t = types.Keyword('a', 1)
assert_conv_fails(t, 'b')
assert_conv_fails(t, 1)
assert t('a') == 1
def test_Or_string():
t = types.Or('a', 'b', 'c')
assert_conv_fails(t, 'd')
assert_conv_fails(t, '0')
assert t('a') == 'a'
assert t('b') == 'b'
assert t('c') == 'c'
def test_Or_string_syntax_sugar():
t = types.Int() | 'a'
assert_conv_fails(t, 'aa')
assert t('a') == 'a'
assert t('1234') == 1234
def test_Set():
t = types.Set(types.Or('a', 'b', 'c'))
assert_conv_fails(t, 'd')
assert_conv_fails(t, 'a,b,c,d')
assert t('a') == set(('a',))
assert t('a,b') == set(('a', 'b'))
def test_Set_duplicates():
t = types.Set(types.Or('a', 'b', 'c'))
assert t('a,a,a,b') == set(('a','b'))
def test_file():
t = types.File.rw
with pytest.raises(ArgumentTypeError):
t("non-existant-file")
|
16,845 | 8b03cce1e799f0a5882fe961987990239edda6df |
def get_input_t():
    """Return the sample (test) cup labeling as a list of digits."""
    return [int(digit) for digit in "389125467"]
def get_input():
    """Return the puzzle's real cup labeling as a list of digits."""
    return [int(digit) for digit in "364297581"]
def s2_input(inp):
    """Extend *inp* in place with labels up to one million (part 2 setup).

    Returns the same (mutated) list for call-chaining.
    """
    inp.extend(range(max(inp) + 1, 1000001))
    return inp
def s1(inp,move_cnt):
    """Play the crab-cups game on a plain list (AoC 2020 day 23, part 1).

    Rebuilds the list every move (O(n) per move), then prints the cup
    order after cup 1 (short games) or the product of the two cups that
    follow cup 1 (long games).
    """
    cur_pos = 0
    for _ in range(move_cnt):
        # destination label starts one below the current cup's label
        num_to_find = inp[cur_pos] - 1
        pickup = []
        # remove the three cups clockwise of the current cup (wrapping)
        for _ in range(3):
            pickup.append(inp.pop(c if (c:= cur_pos + 1) < len(inp) else 0))
        # the cup that becomes current next move
        next_cup = inp[c if (c:= cur_pos + 1) < len(inp) else 0]
        # step the destination label down (wrapping to the max) until it
        # names a cup still in the circle
        while num_to_find not in inp:
            num_to_find -= 1
            if num_to_find < 0:
                num_to_find = max(inp)
        cur_pos = inp.index(num_to_find)
        # splice the picked-up cups clockwise of the destination
        inp = inp[:cur_pos+1] + pickup + inp[cur_pos+1:]
        cur_pos = inp.index(next_cup)
    # rotate until cup 1 is first, then report
    while inp[0] != 1:
        inp.append(inp.pop(0))
    if move_cnt <= 100:
        print("".join([str(x) for x in inp if x != 1]))
    else:
        print(inp[1],inp[2],inp[1]*inp[2])
def s2(inp,move_cnt):
    """Same game as ``s1`` but on a successor map (part 2 scale).

    ``work_dict[x]`` is the cup clockwise of ``x``, so each move is O(1)
    pointer surgery instead of list splicing.
    """
    # build the circular successor map from the initial ordering
    work_dict = {x: y for x, y in zip(inp, inp[1:] + [inp[0]])}
    current_num = inp[0]
    for _ in range(move_cnt):
        tmp_num = current_num
        picklist = []
        # the three cups clockwise of the current cup
        picklist = [tmp_num := work_dict[tmp_num] for _ in range(3) ]
        # unlink the picked-up run from the circle
        work_dict[current_num] = work_dict[tmp_num]
        num_to_find = current_num - 1
        # destination: next lower label not inside the picked-up run
        while num_to_find in picklist or num_to_find not in work_dict.keys():
            num_to_find -= 1
            if num_to_find <= 0:
                num_to_find = max(work_dict.keys())
        # splice the run back in right after the destination cup
        work_dict[picklist[-1]],work_dict[num_to_find] = work_dict[num_to_find],picklist[0]
        current_num = work_dict[current_num]
    # read the circle out starting after cup 1
    current_num = 1
    out = []
    while work_dict[current_num] != 1:
        out.append(work_dict[current_num])
        current_num = work_dict[current_num]
    if move_cnt <= 100:
        print("".join([str(x) for x in out]))
    else:
        print(out[0],out[1],out[0]*out[1])
# Part 1: 100 moves on the 9-cup input; part 2: ten million moves on the
# million-cup input.  The commented call cross-checks part 2's engine on
# the small input.
s1(get_input(),100)
s2(s2_input(get_input()),10000000)
# s2(get_input(),100)
16,846 | 9d18079184fa0501bdf3f886a559e921c1635bb6 |
import torch
from nnet import activation
def cross_entropy_loss(outputs, labels):
    """Mean cross-entropy of predicted probabilities vs. integer labels.

    H(y, p) = -sum(y_i * log(p_i)); ``outputs`` is assumed to already be
    probabilities (post-softmax), ``labels`` a 1-D tensor of class ids.
    Returns a plain Python float.
    """
    batch = labels.shape[0]
    # probability each sample assigns to its true class
    picked = outputs[range(batch), labels]
    mean_nll = torch.sum(-torch.log(picked)) / batch
    return mean_nll.item()
def delta_cross_entropy_softmax(outputs, labels):
    """Gradient of cross-entropy w.r.t. the softmax pre-activations Z.

    dL/dz_i = p_i - y_i, averaged over the batch.
    NOTE: like the original, this mutates ``outputs`` in place (the
    subtraction happens on the caller's tensor) before averaging.
    """
    batch = labels.shape[0]
    # p - y: subtract 1 at each sample's true-class entry, in place
    outputs[range(batch), labels] -= torch.tensor(1.)
    return outputs / batch
if __name__ == "__main__":
pass |
16,847 | 94346037d7cc9e628de28fb45cee82d1ce520d27 | import discord
from discord.ext import commands
# SECURITY(review): this bot token is hard-coded in source (and therefore
# public) -- it should be revoked and loaded from an environment variable.
TOKEN = 'NTIyNTc3NjY2Mjg1NTAyNDg0.DvNAZA.qqTVZxaJwvIcORzzK4OC0D1DuOg'
client = commands.Bot(command_prefix = 'a!')
# remove the built-in help so the custom a!help below can register
client.remove_command('help')
# per-server audio players, keyed by server id
players = {}
@client.event
async def on_ready():
await client.change_presence(game=discord.Game(name='Music on discord | version 1.0.1'))
print('Bot Online')
@client.command(pass_context=True)
async def help(ctx):
author = ctx.message.author
embed = discord.Embed(
colour = discord.Colour.orange()
)
embed.set_author(name='Auditor Commands')
embed.add_field(name='Music Commands', value='a!play <song url> a!stop <stops song a!pause <pauses song a!resume <resumes song> a!join <joins voice channel> a!leave <leaves voice channel', inline=False)
await client.send_message(author, embed=embed)
await client.say('Check your dms')
@client.command(pass_context=True)
async def join(ctx):
    """Join the voice channel the invoking user is currently in."""
    channel = ctx.message.author.voice.voice_channel
    await client.join_voice_channel(channel)
    embed = discord.Embed(
        title = "Success!",  # BUG FIX: message typo ("Succsess")
        description = "Connected to the Voice Channel.",
        # BUG FIX: discord.colour.green() raised AttributeError -- the
        # green() factory is a classmethod of discord.Colour.
        colour = discord.Colour.green()
    )
    await client.send_message(ctx.message.channel, embed=embed)
@client.command(pass_context=True)
async def leave(ctx):
    """Leave this server's voice channel, if the bot is connected."""
    server = ctx.message.server
    voice_client = client.voice_client_in(server)
    # BUG FIX: the original branch was inverted -- it reported success
    # when voice_client was None, then crashed calling .disconnect() on
    # None, and it sent the success embed twice when actually connected.
    if voice_client is None:
        return
    await voice_client.disconnect()
    embed = discord.Embed(
        title = "Success!",  # BUG FIX: message typo ("Succsess")
        description = "Disconnected from the Voice Channel.",
        # BUG FIX: discord.Colour.green(), not discord.colour.green()
        colour = discord.Colour.green()
    )
    await client.send_message(ctx.message.channel, embed=embed)
@client.command(pass_context=True)
async def play(ctx, url):
server = ctx.message.server
voice_client = client.voice_client_in(server)
player = await voice_client.create_ytdl_player(url)
players[server.id] = player
player.start()
embed = discord.Embed(
title = "Now Playing...",
description = "Playing " + url + ".",
colour = discord.colour.green()
)
await client.send_message(ctx.message.channel, embed=embed)
@client.command(pass_context=True)
async def stop(ctx):
id = ctx.message.server.id
players[id].stop()
embed = discord.Embed(
title = "Stopping!",
description = "Stopping the music.",
colour = discord.colour.green()
)
await client.send_message(ctx.message.channel, embed=embed)
@client.command(pass_context=True)
async def resume(ctx):
id = ctx.message.server.id
players[id].resume()
embed = discord.Embed(
title = "Resuming!",
description = "Resuming the music.",
colour = discord.colour.green()
)
await client.send_message(ctx.message.channel, embed=embed)
@client.command(pass_context=True)
async def pause(ctx):
id = ctx.message.server.id
players[id].pause()
embed = discord.Embed(
title = "Pasuing!",
description = "Pasuing the music.",
colour = discord.colour.green()
)
await client.send_message(ctx.message.channel, embed=embed)
client.run(TOKEN)
|
16,848 | ef16cc721605e26cb22ead8241d7cae1d8e34016 | import requests
import os
from bs4 import BeautifulSoup
def get_web_text(url):
    """Fetch *url* and return the text of the element with id="gutenb".

    NOTE(review): if the page has no such element, find() returns None
    and the .get_text() call raises AttributeError.
    """
    webpage = requests.get(url)
    raw_text = BeautifulSoup(webpage.text, "html.parser")
    div = raw_text.find(id="gutenb").get_text()
    return div
def write_into_file(text, filename, file_path):
    """Write *text* to "Chapter <filename>.txt" inside *file_path*.

    NOTE(review): chdir()s into file_path and never restores the previous
    working directory, which affects later relative-path operations.
    """
    os.chdir(file_path)
    with open("Chapter " + str(filename) + ".txt", "w", encoding='utf-8') as file:
        file.writelines(text)
def main(save_path):
    """Download chapter pages 1-50 of "Der Idiot" and save each to a file
    under *save_path*.
    """
    url = "http://gutenberg.spiegel.de/buch/der-idiot-2098/"
    page = 1
    while page <= 50:
        # chapter pages are addressed by appending the page number
        text = get_web_text(url + str(page))
        write_into_file(text, page, save_path)
        print("Read page " + str(page) + " from 50.")
        page += 1
main(os.getcwd()) |
16,849 | 3ed78bf0b4c296544cc112836f171b4b6f786f34 | # Run faster using pypy :)
# pip install pycryptodome && pip install tqdm
from Crypto.Cipher import AES
from tqdm import tqdm
with open('./provided/Readme.txt') as f:
d = f.read().split('\n')
enc_data = (
[x for x in d if 'Encrypted Data' in x][0].split()[-1].decode('hex'))
known_bytes = (
[x for x in d if 'Key Bytes' in x][0].split()[-1].decode('hex'))
with open('./provided/test.txt') as f:
d = [x.split(',') for x in f.read().strip().split('\n')]
timing = {
tuple(map(ord, x.decode('hex'))): int(y)
for x, y in d
}
print "[+] Finished reading %d timing values" % len(timing)
print "[ ] Starting correlation analysis"
max_timing = max(timing.values())
correlations = []
for i in tqdm(xrange(16)):
for j in xrange(i + 1, 16):
t = [0 for _ in xrange(256)]
for k, v in timing.iteritems():
p = k[i] ^ k[j]
t[p] = max(t[p], v)
m = min(t)
assert m != 0
if m != max_timing:
top_bytes = t.index(m) & (0xff - 0x3)
assert t.count(m) == 4
assert all(i & (0xff - 0x3) == top_bytes
for i, x in enumerate(t) if x == m)
correlations.append((i, j, t.index(m)))
print "[+] Found %d correlations" % len(correlations)
key = list(map(ord, known_bytes))
key += [None for _ in xrange(16 - len(key))]
for i, j, v in correlations:
if key[i] is None:
if key[j] is None:
continue
else:
key[i] = key[j] ^ v
else:
if key[j] is None:
key[j] = key[i] ^ v
else:
if (key[i] ^ key[j]) >> 2 != (v >> 2):
print "Weird %d %d %d %d" % (i, j, key[i] ^ key[j], v)
for i in xrange(len(known_bytes), 16):
if key[i] is not None:
key[i] = key[i] & (0xff - 0x3)
assert key[i] & 0x3 == 0, key[i]
partial_bytes = 16 - (len(known_bytes) + key.count(None))
unknown_bytes = key.count(None)
reqd_brute_bits = 2 * partial_bytes + 8 * unknown_bytes
print "[+] Inferred %d partial bytes. %d completely unknown bytes." % (
partial_bytes, unknown_bytes
)
print " %s" % repr(key)
print "[ ] Starting brute force of %d bits." % (
reqd_brute_bits
)
for brute in tqdm(xrange(2 ** reqd_brute_bits)):
key_guess = key[:]
for i in xrange(len(known_bytes), 16):
if key_guess[i] is None:
key_guess[i] = (brute & 0x3f) << 2
brute = brute >> 6
for i in xrange(len(known_bytes), 16):
key_guess[i] = key_guess[i] | (brute & 0x3)
brute = brute >> 2
assert brute == 0
cipher = AES.new(''.join(map(chr, key_guess)), AES.MODE_ECB)
res = cipher.decrypt(enc_data)
if 'flag{' in res:
print
print "[+] Found flag: %s" % repr(res)
print "[+] Finished"
# Note: Result found at 53401956
|
16,850 | feece64d33659d6479f835fabdc4e3fcf574554d | #encoding: utf-8
import csv
def lercsv():
    """Read Lista-Clientes.csv and return rows of [name, CNPJ digits, col B].

    NOTE(review): opening in 'rb' and handing the binary file to
    csv.reader is Python 2 usage; under Python 3 this needs text mode
    with newline='' -- confirm which interpreter runs this.
    """
    # Open the CSV file with the client list.
    with open('Lista-Clientes.csv', 'rb') as f:
        reader = csv.reader(f, delimiter=';')
        arquivolido = list(reader)
    clientes = len(arquivolido)
    listapronta = [[0 for x in range(3)] for y in range(clientes)]
    #print len(clientslist)
    for linha in range(clientes):
        listapronta[linha][0] = arquivolido[linha][0]
        # strip CNPJ punctuation ('.', '/', '-'), leaving digits only
        listapronta[linha][1] = (arquivolido[linha][2]).replace('.','').replace('/','').replace('-','')
        listapronta[linha][2] = arquivolido[linha][1]
    return listapronta
|
16,851 | 9e8baad96472346ec0d225b546552ada7f178499 | # Generated by Django 3.2 on 2021-04-09 12:21
import datetime
import django.contrib.gis.db.models.fields
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('spot', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='place',
name='address',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='place',
name='city',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='place',
name='description',
field=models.TextField(max_length=2000, null=True),
),
migrations.AddField(
model_name='place',
name='phone',
field=models.IntegerField(null=True),
),
migrations.AddField(
model_name='place',
name='tags',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='place',
name='title',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='place',
name='type_of_place',
field=models.CharField(max_length=200, null=True),
),
migrations.AlterField(
model_name='place',
name='location',
field=django.contrib.gis.db.models.fields.PointField(blank=True, default=datetime.datetime(2021, 4, 9, 12, 21, 14, 143291, tzinfo=utc), null=True, srid=4326),
),
]
|
16,852 | 05a16f6b575be416ea51533b5ee37cafdb9b24b4 | from django.contrib import admin
from django.urls import include, path
from . import views
# Route the app root to the traveller view.
urlpatterns = [
    path('',views.traveller, name ='traveller')
]
16,853 | 6f875f7a093dc10c3e3ed07978c6058e43f90afc | print("Enter number of elements for list :")
n=int(input())
list = []
s=""
for i in range(0,n):
list.append(input())
s=s+list[i]
print("Is list palindrome ?")
print(str(s==s[::-1])) |
16,854 | 476e8243df8b87dcdab3f34b0e7feffff6bafc74 | # ็ฝๆ่ดข็ป่ทๅ็20ๅนดไธ่ฏๆๆฐๅๅฒๆฐๆฎ
# ๆ นๆฎๅฝๅๆถ้ดๅ150ๅคฉ็ๅๅฒๆฐๆฎ๏ผ้ขๆตๅฝๅคฉไธ่ฏๆๆฐ็ๆถจ่ท
import pandas as pd # ๅผๅ
ฅpandasๅบ๏ผ็จๆฅๅ ่ฝฝCSVๆฐๆฎ
import numpy as np # ๅผๅ
ฅnumpyๅบ๏ผๆฏๆ้ซ็บงๅคง้็็ปดๅบฆๆฐ็ปไธ็ฉ้ต่ฟ็ฎ๏ผๆญคๅคไน้ๅฏนๆฐ็ป่ฟ็ฎๆไพๅคง้็ๆฐๅญฆๅฝๆฐๅบ
from sklearn import svm # svmๅบ
from sklearn import cross_validation # ไบคๅ้ช่ฏ
data = pd.read_csv('000777.csv', encoding='gbk', parse_dates=[0], index_col=0)
# pandas.read_csv(ๆฐๆฎๆบ,encoding=็ผ็ ๆ ผๅผไธบgbk,parse_dates=็ฌฌ0ๅ่งฃๆไธบๆฅๆ ๅ่งฃๆไธบๆฅๆ, index_col=็จไฝ่ก็ดขๅผ็ๅ็ผๅท)
data.sort_index(0, ascending=True, inplace=True)
# sort_index(axis=0(ๆ0ๅๆ ), ascending=True(ๅๅบ), inplace=False(ๆๅบๅๆฏๅฆ่ฆ็ๅๅๆฐๆฎ))
# dataๆๅๅบๆๅ
# ้ๅ150ๅคฉ็ๆฐๆฎ
dayfeature = 150
# ้ๅ5ๅๆฐๆฎไฝไธบ็นๅพ๏ผๆถ็ไปท ๆ้ซไปท ๆไฝไปท ๅผ็ไปท ๆไบค้
# ้ๅ5ไธช็นๅพ*ๅคฉๆฐ
featurenum = 5 * dayfeature
# ่ฎฐๅฝ150ๅคฉ็5ไธช็นๅพๅผ
x = np.zeros((data.shape[0] - dayfeature, featurenum + 1))
# data.shape[0]-dayfeatureๆๆๆฏๅ ไธบๆไปฌ่ฆ็จ150ๅคฉๆฐๆฎๅ่ฎญ็ป
# ๅฏนไบๆก็ฎไธบ200ๆก็ๆฐๆฎ๏ผๅชๆ50ๆกๆฐๆฎๆฏๆๅ150ๅคฉ็ๆฐๆฎๆฅ่ฎญ็ป็
# ๆไปฅ่ฎญ็ป้็ๅคงๅฐๅฐฑๆฏ200-150 = 50
# ๅฏนไบๆฏไธๆกๆฐๆฎ๏ผๅฎ็็นๅพๆฏๅ150ๅคฉ็ๆๆ็นๅพๆฐๆฎ๏ผๅณ150*5
# +1ๆฏๅฐๅฝๅคฉ็ๅผ็ไปทๅผๅ
ฅไฝไธๆก็นๅพๆฐๆฎ
# ่ฎฐๅฝๆถจๆ่
่ท
y = np.zeros((data.shape[0] - dayfeature))
for i in range(0, data.shape[0] - dayfeature):
# u'ๅผ็ไปท'ไธญ็u่กจ็คบunicode็ผ็
x[i, 0:featurenum] = np.array(data[i:i + dayfeature] \
[[u'ๆถ็ไปท', u'ๆ้ซไปท', u'ๆไฝไปท', u'ๅผ็ไปท', u'ๆไบค้']]).reshape((1, featurenum))
# ๅฐๆฐๆฎไธญ็ๅ็งไปทๆ ผๅญๅ
ฅxๆฐ็ปไธญ
# data.ixไธญ็ix่กจ็คบ็ดขๅผ
x[i, featurenum] = data.ix[i + dayfeature][u'ๅผ็ไปท']
# ๆๅไธๅ่ฎฐๅฝๅฝๆฅ็ๅผ็ไปท
for i in range(0, data.shape[0] - dayfeature):
# ๅฆๆๅฝๅคฉๆถ็ไปท้ซไบๅผ็ไปท๏ผy[i]=1ไปฃ่กจๆถจ,y[i]=0ไปฃ่กจ่ท
if data.ix[i + dayfeature][u'ๆถ็ไปท'] >= data.ix[i + dayfeature][u'ๅผ็ไปท']:
y[i] = 1
else:
y[i] = 0
# ่ฐ็จsvmๅฝๆฐ๏ผๅนถ่ฎพ็ฝฎkernelๅๆฐ๏ผ้ป่ฎคๆฏrbf๏ผๅ
ถไปๅๆฐๆlinear๏ผpoly๏ผsigmoid
clf = svm.SVC(kernel='rbf')
result = []
for i in range(5):
# xๅy็้ช่ฏ้ๅๆต่ฏ้๏ผๅๅ80-20%็ๆต่ฏ้
x_train, x_test, y_train, y_test = \
cross_validation.train_test_split(x, y, test_size=0.2)
# ไฝฟ็จ่ฎญ็ปๆฐๆฎ่ฎญ็ป
clf.fit(x_train, y_train)
# ๅฐ้ขๆตๆฐๆฎๅๆต่ฏ้็้ช่ฏๆฐๆฎๆฏๅฏน
result.append(np.mean(y_test == clf.predict(x_test)))
print("svm classifier accuacy:")
print(result)
# ไบคๅ้ช่ฏๆณๅ
ๅฐๆฐๆฎ้Dๅๅไธบkไธชๅคงๅฐ็ธไผผ็ไบๆฅๅญ้
# ๆฏไธชๅญ้้ฝๅฐฝๅฏ่ฝไฟๆๆฐๆฎๅๅธ็ไธ่ดๆง๏ผๅณไปDไธญ้่ฟๅๅฑ้ๆ ทๅพๅฐ
# ็ถๅ๏ผๆฏๆฌก็จk-1ไธชๅญ้็ๅนถ้ไฝไธบ่ฎญ็ป้๏ผไฝไธ็้ฃไธชๅญ้ไฝไธบๆต่ฏ้
# ่ฟๆ ทๅฐฑๅฏ่ทๅพk็ป่ฎญ็ป/ๆต่ฏ้๏ผไป่ๅฏ่ฟ่กkๆฌก่ฎญ็ปๅๆต่ฏ๏ผๆ็ป่ฟๅ็ๆฏ่ฟkไธชๆต่ฏ็ปๆ็ๅๅผ
# ้ๅธธๆไบคๅ้ช่ฏๆณ็งฐไธบ"kๆไบคๅ้ช่ฏ",kๆๅธธ็จ็ๅๅผๆฏ10๏ผๆญคๆถ็งฐไธบ10ๆไบคๅ้ช่ฏ
|
16,855 | 1a4d1186072a5c45bf910a5fe35ef396d047bd2c | from django.apps import AppConfig
# Django application registration for the "khmap" app.
class KhmapConfig(AppConfig):
    name = 'khmap'
|
16,856 | b83af801bf5819f3d6ddbf6c6b36c5d36f75d150 | import telebot
from telebot import types
import db_users
bot = telebot.TeleBot("1299818980:AAFzO8-7l_0iKWoe2hgQopIg0Aw28BJouNM")
@bot.message_handler(commands=['start'])
def start(message):
markup = types.ReplyKeyboardMarkup(resize_keyboard=True)
item1 = types.KeyboardButton("ะะขะะซะะซ")
item2 = types.KeyboardButton("ะะะ ะะะขะะ")
item3 = types.KeyboardButton("VIP ะงะะข")
markup.add(item1, item2, item3)
bot.send_message(message.chat.id, f"<b>ะัะธะฒะตั, {message.from_user.first_name}! </b>\nะงัะพ ะถะตะปะฐะตัั?",
parse_mode='html', reply_markup=markup)
@bot.message_handler(content_types=['text'])
def mess(message):
get_message_bot = message.text
db_users.check_and_add_user(message)
if get_message_bot == "VIP ะงะะข":
bot.send_message(message.chat.id, "ะกัะพะธะผะพััั VIP ะธ ะบะฐะบ ะพะฟะปะฐัะธัั? "
"\n\nโ ะกัะพะธะผะพััั VIP ัะพััะฐะฒะปัะตั 300 ััะฑะปะตะน"
"\n\nโ ะะฟะปะฐัะฐ ะฟัะพะธะทะฒะพะดะธััั ะฝะฐ ะบะฐััั 4890XXXX3095503 "
"\n\nโ ะะฟะปะฐัะฐ ัะตัะตะท ะผะพะฑะธะปัะฝะพะต ะฟัะธะปะพะถะตะฝะธะต Cะฑะตัะฑะฐะฝะบะฐ \nะะปะฐัะตะถะธ ๏ธ ะััะณะพะผั ะงะตะปะพะฒะตะบั ะ ะดััะณะพะน ะฑะฐะฝะบ ะะพ ะฝะพะผะตัั ะบะฐััั."
"\n\nโ ะะฟะปะฐัะฐ ัะตัะตะท ะผะพะฑะธะปัะฝะพะต ะฟัะธะปะพะถะตะฝะธะต ะะปััะฐ ะะฐะฝะบ \nะะปะฐัะตะถะธ ๏ธ ะ ะดััะณะพะน ะฑะฐะฝะบ ๏ธ ะะพ ะฝะพะผะตัั ะบะฐััั "
"\n\nโ ะะฟะปะฐัะฐ ัะตัะตะท QIWI \nะะตัะตะฒะพะดั ๏ธ ะะตัะตะฒะพะด ะฝะฐ ะบะฐััั \n\nะะพัะปะต ะพะฟะปะฐัั ะฟัะตะดะพััะฐะฒััะต ัะตะบ ะพะฟะตัะฐัะธะธ ะฝะฐัะตะผั ะะดะผะธะฝะธัััะฐัะพัั ะฒ ะะก, ะฟะพัะปะต ัะตะณะพ ะพะฝ ะฒัะดะฐัั ะฒะฐะผ ัััะปะบั ะฒ ะทะฐะบััััะน ะบะฐะฝะฐะป - @kostii "
"\n\nะก ะดััะณะธะผะธ ะะฐะฝะบะฐะผะธ ะธ ัะปะตะบััะพะฝะฝัะผะธ ะบะพัะตะปัะบะฐะผะธ ะฐะฑัะพะปััะฝะพ ะฐะฝะฐะปะพะณะธัะฝะพ!",
parse_mode='html', )
if get_message_bot == "ะะขะะซะะซ":
markup = types.InlineKeyboardMarkup()
markup.add(types.InlineKeyboardButton("ะัะทัะฒั", url="https://t.me/sochineniye_otzyv"))
bot.send_message(message.chat.id, "ะะตัะตั
ะพะดะธ ะฒ ะฝะฐั ะบะฐะฝะฐะป ั ะพัะทัะฒะฐะผะธ ", parse_mode='html', reply_markup=markup)
if get_message_bot == "ะะะ ะะะขะะ":
bot.send_message(message.chat.id,"ะกะฐะผัะน ะฟะพะฟัะปััะฝัะน ะฒะพะฟัะพั, - ะะพัะตะผั ั ะดะพะปะถะตะฝ ะฒะตัะธัั ะฒะฐะผ, ะณะดะต ะณะฐัะฐะฝัะธะธ?\n\nะะฐัะฐะฝัะธะธ ััะพ ะฝะฐัะธ ะพัะทัะฒั ั 2019 ะณะพะดะฐ, ะบะพัะพััะต ะฒั ะผะพะถะตัะต ะฟะพัะผะพััะตัั ะฒ ะฝะฐัะตะผ ะบะฐะฝะฐะปะต ั ะพัะทัะฒะฐะผะธ, ะฐ ะตัั ัะพ, ััะพ ะผั ะฒัะบะปะฐะดัะฒะฐะตะผ ะฝะฐ ัะฒะพัะผ ะบะฐะฝะฐะปะต. ะ ะฟัะฑะปะธะบัะตะผ ะผั, ะฟะพะผะธะผะพ ะฟัะพััะพ ะพัะฒะตัะพะฒ, ัะฐะบะถะต ัะพัะพ ัะตะฐะปัะฝัั
ะะะะพะฒ ะะะญ. ะัะต ะทะฐะดะฐะฝะธั ะฒ ะฝะธั
ัะฝะธะบะฐะปัะฝั ะธ ะฒั ะผะพะถะตัะต ััะพ ะฟัะพะฒะตัะธัั ัะฐะผะธ. ะ ะฒะพั ัะบะฐะถะธัะต, ะณะดะต ะฒั ะตัั ะบัะพะผะต ะฝะฐั ัะฐะบะพะต ะฒะธะดะตะปะธ? ะะฐะบัะธะผัะผ, ััะพ ะฒั ะผะพะณะปะธ ะฒะธะดะตัั, ัะฐะบ ััะพ ัะพัะพ ัะฐัะฟะตัะฐัะฐะฝะฝัั
ะะะะพะฒ ั ะทะฐะดะฐะฝะธัะผะธ ะฟัะพัะปะพะณะพะดะฝะตะน ะดะฐะฒะฝะพััะธ, ะบะพัะพััะต ะปะตะณะบะพ ะฟัะพะฑะธะฒะฐัััั ะธะปะธ ะณัะณะปัััั ะฒ ะธะฝัะตัะฝะตัะต.",parse_mode='html')
bot.polling() |
16,857 | 9578a89c7eb79fe2b195d50f21fbce39a8137e15 | from srht.app import db, app
from srht.objects import User
from datetime import datetime
from docopt import docopt
#Functions driving behaviour
def remove_admin(arguments):
u = User.query.filter(User.username == arguments['<name>']).first()
if(u):
u.admin = False # remove admin
db.session.commit()
else:
print('Not a valid user')
def make_admin(arguments):
u = User.query.filter(User.username == arguments['<name>']).first()
if(u):
u.admin = True # make admin
db.session.commit()
else:
print('Not a valid user')
def list_admin(arguments):
users = User.query.filter(User.admin == True)
for u in users:
print(u.username)
def approve_user(arguments):
u = User.query.filter(User.username == arguments['<name>']).first()
if(u):
u.approved = True # approve user
u.approvalDate = datetime.now()
db.session.commit()
else:
print('Not a valid user')
def create_user(arguments):
    """Create a new, pre-approved user account from docopt *arguments*.

    NOTE(review): the usage string advertises ``create <name> <password>
    <email>`` but the constructor is called as (name, email, password) --
    confirm the User constructor's parameter order.
    """
    u = User(arguments['<name>'], arguments['<email>'], arguments['<password>'])
    if u:  # NOTE: a freshly constructed instance is always truthy
        u.approved = True
        u.approvalDate = datetime.now()
        db.session.add(u)
        db.session.commit()
        print('User created')
    else:
        # BUG FIX: message typo ("uer" -> "user")
        print('Couldn\'t create the user')
def reset_password(arguments):
u = User.query.filter(User.username == arguments['<name>']).first()
if(u):
password = arguments['<password>']
if len(password) < 5 or len(password) > 256:
print('Password must be between 5 and 256 characters.')
return
u.set_password(password)
db.session.commit()
else:
print('Not a valid user')
interface = """
Command line admin interface
Usage:
manage admin promote <name>
manage admin demote <name>
manage admin list
manage user approve <name>
manage user create <name> <password> <email>
manage user reset_password <name> <password>
Options:
-h --help Show this screen.
"""
if __name__ == '__main__':
with app.app_context():
arguments = docopt(interface, version='make admin 0.1')
if(arguments['admin'] and arguments['promote']):
make_admin(arguments)
elif(arguments['admin'] and arguments['demote']):
remove_admin(arguments)
elif(arguments['admin'] and arguments['list']):
list_admin(arguments)
elif(arguments['user'] and arguments['approve']):
approve_user(arguments)
elif(arguments['user'] and arguments['create']):
create_user(arguments)
elif(arguments['user'] and arguments['reset_password']):
reset_password(arguments)
|
16,858 | f637fac4f16f465aa93bd132559e74c926963c8d | from socket import *
import socket, sys, threading, time, re, sqlite3
con_temp = sqlite3.connect('Users.db')
cur = con_temp.cursor()
cur.execute('CREATE TABLE Users(ID TEXT, PASSWORD TEXT)')
con_temp.close()
CRLF = re.compile(r'(.*?)\r\n')
lists = []
clients = dict()
message = {
230: '230 User logged in',
231: '231 Users:',
221: '221 service closing',
299: '299',
200: '200 OK',
500: '500 Syntax error',
530: '530 Not logged in',
531: '531 Invalid user or password'
}
class ChatServer(threading.Thread):
    """Per-connection worker thread for the chat server.

    Each instance owns one accepted client socket, registers it in the
    module-level ``clients`` map, and serves USER / LIST / QUIT / FROM
    commands over a CRLF-delimited text protocol.
    """
    global clients

    def __init__(self, client):
        threading.Thread.__init__(self)
        self.client_sock, self.client_addr = client
        clients[self.client_sock.fileno()] = self.client_sock
        self.client_sock_temp = self.client_sock
        self.UserID = ''
        self.debug = True

    def removeClient(self, sock):
        """Deregister *sock* from the shared client map and close it."""
        del clients[sock.fileno()]
        sock.close()
        # BUG FIX: the original assigned self.cli_sock (a typo), so the
        # run() loop never saw the socket as closed and kept recv()ing.
        self.client_sock = ''

    def sendResponse(self, connection, code, arg=None):
        """Send "<message[code]>[ arg]\\r\\n" to *connection*."""
        response = message[code]
        if arg:
            response += ' ' + arg
        response += '\r\n'
        connection.send(bytes(response, encoding='UTF-8'))
        if self.debug:
            print('<', response)

    def Requestmsg(self, connection, request):
        """Parse and execute one protocol line from a client."""
        global cur, lists
        con = sqlite3.connect('Users.db')
        cur = con.cursor()
        user = request.split()
        if self.debug:
            print('>', request)
        words = request.split(None, 1)
        command = words[0]
        if command == 'USER':
            # BUG FIX: require exactly "USER <id> <password>"; the
            # original only rejected len == 2 and crashed with an
            # IndexError on a bare "USER".
            if len(user) != 3:
                self.sendResponse(connection, 531)
            else:
                # SECURITY FIX: parameterized queries replace the
                # original %-formatted SQL, which allowed injection via
                # the network-supplied user id.
                cur.execute('SELECT * FROM Users WHERE ID = ?', (user[1],))
                row = cur.fetchone()
                if row:
                    if row[1] == user[2]:
                        self.sendResponse(connection, 230)
                    else:
                        self.sendResponse(connection, 531)
                else:
                    # unknown id: auto-register it
                    # NOTE(review): passwords are stored in plain text
                    self.sendResponse(connection, 230)
                    cur.execute('INSERT INTO Users VALUES(?, ?)', (user[1], user[2]))
                    con.commit()
        elif command == 'LIST':
            cur.execute('SELECT ID FROM Users')
            for userid in cur.fetchall():
                lists += userid
            lists = ' '.join(lists)
            self.sendResponse(connection, 231, lists)
            lists = []
        elif command == 'QUIT':
            self.sendResponse(connection, 221)
            # removeClient already closes the socket; the original closed
            # it a second time afterwards.
            self.removeClient(connection)
        elif command == 'FROM':
            # relay the chat line to every other connected client
            for other_sock in clients.values():
                if other_sock is not connection:
                    self.sendResponse(other_sock, 299, request)
            self.sendResponse(connection, 200)
        else:
            self.sendResponse(connection, 500)

    def run(self):
        """Receive loop: read lines from this client until it goes away."""
        while True:
            if self.debug:
                print('Wait for readable sockets:', [sock.fileno() for sock in clients.values()])
            # BUG FIX: replaced the "is not ''" identity comparison
            # (fragile, SyntaxWarning on modern Python) with !=
            if self.client_sock != '':
                data = self.client_sock.recv(1024).decode()
            else:
                break
            time.sleep(1)
            if not data:
                # BUG FIX: the original called sock.close() here, closing
                # the module-level *listening* socket; deregister and
                # close this client instead, then stop the thread.
                self.removeClient(self.client_sock)
                break
            else:
                for line in CRLF.findall(data):
                    self.Requestmsg(self.client_sock, line)
if __name__ == "__main__":
    # Listening socket on localhost:9629; one ChatServer thread per client.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind(('127.0.0.1', 9629))
    sock.listen(10)
    #clients[sock.fileno()] = sock
    print('Wait for readable sockets:', [sockobj.fileno() for sockobj in clients.values()])
    while True:
        # accept() returns a (socket, address) pair, passed whole to ChatServer.
        server = sock.accept()
        ChatServer(server).start()
16,859 | e289cdfc1fdda17be24d12f23b2933addfbd9d09 | import os
import torch
from torch import optim
from torch.optim.lr_scheduler import StepLR
from torch import nn
from torch.nn import functional as F
from tvae.data.imagenet import Multiset_Preprocessor
from tvae.containers.tvae import TVAE, TVAE_with_Preprocessor
from tvae.models.mlp import FC_Encoder, FC_Decoder
from tvae.models.alexnet import create_alexnet_fc6
from tvae.containers.encoder import Gaussian_Encoder
from tvae.containers.decoder import Gaussian_Decoder
from tvae.containers.grouper import Chi_Squared_from_Gaussian_2d
from tvae.utils.logging import configure_logging, get_dirs
from tvae.utils.train_loops import train_epoch_imagenet, eval_epoch_many_imagenet
def create_model(s_dim, group_kernel, mu_init):
    """Assemble the TVAE on top of an AlexNet-fc6 preprocessor.

    The encoders/decoder are MLPs over 9216-dim fc6 features; the grouper
    applies a circularly-padded transposed 3-D convolution over the latent
    grid.
    """
    feat_dim = 9216  # AlexNet fc6 feature size
    # Half-widths of the group kernel along each of its three axes.
    half = [k // 2 for k in group_kernel]

    enc_z = Gaussian_Encoder(FC_Encoder(n_in=feat_dim, n_out=s_dim),
                             loc=0.0, scale=1.0)
    enc_u = Gaussian_Encoder(FC_Encoder(n_in=feat_dim, n_out=s_dim),
                             loc=0.0, scale=1.0)
    dec = Gaussian_Decoder(FC_Decoder(n_in=s_dim, n_out=feat_dim))

    # F.pad's pad tuple is ordered (last-dim-left, last-dim-right, ...,
    # first-dim-left, first-dim-right), hence the reversed half-widths.
    def circular_pad(x):
        return F.pad(x, (half[2], half[2], half[1], half[1], half[0], half[0]),
                     mode='circular')

    conv = nn.ConvTranspose3d(in_channels=1, out_channels=1,
                              kernel_size=group_kernel,
                              padding=(2 * half[0], 2 * half[1], 2 * half[2]),
                              stride=(1, 1, 1), padding_mode='zeros', bias=False)
    grouper = Chi_Squared_from_Gaussian_2d(conv, circular_pad,
                                           n_caps=1, cap_dim=s_dim,
                                           mu_init=mu_init)

    tvae = TVAE(enc_z, enc_u, dec, grouper)
    return TVAE_with_Preprocessor(create_alexnet_fc6(), tvae)
def main():
    """Train the TVAE on ImageNet-style data and periodically evaluate it on
    the Objects/Faces/Bodies/Places test sets."""
    config = {
        'wandb_on': True,
        'lr': 1e-5,
        'momentum': 0.9,
        'batch_size': 128,
        'max_epochs': 100,
        'eval_epochs': 1,           # evaluate every N epochs
        # Placeholder paths — replace with real directories before running.
        'train_datadir': 'TRAIN_DATA_DIR',
        'test_datadirs': [('Objects', 'OBJECT_DATA_DIR'),
                          ('Faces', 'FACE_DATA_DIR'),
                          ('Bodies', 'BODIES_DATA_DIR'),
                          ('Places', 'PLACES_DATA_DIR')
                         ],
        'seed': 1,
        's_dim': 64*64,             # latent grid is 64x64
        'k': 25,                    # group-kernel spatial extent
        'mu_init': 40.0,
        'n_is_samples': 10,         # importance samples for the IS estimate
    }
    name = 'TVAE_Objects_Faces_Bodies_Places'
    config['savedir'], _, config['wandb_dir'] = get_dirs()
    savepath = os.path.join(config['savedir'], name)
    preprocessor = Multiset_Preprocessor(config)
    train_loader, test_loaders = preprocessor.get_dataloaders()
    model = create_model(s_dim=config['s_dim'], group_kernel=(config['k'], config['k'], 1), mu_init=config['mu_init'])
    model.to('cuda')
    log, checkpoint_path = configure_logging(config, name, model)
    # load_checkpoint_path = 'checkpoint.tar'
    # model.load_state_dict(torch.load(load_checkpoint_path))
    # Only the TVAE parameters are optimized; the preprocessor is left as-is.
    optimizer = optim.SGD(model.tvae.parameters(),
                          lr=config['lr'],
                          momentum=config['momentum'])
    # gamma=1.0 keeps the LR constant; the scheduler is a placeholder.
    scheduler = StepLR(optimizer, step_size=1, gamma=1.0)
    for e in range(config['max_epochs']):
        log('Epoch', e)
        total_loss, total_neg_logpx_z, total_kl, num_batches = train_epoch_imagenet(
            model,
            optimizer,
            train_loader, log,
            savepath, e, eval_batches=1000,
            plot_weights=False,
            wandb_on=config['wandb_on'])
        log("Epoch Avg Loss", total_loss / num_batches)
        log("Epoch Avg -LogP(x|z)", total_neg_logpx_z / num_batches)
        log("Epoch Avg KL", total_kl / num_batches)
        scheduler.step()
        # Checkpoint after every epoch.
        torch.save(model.state_dict(), checkpoint_path)
        if e % config['eval_epochs'] == 0:
            total_loss, total_neg_logpx_z, total_kl, total_is_estimate, num_batches = eval_epoch_many_imagenet(
                model,
                test_loaders,
                # Per-dataset plot colors, parallel to the names below.
                ['#55DDE0', '#F26419', '#F6AE2D', '#33658A'],
                ['Objects', 'Faces', 'Bodies', 'Places'],
                log, savepath, e,
                n_is_samples=config['n_is_samples'],
                plot_maxact=False,
                plot_class_selectivity=True,
                wandb_on=config['wandb_on'],
                background_color='#2F4858',
                mix_color='#937B43',
                select_thresh=0.85)
            log("Val Avg Loss", total_loss / num_batches)
            log("Val Avg -LogP(x|z)", total_neg_logpx_z / num_batches)
            log("Val Avg KL", total_kl / num_batches)
            # NOTE(review): "Estiamte" is a typo, but it is a logged metric key;
            # renaming it would break existing dashboards — left as-is.
            log("Val IS Estiamte", total_is_estimate / num_batches)
if __name__ == '__main__':
    main()
16,860 | fc7ff8f2c1be209621b9bf46fc50b5b47d53262f | # -*- coding: utf-8 -*-
from MethodHelper import *
# Python 2 code generator: emits a C++ header declaring one POD struct per
# operator method signature found in MethodHelper.methods.
print "// generated file"
print "#ifndef LAZYOBJECTOPERATIONDATA_H"
print "#define LAZYOBJECTOPERATIONDATA_H"
print ''
print '#include <Config.h>'
print ''
print 'BEG_METIL_LEVEL1_NAMESPACE;'
print ''
for m in methods:
    # Only operator-style signatures (encoded strings starting with 'O').
    if m[ 0 ] == 'O':
        print 'struct LazyObjectOperationData_' + m + ' {'
        c = 0
        # Each remaining char encodes an argument type; 'o'/'O' entries are
        # object placeholders and get no data member.
        for l in m[ 1: ]:
            if l != 'o' and l != 'O':
                print ' ' + cor( l ) + letters[ c ] + ';'
                c += 1
        print '};'
        print ''
print 'END_METIL_LEVEL1_NAMESPACE;'
print ''
print "#endif // LAZYOBJECTOPERATIONDATA_H"
|
16,861 | 2a6dcc5c7a2505648212b48b1fd9e9cec649e933 | from master import db
from master import Viewrender
from models import *
def db_getuserByemail(email):
    # Look up a User by unique email; returns None when absent.
    return User.query.filter_by(email=email).first()
def db_getuserByid(id):
    # Look up a User by primary key; returns None when absent.
    return User.query.filter_by(id=id).first()
def db_getpostByid(id):
    # Look up a Post by primary key; returns None when absent.
    return Post.query.filter_by(id=id).first()
def db_gettagsByname(name):
    # Look up a Tags row by name; returns None when absent.
    return Tags.query.filter_by(name=name).first()
def db_getGroupByid(name):
    # NOTE(review): despite the "Byid" name this filters by *name* —
    # confirm against callers before renaming.
    return Group.query.filter_by(name=name).first()
def db_check_repeat_email(email):
    """Return True when *email* is not yet registered (i.e. free to use).

    Idiom fix: return the boolean expression directly instead of the
    if/else True/False ladder; behavior is unchanged.
    """
    return User.query.filter_by(email=email).first() is None
def db_create_user(email, password, nickname, user_group):
    """Create and persist a new User, then start a session for it.

    Returns False when the email is already taken; otherwise returns None
    (the implicit result of db_set_user_session's caller context).
    """
    if not db_check_repeat_email(email):
        return False
    new_user = User(
        email=email,
        verify_email=False,
        # SECURITY(review): unsalted single-round SHA-256 of the password —
        # consider a real password hash (bcrypt/argon2/werkzeug).
        passhash=hashlib.sha256(
            password.encode('utf-8')).hexdigest(),
        nickname=nickname,
        user_des='Wait....And Something Text About This User',
        user_session='',
        point='1',
        url='',
        user_group=user_group,
        user_ban=False,
        user_dirty=False,
        registertime=time.time(),
        MessageToMailbox=True,
        # Random identicon avatar; presumably hashlib/time/random come in via
        # the star imports at the top of this file — TODO confirm.
        avater='http://identicon.relucks.org/' + str(random.randint(200, 999)) + '?size=120')
    db.session.add(new_user)
    db.session.flush()
    db.session.commit()
    # Log the new user in immediately.
    db_set_user_session(new_user.id)
def db_set_user_session(id):
    """Generate a session token for user *id*, store it on the user row and
    in the Flask session; returns the token, or False for an unknown user."""
    obj = db_getuserByid(id)
    if obj is not None:
        # SECURITY(review): token is SHA-256 of randint(0, 300000) — only
        # ~300k possible values and not a CSPRNG; use secrets.token_hex().
        session_random = hashlib.sha256(
            str(random.randint(0, 300000)).encode('utf-8')).hexdigest()
        obj.user_session = session_random
        session['id'] = id
        session['key'] = session_random
        db.session.flush()
        db.session.commit()
        return session_random
    return False
def get_session(type='nickname'):
    """Validate the Flask session against the stored per-user token.

    On success returns the requested field ('nickname', 'id' or the full
    'obj'); returns False for a missing/unknown session. NOTE: an
    unrecognised *type*, or a token mismatch, falls through and returns
    None implicitly.
    """
    if session.get('id') is None or session.get('key') is None:
        session.clear()
        return False
    obj = db_getuserByid(session.get('id'))
    if obj is None:
        return False
    if obj.user_session == session.get('key'):
        if type == 'nickname':
            return obj.nickname
        elif type == 'id':
            return obj.id
        elif type == 'obj':
            return obj
    else:
        # Stale/forged token: drop the whole session.
        session.clear()
def getPost_list(tags='', num=30):
    """Return up to *num* posts, with "topped" posts moved to the front.

    When *tags* is empty, all posts are considered; otherwise only posts
    whose tags match. Fix: the two branches duplicated the same reorder
    loop — the only difference was the query, so build the query once.
    Topped posts end up in reverse encounter order (insert at 0), matching
    the original behavior.
    """
    query = Post.query.filter_by() if tags == '' else Post.query.filter_by(tags=tags)
    returnPost_list = []
    # Slice first (as before), then bubble topped posts to the front.
    for post in query.all()[:num]:
        if post.top:
            returnPost_list.insert(0, post)
        else:
            returnPost_list.append(post)
    return returnPost_list
|
16,862 | 7261567473e2ac31258f4748a15b21baf5168163 | import sys
import simplejson as json
import urllib
import traceback
import gzip
import fileinput
import re
import getopt
from datetime import datetime
FILTER_PROJECT = 'en'
LIMIT = 10
def main():
    """Parse CLI flags (-p project, -m regex, -L limit) and process the file."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], "p:m:L:")
    except getopt.GetoptError, err:
        print str(err)
        sys.exit(2)
    if(not args):
        usage()
        exit(2)
    file = args[0]
    # Defaults; 'match' stays absent unless -m is given.
    filters = dict(
        proj = FILTER_PROJECT,
        limit = LIMIT)
    for o, a in opts:
        if o == "-p":
            filters['proj'] = a
        elif o == "-m":
            filters['match'] = a
        elif o == "-L":
            # NOTE(review): stores the raw string, while the default LIMIT is
            # an int and process_file compares it numerically — confirm int(a)
            # was intended. Also usage() advertises -l but getopt accepts -L.
            filters['limit'] = a
    process_file(file, filters)
def process_file(file, filters):
    """Stream the (possibly gzipped) pagecount file, emitting matching rows
    until the configured limit is reached."""
    # One timestamp (from the filename) is used for all records in a file.
    timestamp = extract_timestamp(file);
    count = 0;
    processed = 0;
    # hook_compressed transparently opens .gz/.bz2 inputs.
    for line in fileinput.input(file, openhook=fileinput.hook_compressed):
        if(parse(count, line, timestamp, filters)):
            processed += 1
        count += 1;
        # cheeky way to limit the number of processed items before bailing;
        # limit <= 0 disables the cap.
        if(0 < filters['limit'] and filters['limit'] < processed):
            break
def parse(count, line, timestamp, filters):
    """Parse one 'proj url count bytes' record; print it as JSON when it
    passes the filters. Returns True if the row was emitted."""
    pts = line.split()
    processed = False
    try:
        # unicode(urllib.unquote_plus(pts[1]), 'UTF-8').encode('UTF-8')
        row = dict(proj=pts[0],
                   url=urllib.unquote_plus(pts[1]),
                   cnt=pts[2],
                   bytes=pts[3],
                   ts=timestamp.isoformat())
        if(row_matches(row, filters)):
            print json.dumps(row)
            processed = True
    except:
        # Malformed line (too few fields / bad encoding): report and continue.
        sys.stderr.write("%i:%s"%(count, line))
        print "%i:%s"%(count, line)
        #print sys.exc_info()
    return processed
def row_matches(row, filters):
    """Return True when *row* passes the project filter and, if supplied,
    the case-insensitive 'match' regex filter on the URL.

    Fix: the original returned an `re.Match` object (or None) when the
    regex filter was used; callers only test truthiness, so normalizing to
    bool is backward-compatible and gives a consistent return type.
    """
    if filters['proj'] != row['proj']:
        return False
    # only check the 'match' filter if one was supplied
    if 'match' in filters:
        return bool(re.search(filters['match'], row['url'], re.I))
    return True
# expected format of filename 'pagecount-YYYYmmdd-HHMMSS.gz'
def extract_timestamp(filename):
    """Parse the timestamp embedded in a pagecount dump filename.

    Fixes: the original shadowed the builtin `type` and kept several unused
    partition results; behavior is unchanged.
    """
    # Drop the extension, then the 'pagecount' prefix. partition() splits on
    # the FIRST '-', so 'YYYYmmdd-HHMMSS' survives intact.
    name = filename.partition('.')[0]
    stamp = name.partition('-')[2]
    return datetime.strptime(stamp, '%Y%m%d-%H%M%S')
def usage():
    # NOTE(review): advertises -l, but main()'s getopt spec is "p:m:L:"
    # (capital L) — one of the two should change.
    print "usage:"
    print "\t" + sys.argv[0] + " [-p proj (en)] [-l limit (10)] [-m 'regex match'] file.gz"
if __name__ == "__main__":
    main()
16,863 | 3f1207de81a3a4d929408af5c8a75aeb2db08941 | from fastapi import FastAPI
from app.data.deeds import data
app = FastAPI()
@app.get("/")
def root():
    """Root endpoint returning a static greeting payload."""
    payload = {"message": "Hello, world!"}
    return payload
@app.get("/deeds")
def deeds():
    """GET /deeds — return the deeds payload via the get_deeds accessor."""
    return get_deeds()
def get_deeds():
    # Returns the module-level `data` imported from app.data.deeds.
    return data
|
16,864 | d07c38daac0d07966c8a69b6d009c4d7b44a5a1b | import numpy as np
def one_hot(labels, item_labels):
    ''' Create one-hot encodings for each item in item_labels according to labels

    Parameters
    ----------
    labels : list shape (M,)
        list of distinct labels of which to categorize item_labels
    item_labels : list shape (X,)
        list of item labels with each element corresponding to the label of one item

    Returns
    -------
    numpy.ndarray, shape=(X, M)
        one hot encodings for each item

    Raises
    ------
    ValueError
        if an item label does not occur in `labels`
    '''
    # Performance fix: the original called labels.index(lbl) per item (O(X*M)).
    # Map each label to its FIRST occurrence index (setdefault preserves
    # list.index() semantics for duplicated labels), then fill a zero matrix.
    index = {}
    for i, lbl in enumerate(labels):
        index.setdefault(lbl, i)
    onehot_encoded = np.zeros((len(item_labels), len(labels)), dtype=int)
    for row, lbl in enumerate(item_labels):
        try:
            onehot_encoded[row, index[lbl]] = 1
        except KeyError:
            # Preserve the ValueError contract of list.index().
            raise ValueError('%r is not in list' % (lbl,)) from None
    return onehot_encoded
# Build one-hot encodings for the labels listed (one per line) in the raw file.
with open('food_labels_raw.txt', mode="r") as var:
    all_labels = var.read().splitlines()
# NOTE(review): set() iteration order is not stable across runs, so the
# column order of the encodings can differ between executions — confirm
# downstream consumers don't rely on a fixed order.
distinct_labels = list(set(all_labels))
encodings = one_hot(distinct_labels, all_labels)
print(distinct_labels)
print(encodings.shape)
print(encodings[0])
16,865 | 29db8536589f513bb3c4a413cdd43c5beeb4fc13 | from features.AbstractFeature import AbstractFeature
from util.Settings import cmd_prefix
class OnMessageFeature(AbstractFeature):
    """Feature triggered by chat messages beginning with a command string."""

    # Subclasses set this to their command keyword (without the prefix).
    command_str = None

    def should_execute(self, message):
        """
        Whether or not this feature should execute, based on the given message
        """
        if self.command_str is None:
            return False
        full_command = "{}{}".format(cmd_prefix, self.command_str)
        return message.content.startswith(full_command)
|
16,866 | 9b29b57ebc073a5a6b3d309b90687d307094742f | """A simple logging routine to store inputs to the system."""
from datetime import datetime
from pytz import timezone, utc
import boto3
from flask import Flask
import gin
app = Flask(__name__)
def pst():
    """Return the current US/Pacific time formatted as MM_DD_YYYY_HH_MM_SS_TZ."""
    # From https://gist.github.com/vladwa/8cd97099e32c1088025dfaca5f1bfd33
    fmt = '%m_%d_%Y_%H_%M_%S_%Z'
    now_utc = datetime.now(tz=utc)
    now_pacific = now_utc.astimezone(timezone('US/Pacific'))
    return now_pacific.strftime(fmt)
@gin.configurable
def log_dialogue_input(log_dict, dynamodb_table):
    """Logs dialogue input to file.

    Stamps *log_dict* with the current PST time, best-effort writes it to
    DynamoDB when a table is configured, and always logs it via app.logger.
    """
    if not isinstance(log_dict, dict):
        # NOTE(review): NameError is an odd exception type for a value check —
        # TypeError would be conventional; left unchanged for compatibility.
        raise NameError(f"Logging information must be dictionary, not type {type(log_dict)}")
    # Log in PST
    log_dict["time"] = pst()
    if dynamodb_table is not None:
        try:
            dynamodb_table.put_item(Item=log_dict)
            app.logger.info("DB write successful")
        except Exception as e:
            # DB failures are non-fatal: record the error and fall through.
            app.logger.info(f"Could not write to database: {e}")
    # If no db is specified, write logs to info
    app.logger.info(log_dict)
def load_aws_keys(filepath):
    """Read AWS credentials from *filepath*.

    Line 1 is the access key, line 2 the secret key; trailing newlines are
    stripped.
    """
    with open(filepath, "r") as handle:
        lines = handle.readlines()
    access = lines[0].replace("\n", "")
    secret = lines[1].replace("\n", "")
    return {"access_key": access, "secret_key": secret}
@gin.configurable
def load_dynamo_db(key_filepath, region_name, table_name):
    """Loads dynamo db.

    Returns a boto3 Table handle for *table_name*, or None when no key file
    is configured (logging then falls back to app.logger only).
    """
    if key_filepath is not None:
        keys = load_aws_keys(key_filepath)
        access_key, secret_key = keys["access_key"], keys["secret_key"]
        dynamodb = boto3.resource(
            "dynamodb",
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            region_name=region_name
        )
        return dynamodb.Table(table_name)
    return None
|
16,867 | 04be7bc1d62cead00b6a840a9de80183b1356bfc | # Generated by Django 2.2.20 on 2021-05-24 22:45
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration creating App, Plan and Subscription.
    # Django convention: do not hand-edit field definitions once this has been
    # applied anywhere; create a follow-up migration instead.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('home', '0001_load_initial_data'),
    ]

    operations = [
        migrations.CreateModel(
            name='App',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, verbose_name='Name')),
                ('description', models.TextField(blank=True, null=True, verbose_name='Description')),
                ('type', models.CharField(choices=[('WEB', 'Web'), ('MOBILE', 'Mobile')], max_length=10, verbose_name='Type')),
                ('framework', models.CharField(choices=[('DJANGO', 'Django'), ('REACT_NATIVE', 'React Native')], max_length=20, verbose_name='Framework')),
                ('domain_name', models.CharField(blank=True, max_length=50, null=True, verbose_name='Domain Name')),
                ('screenshot', models.URLField(blank=True, null=True, verbose_name='Screenshot')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated At')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='apps', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'App',
                'verbose_name_plural': 'Apps',
            },
        ),
        migrations.CreateModel(
            name='Plan',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20, verbose_name='Name')),
                ('description', models.TextField(verbose_name='Description')),
                ('price', models.DecimalField(decimal_places=2, max_digits=19)),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated At')),
            ],
            options={
                'verbose_name': 'Plan',
                'verbose_name_plural': 'Plans',
            },
        ),
        migrations.CreateModel(
            name='Subscription',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('active', models.BooleanField(default=True, verbose_name='Active')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated At')),
                # CASCADE on app deletion, but PROTECT keeps a Plan alive while
                # any subscription references it.
                ('app', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to='home.App')),
                ('plan', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='subscriptions', to='home.Plan')),
            ],
            options={
                'verbose_name': 'Subscription',
                'verbose_name_plural': 'Subscriptions',
            },
        ),
    ]
|
16,868 | 445a35de705c601eca6055ff2369a2ded4f2a397 | import socket
import json
# Static cluster topology: node name -> IPv4 address. coordinator/monitor/
# scheduler resolve to loopback, presumably for local development — confirm.
IP = {
    'Node0': '192.168.2.1',
    'Node1': '192.168.2.2',
    'Node2': '192.168.2.3',
    'Node3': '192.168.2.4',
    'Node4': '192.168.2.5',
    'Node5': '192.168.2.6',
    'Node6': '192.168.2.7',
    'Node7': '192.168.2.8',
    'Node8': '192.168.2.9',
    'Node9': '192.168.2.10',
    'Node10': '192.168.2.11',
    'Node11': '192.168.2.12',
    'central controller': '192.168.2.100',
    'coordinator': '127.0.0.1',
    'monitor': '127.0.0.1',
    'scheduler': '127.0.0.1'
}
# Well-known service ports; 'start' and 'RA start' look like base ports for
# per-node offsets — TODO confirm against callers.
PORT = {
    'pubsub': 6379,
    'coordinator': 4000,
    'monitor': 4001,
    'scheduler': 9000,
    'start': 8000,
    'RA start': 7000
}
NETMASK = '/24'
# Node name -> MAC address (used for e.g. wake/identify; not referenced here).
MAC = {
    'Node0': '00:1D:9C:C8:BD:F0',
    'Node1': '00:1D:9C:C7:B0:70',
    'Node2': '00:1D:9C:C8:BC:46',
    'Node3': '00:1D:9C:C8:BD:F2',
    'Node4': '00:1D:9C:C8:BD:F3',
    'Node5': '00:1D:9C:C8:BD:F5',
    'Node6': '00:1D:9C:C8:BD:F6',
    'Node7': '00:1D:9C:C8:BD:F7',
    'Node8': '00:1D:9C:C8:BD:F8',
    'Node9': '00:1D:9C:C8:BD:F9',
    'Node10': '00:2D:9C:C8:BD:F3',
    'Node11': '00:3D:9C:C8:BD:F3',
    'central controller': 'AA:AA:AA:AA:AA:AA',
}
# Timeouts (units not stated here — presumably seconds; TODO confirm).
BID_CONFIRM_TIMEOUT = 5
COMMAND_ORDER_TIMEOUT = 100000
STD_ERR = 1
INF = 1000000  # sentinel "infinity" for cost comparisons
def distance(a, b):
    """Manhattan (L1) distance between two 2-D points given as (x, y) pairs."""
    dx = abs(a[0] - b[0])
    dy = abs(a[1] - b[1])
    return dx + dy
def send_msg(addr, msg):
    """JSON-encode *msg* and send it to ``addr = (ip, port)`` over a fresh
    TCP connection.

    Fix: use sendall() instead of send() — send() may transmit only part of
    the payload, silently truncating larger messages.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        ip, port = addr
        s.connect((ip, port))
        s.sendall(json.dumps(msg).encode())
16,869 | e3a08b4ec3bcce9b9a6ea91f31b93639790b1207 | import numpy as np
# hist = {'acc': [0.60170666666666661, 0.64970666666666665, 0.67247999999999997, 0.70096000000000003, 0.72831999999999997, 0.75327999999999995, 0.79210666666666663, 0.83685333333333334, 0.89738666666666667, 0.94869333333333339, 0.98768, 0.99856, 0.99978666666666671, 0.9998933333333333, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], 'loss': [0.67599039871851607, 0.64008198080062861, 0.61161179876963301, 0.58364227013905845, 0.55147493773142497, 0.51707415517807009, 0.46878006409645079, 0.40509848320325215, 0.31960229801177981, 0.23928055049419403, 0.1614533010260264, 0.10840653987010319, 0.078473922821680711, 0.061212156519095104, 0.050844819813569385, 0.043658380607366563, 0.038931761085589725, 0.034905506468613942, 0.031930986449718472, 0.029555683100223541], 'batch': [72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72], 'val_acc': [0.50319999999999998, 0.66639999999999999, 0.60928000000000004, 0.53503999999999996, 0.51727999999999996, 0.66991999999999996, 0.59023999999999999, 0.73343999999999998, 0.72575999999999996, 0.63888, 0.68000000000000005, 0.75280000000000002, 0.73392000000000002, 0.76048000000000004, 0.73775999999999997, 0.76719999999999999, 0.76271999999999995, 0.76256000000000002, 0.76383999999999996, 0.76304000000000005], 'val_loss': [0.75814045246124273, 0.62419310464859012, 0.65664560201644895, 0.93560365699768067, 1.0031634732627868, 0.62035980564117432, 0.77652886445999147, 0.54206495063781734, 0.54205195611953738, 0.72585166383743283, 0.67904411331176762, 0.51066716083526609, 0.56879334230422973, 0.50325737421035766, 0.565705334186554, 0.50501513553619382, 0.51068809757232669, 0.51375373683929448, 0.51847371002197262, 0.52685898157119748], 'size': [127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127]}
# hist = {'acc': [0.61040000000000005, 0.66965333333333332, 0.71269333333333329, 0.74261333333333335, 0.76970666666666665, 0.78549333333333338, 0.80197333333333332, 0.81872, 0.84133333333333338, 0.86138666666666663, 0.88234666666666661, 0.91221333333333332, 0.93493333333333328, 0.95573333333333332, 0.97338666666666662, 0.98154666666666668, 0.98912, 0.99407999999999996, 0.99754666666666669, 0.99914666666666663], 'loss': [0.66229341925303142, 0.61037688517888389, 0.56105807796478269, 0.5257215232054393, 0.48902832611719765, 0.46184740505854288, 0.42996074739456175, 0.40144161385854088, 0.35829173561731975, 0.32654467482248944, 0.28088018836657208, 0.23035042788187662, 0.18728675130685171, 0.14588080277283985, 0.10805082000096639, 0.083060047481854762, 0.063971798425515494, 0.048059171452919644, 0.034843711685736974, 0.024511216193437577], 'batch': [292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292], 'val_acc': [0.50495999999999996, 0.65007999999999999, 0.66703999999999997, 0.73760000000000003, 0.74831999999999999, 0.7792, 0.74319999999999997, 0.74480000000000002, 0.76768000000000003, 0.77983999999999998, 0.75600000000000001, 0.76127999999999996, 0.76207999999999998, 0.75551999999999997, 0.76959999999999995, 0.77327999999999997, 0.75744, 0.74095999999999995, 0.76207999999999998, 0.76432], 'val_loss': [0.85289827960968012, 0.61938058338165281, 0.60033894657135012, 0.53840571493148803, 0.51515883089065551, 0.47567841200828553, 0.54509489851951598, 0.55246049943923947, 0.49265963602066037, 0.48533072179794312, 0.55839545064926144, 0.55713994558334345, 0.59176110905647283, 0.61099106838226314, 0.57528177835464478, 0.58856903702735897, 0.65395764139175416, 0.77298401973724362, 0.68928052658081052, 0.65886097120285037], 'size': [31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31]}
# hist = {'acc': [0.60842666666666667, 0.66496, 0.70799999999999996, 0.74319999999999997, 0.76719999999999999, 0.78591999999999995, 0.80047999999999997, 0.81999999999999995, 0.83813333333333329, 0.86160000000000003, 0.88549333333333335, 0.91013333333333335, 0.92858666666666667, 0.9496, 0.96981333333333331, 0.98197333333333336, 0.98794666666666664, 0.99456, 0.99695999999999996, 0.99754666666666669, 0.99925333333333333, 0.99936000000000003, 0.9998933333333333, 0.9998933333333333, 0.99983999999999995, 0.99994666666666665, 0.99983999999999995, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], 'loss': [0.66240078449249262, 0.61301115545908613, 0.56650861356099447, 0.52666178926467899, 0.49086858425140378, 0.46030765804608664, 0.43432388045310977, 0.39819696858406067, 0.36649336884180705, 0.33039594181378684, 0.28322263444264728, 0.23331937576611836, 0.19571519700368245, 0.15482046542803446, 0.11591709202289581, 0.086087764985958729, 0.067531267909208936, 0.049765399232705433, 0.035450270514289541, 0.029474592960675557, 0.022904746125936508, 0.018450904572010041, 0.014534764552215735, 0.0134843383474648, 0.012718115153312682, 0.010768842419087887, 0.010740207039515177, 0.0085216235853234935, 0.0075080361364781856, 0.0067496916103363036, 0.0065559151982019347, 0.0055863112429281074, 0.0056592495764295264, 0.0055201838895678522, 0.0049493034219990177, 0.0045409350671619178, 0.0042915554307897882, 0.0041260089698682226, 0.0045062989238649608, 0.0040759931686023872, 0.0041205350280801457, 0.0039253659020860986, 0.003558775956208507, 0.0033647228382527828, 0.0031103958547860386, 0.0031776673414558171, 0.0030192559991901119, 0.0029129640011489391, 0.0028474370118354757, 0.003024212182847162, 0.002913796332279841, 0.002834449345084528, 0.0029427745144069194, 0.002641335375420749, 0.0023857027138769627, 0.0023533324800928432, 0.0023200609461218121, 
0.0023001326639950277, 0.0023025003504008055, 0.0023820328418289621], 'batch': [292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292], 'val_acc': [0.50848000000000004, 0.65456000000000003, 0.65744000000000002, 0.69023999999999996, 0.77215999999999996, 0.76832, 0.60399999999999998, 0.75519999999999998, 0.748, 0.52976000000000001, 0.72255999999999998, 0.73760000000000003, 0.76863999999999999, 0.74080000000000001, 0.76848000000000005, 0.75792000000000004, 0.76336000000000004, 0.74944, 0.75439999999999996, 0.74272000000000005, 0.70399999999999996, 0.75407999999999997, 0.75488, 0.75360000000000005, 0.75504000000000004, 0.76127999999999996, 0.76559999999999995, 0.75775999999999999, 0.75824000000000003, 0.75631999999999999, 0.75744, 0.75248000000000004, 0.75712000000000002, 0.75695999999999997, 0.75407999999999997, 0.75583999999999996, 0.75536000000000003, 0.74992000000000003, 0.75712000000000002, 0.75600000000000001, 0.75712000000000002, 0.75807999999999998, 0.75375999999999999, 0.75375999999999999, 0.75471999999999995, 0.75439999999999996, 0.75183999999999995, 0.75583999999999996, 0.75536000000000003, 0.75375999999999999, 0.75407999999999997, 0.75392000000000003, 0.75471999999999995, 0.75631999999999999, 0.75439999999999996, 0.75407999999999997, 0.75471999999999995, 0.75360000000000005, 0.75456000000000001, 0.75392000000000003], 'val_loss': [0.83099874757766723, 0.61630004909515379, 0.60988262300491336, 0.58187653435707087, 0.48209546091079714, 0.49152652966499327, 0.95987617481231691, 0.51031813723564146, 0.55518745296478267, 1.6223285962677001, 0.63996526561737066, 0.62960652519226079, 0.58113950263977054, 0.66655004633903503, 0.63060753737449649, 0.63062375366687773, 0.69096670564651486, 0.72070998303890232, 
0.72822552292346954, 0.75697966092109681, 1.0030097365951538, 0.71427239350318905, 0.77823157302856449, 0.77560362388610837, 0.76769009612560268, 0.7689899741268158, 0.77246122301578524, 0.79130663941383361, 0.80737354588985444, 0.79964950931549073, 0.79939541961431504, 0.83889609127044673, 0.8254445036077499, 0.826949597966671, 0.83998052089214326, 0.83905673991203311, 0.83934639024734492, 0.88232042727947235, 0.85239349826812749, 0.85866071177005765, 0.88359404543876652, 0.86687393261194234, 0.87489767303705213, 0.8848082482099533, 0.87654798297405245, 0.88409748976230618, 0.89592284898519514, 0.89214193494558336, 0.88900369663000112, 0.89869544796228407, 0.89222350886344914, 0.90436567729473116, 0.91327436664104467, 0.91660713975667951, 0.90649745750188826, 0.9294387438964844, 0.91614944346904759, 0.91154556308746337, 0.92372855185031888, 0.91708505530834195], 'size': [31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31]}
# hist = {'acc': [0.60405333333333333, 0.67749333333333328, 0.71808000000000005, 0.75626666666666664, 0.78778666666666664, 0.82389333333333337, 0.85962666666666665, 0.90400000000000003, 0.93973333333333331, 0.95855999999999997, 0.97861333333333334, 0.97824, 0.98010666666666668, 0.9809066666666667, 0.98186666666666667, 0.98751999999999995, 0.9890133333333333, 0.98869333333333331, 0.98426666666666662, 0.98560000000000003, 0.99093333333333333, 0.99157333333333331, 0.99231999999999998, 0.99658666666666662, 0.99994666666666665, 0.99994666666666665, 0.99994666666666665, 0.99994666666666665, 0.9998933333333333, 0.9998933333333333, 0.99994666666666665, 0.99994666666666665, 1.0, 1.0, 0.99994666666666665, 1.0, 1.0, 1.0, 0.99994666666666665, 0.9998933333333333, 0.9998933333333333, 0.99994666666666665, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.9998933333333333, 0.9998933333333333, 0.99994666666666665, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], 'loss': [0.66096524510065713, 0.60251230394999189, 0.55755540348688759, 0.50417322521845498, 0.4553801802190145, 0.39423027318954468, 0.33696667938232422, 0.25310164899667104, 0.1843972886300087, 0.13577249122460683, 0.090821224315563831, 0.082960747853914898, 0.077625990978876749, 0.071701066735585531, 0.066505232951641083, 0.050246429081757867, 0.047454721429347993, 0.043911529026230176, 0.053869656380017596, 0.049129247435728711, 0.037948016947607202, 0.037407833847204842, 0.032645335459013783, 0.020367217614551385, 0.0098970298432310422, 0.0079641506666441753, 0.007760948695490758, 0.0073177562509973842, 0.0066278330479065577, 0.0062338190081715587, 0.005831012386331956, 0.0055164803894360856, 0.0050136769266426566, 0.0049698377213627104, 0.0045335783093422653, 0.0043223557150860624, 0.0040322599734862645, 0.0042169533460587267, 0.0040594824566692117, 0.0038770630473643542, 0.003800476775839925, 0.0036546519884467126, 0.0034199453267703453, 0.0031003407175590595, 0.0032379238306855162, 0.0032608512243628503, 0.0030321109636376299, 
0.0030461734936883052, 0.0029766344864914814, 0.0029135984853655098, 0.0026570867832005022, 0.0025076298517609638, 0.0026299074064319332, 0.002490029305741191, 0.0024280058349917333, 0.0023558908302088581, 0.0021960328992456199, 0.0023255339737112322, 0.0023219326059271891, 0.0020706570873906217], 'batch': [292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292, 292], 'val_acc': [0.58272000000000002, 0.68415999999999999, 0.70416000000000001, 0.73407999999999995, 0.59584000000000004, 0.76463999999999999, 0.78591999999999995, 0.67247999999999997, 0.81103999999999998, 0.74624000000000001, 0.80896000000000001, 0.79296, 0.72448000000000001, 0.76751999999999998, 0.78576000000000001, 0.79679999999999995, 0.80528, 0.79615999999999998, 0.78544000000000003, 0.79024000000000005, 0.79727999999999999, 0.78351999999999999, 0.79696, 0.81184000000000001, 0.81423999999999996, 0.81391999999999998, 0.81408000000000003, 0.81455999999999995, 0.81376000000000004, 0.81503999999999999, 0.81344000000000005, 0.81440000000000001, 0.81408000000000003, 0.81408000000000003, 0.81103999999999998, 0.81135999999999997, 0.81264000000000003, 0.81535999999999997, 0.81055999999999995, 0.81311999999999995, 0.81167999999999996, 0.81184000000000001, 0.81232000000000004, 0.81103999999999998, 0.81200000000000006, 0.81247999999999998, 0.81167999999999996, 0.81167999999999996, 0.81120000000000003, 0.81103999999999998, 0.81184000000000001, 0.81167999999999996, 0.81215999999999999, 0.81152000000000002, 0.81200000000000006, 0.81088000000000005, 0.81055999999999995, 0.81072, 0.81103999999999998, 0.81311999999999995], 'val_loss': [0.66323673450469967, 0.5854979243087769, 0.55960757585525511, 0.54635018127441404, 0.93855842555999758, 0.53104276062011724, 0.48182720432281495, 
0.96671885446548467, 0.46510607001304627, 0.73187205430984492, 0.50358315374374385, 0.57243882286071779, 1.0303087930679322, 0.78865345264434816, 0.57424940204143526, 0.63237994078636173, 0.62346292119979863, 0.64191558809280391, 0.70791081592559812, 0.78111661178588865, 0.64544504346847531, 0.71805332042694092, 0.6913547252655029, 0.6689467629051209, 0.67256693210601803, 0.67230848577499391, 0.68701177753448484, 0.67543874372482304, 0.69979702983856196, 0.70509329910278318, 0.70809677968978879, 0.73088359319686891, 0.7167953396987915, 0.72915623466491697, 0.74287793304443361, 0.73951993263244631, 0.74755212207794186, 0.7542802974319458, 0.76091298088073733, 0.75528446842193608, 0.76680799823760981, 0.76910124156951909, 0.77137601329803462, 0.78814651325225826, 0.79396389606475826, 0.78126737098693844, 0.79712862529754636, 0.79072718971252443, 0.79756058330535884, 0.79091342765808104, 0.80556001319885251, 0.81648752490997312, 0.8161456254577637, 0.79706697704315188, 0.81621215755462651, 0.82276443237304686, 0.8298242942810059, 0.84755123180389402, 0.83619567523956295, 0.81817739814758306], 'size': [31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31]}
hist ={'acc': [0.61951999999999996, 0.77170666666666665, 0.83760000000000001, 0.86528000000000005, 0.88613333333333333, 0.89874666666666669, 0.90709333333333331, 0.91602666666666666, 0.92482666666666669, 0.92698666666666663, 0.93288000000000004, 0.93922666666666665, 0.94135999999999997, 0.94802666666666668, 0.94869333333333339, 0.95133333333333336, 0.95525333333333329, 0.95743999999999996, 0.95960000000000001, 0.96135999999999999, 0.96306666666666663, 0.96509333333333336, 0.96543999999999996, 0.97045333333333328, 0.97186666666666666], 'loss': [0.63844141895294193, 0.47848376563707989, 0.36781092189153036, 0.31195125549952191, 0.27345538512547812, 0.24447771657943726, 0.223868361714681, 0.20573332547187806, 0.18833642890135446, 0.17918039000511168, 0.16939229509512582, 0.15453991984923682, 0.14843729253053664, 0.13444095009485882, 0.13151166947762172, 0.12288821726799011, 0.11764726775487264, 0.1118359219566981, 0.10408324633876483, 0.10383469001789888, 0.095940436923901237, 0.094547189220984776, 0.090859603000481917, 0.081742304106553393, 0.075947360633611682], 'batch': [585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585, 585], 'val_acc': [0.62343999999999999, 0.82647999999999999, 0.86951999999999996, 0.89295999999999998, 0.89936000000000005, 0.90127999999999997, 0.91903999999999997, 0.90471999999999997, 0.92471999999999999, 0.91991999999999996, 0.93191999999999997, 0.92871999999999999, 0.93552000000000002, 0.93896000000000002, 0.93328, 0.93535999999999997, 0.93600000000000005, 0.92944000000000004, 0.92935999999999996, 0.93584000000000001, 0.94040000000000001, 0.94023999999999996, 0.94376000000000004, 0.94208000000000003, 0.94328000000000001], 'val_loss': [0.64920384429931643, 0.39348448449134826, 0.33252282171249392, 0.27575752491950989, 0.25996453018188476, 0.24484089900016784, 0.21952514928817748, 0.22738555394649507, 0.19011760674476624, 0.19792678730964661, 0.16936864914894104, 
0.17935732094287873, 0.16077562838077544, 0.16285966594219209, 0.17462326645851134, 0.16419303091764451, 0.16285175944805144, 0.1837169475889206, 0.1762955101251602, 0.16733106744289397, 0.16371256225824357, 0.15570383219003678, 0.14981777836799623, 0.15030898282051086, 0.15404446303367614], 'size': [31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31]}
# Extract the per-epoch loss curves from the Keras-style history dict above
# and save a train-vs-validation loss plot.
train_loss = hist['loss']
valid_loss = hist['val_loss']
x = np.arange(1, len(train_loss) + 1, 1)  # NOTE(review): computed but never used
import matplotlib.pyplot as plt
# plt.subplot(223)
plt.title("train validation loss on each epoch")
plt.xlabel("epoch")
plt.ylabel("loss")
# x axis is the implicit 0-based index from plot().
plt.plot(train_loss,'g^',label = "train_loss")
plt.plot(valid_loss,'r--', label="valid_loss")
# Create a legend for the first line.
plt.legend(loc = 'lower center',shadow = True)
# Add the legend manually to the current Axes.
# Create another legend for the second line.
file = "ca_04_18_09_loss.jpg"  # NOTE(review): name shadows the Py2 builtin `file`
plt.savefig(file)
plt.close()
|
16,870 | 5f1a4c3a03b2ef9ec08bc5af78667b8ffc79daab | import unittest
import numpy as np
from chainer import testing
from chainercv.visualizations import vis_image
try:
import matplotlib # NOQA
optional_modules = True
except ImportError:
optional_modules = False
@testing.parameterize(
    {'img': np.random.randint(0, 255, size=(3, 32, 32)).astype(np.float32)},
    {'img': None}
)
class TestVisImage(unittest.TestCase):
    """Smoke test: vis_image should return a matplotlib Axes for both a
    random CHW float image and a None (blank) input."""

    def test_vis_image(self):
        # Runs only when matplotlib imported successfully (see optional_modules).
        if optional_modules:
            ax = vis_image(self.img)
            self.assertTrue(isinstance(ax, matplotlib.axes.Axes))
testing.run_module(__name__, __file__)
|
16,871 | d67277ef7c02df3430bceb52ee5cbff6c6477b30 | """
Make a resource for the current container.
Presumes there is something in the container indicating the current
page info from Sphinx.
"""
from typing import Dict, Any
from wired import ServiceContainer
from themester.protocols import Resource, Root
from themester.resources import Document
from themester.sphinx.models import PageContext
def resource_factory(container: ServiceContainer) -> Resource:
    """Make the resource for the page Sphinx is currently rendering.

    The page's ``type`` metadata chooses between the site root (for the
    homepage) and a plain ``Document`` parented at the root.
    """
    root: Root = container.get(Root)
    page_context: PageContext = container.get(PageContext)
    document_metadata: Dict[str, Any] = page_context.meta
    # Homepage pages resolve to the root itself; everything else gets a Document.
    if document_metadata.get('type', 'document') == 'homepage':
        return root
    return Document(
        name=page_context.pagename,
        parent=root,
        title=page_context.title,
    )
|
16,872 | 99925b0266af056d8806cd0ea97af67e25322969 | from common import *
from event import *
from rain import *
import xarray as xr
def to_netcdf(Rain_gage=None, Rain_radar=None, out_file='{site}.nc', path='', site='City', station_names=None, **kwargs):
    """Write gage and/or radar Rain objects to a CF-1.6 timeSeries netCDF file.

    NOTE(review): indentation reconstructed from a whitespace-mangled dump.
    `ds_` is only bound when Rain_gage is given, so a radar-only call would
    hit `if not ds_` with the name unbound -- confirm the intended call
    pattern.  Python 2 era code (uses pd.datetime).
    """
    def __do_ll(Rain, ds0):
        # Merge station lat/lon (plus optional extra columns) into ds0.
        ll = ['lat', 'lon']
        if type(station_names) is list:
            ll.extend(station_names)
        f = Rain.ll.loc[:, ll]
        f = f.reset_index(range(len(f.index)))
        f.index.name='station'
        f = f.loc[:, ll]
        ds1 = xr.Dataset.from_dataframe(f)
        ds_ = ds0.merge(ds1)
        return ds_, f
    def __do_rate(Rain, name, standard_name):
        # Convert the per-interval rate table into a (station, time) variable.
        Rain.rate.index.name = 'time'
        datasets = [xr.DataArray(Rain.rate[i]/Rain.per_hour) for i in Rain.rate.columns]
        combined = xr.concat(datasets, 'station')
        ds0 = combined.to_dataset(name=name)
        ds0[name].attrs.update({'units': Rain.units, 'standard_name': standard_name, 'coordinates': 'lat lon'})
        ds0[name].encoding = {'chunksizes': (5, 100000), 'zlib': True}
        return ds0
    if Rain_gage:
        ds0 = __do_rate(Rain_gage, name='rain_gage', standard_name='gage rain depth')
        ds_, f = __do_ll(Rain_gage, ds0)
    if Rain_radar:
        ds0 = __do_rate(Rain_radar, name='rain_radar', standard_name='radar rain depth')
        if not ds_:
            ds_, f = __do_ll(Rain_radar, ds0)
        else:
            ds_ = ds_.merge(ds0)
    if type(station_names) is pd.DataFrame:
        # Attach human-readable station names, matched on lat/lon.
        if 'station_name' not in station_names.columns:
            station_names.index.name = 'station_name'
            station_names = station_names.reset_index(range(len(station_names.index)))
        f['station'] = f.index
        f = f.merge(station_names, how='outer', on=['lat','lon'])
        f = f.reset_index(f['station'])
        f.index.name = 'station'
        f = f.drop(['lat','lon', 'station', 'index'], axis=1)
        ds1 = xr.Dataset.from_dataframe(f)
        ds_ = ds_.merge(ds1)
        ds_.station_name.attrs.update({'long_name': 'station name', 'cf_role':'timeseries_id'})
    # CF metadata for coordinates and the dataset as a whole.
    ds_.lat.attrs.update({'standard_name': 'latitude', 'long_name':'station latitude', 'units': 'degrees_north'})
    ds_.lon.attrs.update({'standard_name': 'longitude', 'long_name':'station longitude', 'units': 'degrees_east'})
    ds_.time.encoding = {'units':'minutes since 1970-01-01', 'calendar':'gregorian', 'dtype': np.double}
    ds_.attrs.update({'description': '{site} rain gage network'.format(site=site),
                      'history': 'Created {now}'.format(now=pd.datetime.now()),
                      'Conventions': "CF-1.6",
                      'featureType': 'timeSeries'})
    ds_.to_netcdf(path=path+out_file.format(site=site), format='netCDF4', engine='h5netcdf')
    ds_.close()
def read_netcdf(nc_file, path=''):
    """Read a netCDF produced by to_netcdf back into Rain / RadarGage objects.

    Falls back to returning the raw (data, locations) DataFrame tuple when
    the expected index/column names cannot be reconstructed.
    NOTE(review): Python 2 source (print statements, list .remove while
    iterating dict keys); indentation reconstructed from a mangled dump.
    """
    ds = xr.open_dataset(path+nc_file, decode_coords=False)
    print ds
    gage=None
    radar=None
    vars = ds.data_vars.keys()
    for var in vars:
        # 2-D variables are the (station x time) rain-rate tables.
        if ds.data_vars[var].ndim == 2:
            df = ds[var].to_pandas()
            if len(df.columns)> len(df.index):
                df = df.T
            # Normalize index/column names to what Rain expects.
            if 'gage' in df.columns.name.lower() or df.columns.name=='station':
                df.columns.name = 'RG'
            if 'time' in df.index.name.lower():
                df.index.name = 'date_time'
            try:
                # Sampling frequency in minutes from the first two timestamps.
                freq = (df.index[1]-df.index[0]).seconds/60
                kwargs = {'ngages': min(ds.dims.values()),
                          'units': ds[var].units,
                          'per_hour': 60/freq,
                          'freq': str(freq)+'min'}
            except:
                kwargs = None
            if 'gage' in var:
                gage = {'df':df, 'kwargs':kwargs}
            if 'radar' in var:
                radar = {'df':df, 'kwargs':kwargs}
            vars.remove(var)
    # Remaining variables are per-station metadata (lat/lon/names).
    ll = ds[vars].to_dataframe()
    print('')
    print('Rain objects need specific index and column names: ')
    print('RG, date_time, lat, lon. Trying to set them...')
    if 'gage' in ll.index.name.lower() or ll.index.name=='station':
        ll.index.name = 'RG'
    if 'latitiude' in ll.columns:
        ll.columns = [l.lower()[:3] for l in ll.columns]
    print ''
    if not gage and not radar:
        if df.columns.name == ll.index.name =='RG' and df.index.name=='date_time' and 'lat' in ll.columns and 'lon' in ll.columns:
            print 'Sucess!'
            if kwargs is not None:
                rg = Rain(df, **kwargs)
                rg.set_ll(ll)
                return rg
        else:
            print 'Manual editing needed'
    if gage:
        gage = Rain(gage.get('df'), show=False, **gage.get('kwargs'))
        gage.set_ll(ll)
        if not radar:
            gage.show()
            return gage
    if radar:
        radar = Rain(radar.get('df'), show=False, **radar.get('kwargs'))
        radar.set_ll(ll)
        if not gage:
            radar.show()
            return radar
    if gage and radar:
        p = RadarGage(gage, radar)
        p.set_ll(ll)
        p.show()
        return p
    print ''
    print 'Returning tuple containing data dataframe and location dataframe '
    print '(once these are cleaned, initialize Rain directly with required kwargs: '
    print 'ngages, units, per_hour, freq)'
    return (df, ll)
|
16,873 | 3ba3ee151cfcfeae2c4d1162f5d54e06a7c73e44 | import tensorflow as tf
class NonParametricModel(object):
    """Nearest-support classifier: prediction is the index of the support
    row with the largest dot product against the fed video feature.

    TensorFlow 1.x graph-mode code (tf.placeholder).
    """

    def __init__(self, support_set):
        # support_set is indexed as (n_support, feature_dim) -- inferred from
        # the [1, support_set.shape[1]] placeholder; TODO confirm.
        self.support_set = tf.constant(support_set)
        # Placeholder for one query feature vector.
        self.video_feature_ph = tf.placeholder(tf.float32, [1, support_set.shape[1]])
        logits = tf.matmul(a=self.support_set, b=self.video_feature_ph, transpose_b=True)
        # Index of the best-matching support example (argmax over rows).
        self.prediction = tf.argmax(logits, 0)
|
def playerChoises(playerA, playerB, playerOptions) :
    """Prompt both players for their hands, re-asking until the input is valid.

    Returns a (playerAHand, playerBHand) tuple of option codes.
    """
    while True :
        playerAHand = input(
            playerA + " what is your option? \ntype pp for paper, st for stone or sc for scissors\n")
        if playerAHand == playerOptions['paper'] or playerAHand == playerOptions['stone'] or playerAHand == \
                playerOptions['scissors'] :
            break
        else :
            print("Not right, try again!!\n")
    while True :
        playerBHand = input(
            "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" + playerB + " what is your option? \ntype pp for paper, st for stone or sc for scissors\n")
        # Bug fix: this loop previously re-validated playerAHand, so any
        # input from player B was accepted unchecked.
        if playerBHand == playerOptions['paper'] or playerBHand == playerOptions['stone'] or playerBHand == \
                playerOptions['scissors'] :
            break
        else :
            print("Not right, try again!!\n")
    return (playerAHand, playerBHand)
def playGame(playerAHand, playerBHand, playerOptions) :
    """Score one round: 0 = draw (or unrecognized hands), 1 = player A wins,
    2 = player B wins."""
    pp = playerOptions['paper']
    st = playerOptions['stone']
    sc = playerOptions['scissors']
    # (winner, loser) pairs: paper beats stone, stone beats scissors,
    # scissors beats paper.
    beats = {(pp, st), (st, sc), (sc, pp)}
    if (playerAHand, playerBHand) in beats :
        return 1
    if (playerBHand, playerAHand) in beats :
        return 2
    return 0
def playAgain() :
    """Ask whether to play another round; returns the raw answer string."""
    return input("Do you want to play again?\n Answer y for YES or n for NO.\n")
def main() :
    """Run the interactive two-player rock-paper-scissors game loop."""
    playerAHand = None
    playerBHand = None
    playerA = ''
    playerB = ''
    # Option codes the players type at the prompt.
    playerOptions = {'paper' : 'pp', 'stone' : 'st', 'scissors' : 'sc'}
    gameOptions = {'yes' : 'y', 'no' : 'n'}
    playerA = input("Hello player one,\nwhats your name?\n")
    playerB = input("Hello player two,\nwhats your name?\n")
    answer = gameOptions['yes']
    result = 0
    print("Let's play!!!")
    # Keep playing rounds until the player answers anything but 'y'.
    while answer == (gameOptions['yes']) :
        (playerAHand, playerBHand) = playerChoises(playerA,playerB,playerOptions)
        result = playGame(playerAHand, playerBHand, playerOptions)
        if result == 1 :
            print("CONGRATULATIONS " + playerA + " \n You're the winner!!!!!!!!!!\n\n")
        elif result == 2 :
            print("CONGRATULATIONS " + playerB + " \n You're the winner!!!!!!!!!!\n\n")
        else :
            print("Even, try again")
        answer = playAgain()

if __name__ == "__main__" :
    main()
|
16,875 | dcdb1839a4f4dc3c6665a187b1fea6415d03f473 | import flask
from flask import request, jsonify
app = flask.Flask(__name__)
@app.route('/', methods=['get'])
def home():
    """Root endpoint; returns a plain-text greeting."""
    return 'hello world'

if __name__ == "__main__":
    # Development server only (debug mode enables the reloader/debugger).
    app.run(debug=True)
16,876 | 1cefa7c9e9df0477d75b15c389e48d37bd274934 | #!/bin/python
# Split the selected-event list evenly across `nfiles` output files;
# the last file also absorbs the remainder.
with open('DYToEE_selected_event_list.txt', 'r') as input_f:
    evtlist = input_f.readlines()

nevts = len(evtlist)
print('N evts:', nevts)

nfiles = 5
print('N files to split over:', nfiles)

nbatch = nevts // nfiles
print('N evts / file:', nbatch)

for f in range(nfiles):
    start = f * nbatch
    stop = (f + 1) * nbatch if f < nfiles - 1 else nevts
    print(f, start, stop)
    # Bug fix: use a context manager so the output file is closed even if a
    # write fails (previously relied on a manual close at the end).
    with open('DYToEE%d_selected_event_list.txt' % f, 'w') as output_f:
        output_f.writelines(evtlist[start:stop])
|
16,877 | 3dd28e4da31d0698c79008a856a3e0d0bbb29416 | from bottle import run, get, post, request, delete
import pandas as pd
import MySQLdb
#SQL Parameters
server = 'localhost'
db = 'mydb'
#Create the connection
conn = MySQLdb.connect(host="localhost", # your host, usually localhost
user="dassowmd", # your username
passwd="12345", # your password
db="mydb") # name of the data base
#animals = [{'name' : 'Ellie', 'type' : 'Elephant'},
# {'name' : 'Python', 'type' : 'Snake'},
# {'name' : 'Zed', 'type' : 'Zebra'}]
@get('/Speed_Test_Results')
def getAll():
    """Return every row of Internet_Speed_Test as a JSON object.

    Bug fixes: `db` is just the database *name* string, so the cursor must
    come from the MySQLdb connection `conn`; and `results = {columns=}`
    was a syntax error.
    """
    cur = conn.cursor()
    cur.execute("SELECT * FROM mydb.Internet_Speed_Test")
    columns = [col[0] for col in cur.description]
    rows = cur.fetchall()
    # Print each row (preserves the original debug output).
    for row in rows:
        print(row)
    return {'results': [dict(zip(columns, row)) for row in rows]}
@get('/animal/<name>')
def getOne(name):
    """Return the single animal whose name matches the URL segment.

    NOTE(review): the `animals` list this reads is commented out at module
    level, so this handler raises NameError as written -- confirm intent.
    Also raises IndexError when no animal matches.
    """
    the_animal = [animal for animal in animals if animal['name'] == name]
    return {'animal' : the_animal[0]}
@post('/Speed_Test_Results<Date_Test_Ran><Time_Test_Ran><IP_Address><User_Name><Ping_Time><Download_Time><Upload_Time>')
def addOne(Date_Test_Ran, Time_Test_Ran, IP_Address, User_Name, Ping_Time, Download_Time, Upload_Time):
    """Insert one speed-test result row.

    Bug fixes: the original built the SQL string from names that were never
    defined (bottle passes route wildcards as keyword arguments, so they are
    now declared as parameters), never executed the query, and returned the
    undefined `animals`.  The query is parameterized to avoid SQL injection.
    """
    sql = "INSERT INTO Internet_Speed_Test VALUES (%s, %s, %s, %s, %s, %s, %s)"
    cur = conn.cursor()
    cur.execute(sql, (Date_Test_Ran, Time_Test_Ran, IP_Address, User_Name,
                      Ping_Time, Download_Time, Upload_Time))
    conn.commit()
    return {'inserted': 1}
@delete('/animal/<name>')
def removeOne(name):
    """Delete the named animal and return the remaining list.

    NOTE(review): depends on the commented-out module-level `animals`
    list; raises NameError as written -- confirm intent.
    """
    the_animal = [animal for animal in animals if animal['name'] == name]
    animals.remove(the_animal[0])
    return {'animals' : animals}

# Start the development server (auto-reload on code change, debug tracebacks).
run(reloader=True, debug=True)
|
16,878 | 8fc489600771c56d9668547b5b1885f9600addd1 | #! /usr/bin/python
class InvalidInputException(Exception):
    """Raised by increment() when its input list is None or empty."""

    def __init__(self,value):
        # The offending value / message, echoed by __str__.
        self.value = value

    def __str__(self):
        return repr(self.value)
def increment(number):
    """increment: list -> list
    Purpose: Checks if input is valid and then calls increment helper.
    This should throw InvalidInputException if your list is empty or null.
    Consumes: A list of digits representing a number
    Produces: A list of 0's and 1's representing that number + 1
    """
    # Bug fix: `number is None or []` parsed as `(number is None) or []`,
    # and a bare [] literal is always falsy -- so the empty list was never
    # rejected.  Also converted the Python-2-only `raise Exc, msg` syntax
    # to the call form, which works on both Python 2 and 3.
    if number is None or number == []:
        raise InvalidInputException("inputted number cannot be None or null")
    return increment_helper(number)
def increment_helper(number):
    """increment: list -> list
    Purpose: Add 1 to a binary number, recursing to propagate the carry.
    Consumes: a list of 0's and 1's representing a binary number, k
    Produces: a list of 0's and 1's representing k + 1
    Example:
    increment_helper([1,1,0,0]) -> [1,1,0,1]
    """
    # Base case: a lone 1 overflows into an extra digit.
    if len(number) == 1 and number[0] == 1:
        return [1, 0]
    if number[-1] == 1:
        # Carry: increment everything before the last digit, append a 0.
        prefix = increment_helper(number[:-1])
        prefix.append(0)
        return prefix
    # No carry needed: flip the trailing 0 to 1 (mutates the list in place).
    number[-1] = 1
    return number
|
16,879 | 8f9c7eaaf8b80a2253924586e8a4bcf65d57b62d | import argparse
from pydoc import locate
from torch.nn import DataParallel
import utils
# Parse the experiment name from the command line.
parser = argparse.ArgumentParser(description='Validate a experiment with different test time augmentations.')
parser.add_argument('name', help='Use one of the experiment names here excluding the .py ending.')
args = parser.parse_args()
name = args.name

experiment_logger = utils.ExperimentLogger(name, mode='val')

# Validate each k-fold split with the experiment's best checkpoint.
for i, (samples_train, samples_val) in enumerate(utils.mask_stratified_k_fold()):
    # Get the model architecture
    Model = locate('experiments.' + name + '.Model')
    model = Model(name, i)
    # Load the best performing checkpoint
    model.load()
    # Validate on both the train and the val samples of this split
    # (epoch -1 signals a standalone validation pass).
    stats_train = model.validate(DataParallel(model.net).cuda(), samples_train, -1)
    stats_val = model.validate(DataParallel(model.net).cuda(), samples_val, -1)
    stats = {**stats_train, **stats_val}
    experiment_logger.set_split(i, stats)

# Print and persist the aggregated per-split statistics.
experiment_logger.print()
experiment_logger.save()
16,880 | a1e6a087063dd5083573278039457f4b24090c58 | ###################################################################################
#
# Copyright (C) 2018 MuK IT GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###################################################################################
import os
import base64
import logging
from odoo import api, models
from odoo.tools.mimetypes import guess_mimetype
_logger = logging.getLogger(__name__)
MIMETYPE_CSV = 'text/csv'
MIMETYPE_XLS = 'application/vnd.ms-excel'
MIMETYPE_ODS = 'application/vnd.oasis.opendocument.spreadsheet'
MIMETYPE_XLSX = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
class AccountBankStatementImport(models.TransientModel):
    """Route spreadsheet-like bank statement files (csv/xls/xlsx/ods) to the
    interactive import wizard instead of the standard importer."""

    _inherit = "account.bank.statement.import"

    def _check_csv(self, data_file, filename):
        """True when the file looks like CSV (by extension, else mimetype)."""
        return (filename and os.path.splitext(filename)[1] == '.csv') or \
            guess_mimetype(data_file) == MIMETYPE_CSV

    def _check_xls(self, data_file, filename):
        """True when the file looks like a legacy Excel workbook."""
        return (filename and os.path.splitext(filename)[1] == '.xls') or \
            guess_mimetype(data_file) == MIMETYPE_XLS

    def _check_xlsx(self, data_file, filename):
        """True when the file looks like an OOXML workbook."""
        return (filename and os.path.splitext(filename)[1] == '.xlsx') or \
            guess_mimetype(data_file) == MIMETYPE_XLSX

    def _check_ods(self, data_file, filename):
        """True when the file looks like an OpenDocument spreadsheet."""
        return (filename and os.path.splitext(filename)[1] == '.ods') or \
            guess_mimetype(data_file) == MIMETYPE_ODS

    @api.multi
    def import_file(self):
        """Dispatch supported spreadsheet formats to the wizard; fall back to
        the standard import for everything else."""
        if self._check_csv(self.data_file, self.filename):
            return self._import_wizard(self.filename, self.data_file, MIMETYPE_CSV)
        elif self._check_xls(self.data_file, self.filename):
            return self._import_wizard(self.filename, self.data_file, MIMETYPE_XLS)
        elif self._check_xlsx(self.data_file, self.filename):
            return self._import_wizard(self.filename, self.data_file, MIMETYPE_XLSX)
        elif self._check_ods(self.data_file, self.filename):
            return self._import_wizard(self.filename, self.data_file, MIMETYPE_ODS)
        return super(AccountBankStatementImport, self).import_file()

    @api.model
    def _import_wizard(self, file_name, file, file_type):
        """Create the import wizard record and open its client action.

        Bug fix: the method previously ignored its parameters -- it
        hard-coded 'text/csv' and re-read self.filename / self.data_file --
        so xls/xlsx/ods imports were mislabelled as CSV.
        """
        wizard = self.env['account.bank.statement.import.wizard'].create({
            'res_model': "account.bank.statement.line",
            'file_type': file_type,
            'file_name': file_name,
            'file': base64.b64decode(file),
        })
        context = dict(self.env.context)
        context.update({'wizard_id': wizard.id})
        return {
            'type': 'ir.actions.client',
            'tag': 'import_bank_statement',
            'params': {
                'model': "account.bank.statement.line",
                'filename': file_name,
                'context': context,
            }
        }
16,881 | b8e7aaf6d486a7ee926375918cedbfbab51415eb |
class Cli:
def __init__(self):
pass
def start(self,command):
pass |
16,882 | f0cb3b63a82c17588622c2ff834f1a4796e205b6 | from wsmode import messages as m
class MessageCallbackHandler:
    """Registry that maps topic-message *types* to handler callables."""

    def __init__(
        self,
    ):
        # Maps message class -> registered handler function.
        self.handlers = {}

    def dispatch(self, topic_message: m.TopicMessage):
        """Invoke the handler registered for this message's type.

        Bug fix: an unregistered type previously produced an opaque
        ``TypeError: 'NoneType' object is not callable``; now raises a
        descriptive KeyError instead.
        """
        topic_message_type = topic_message.__class__
        try:
            handler_func = self.handlers[topic_message_type]
        except KeyError:
            raise KeyError(
                "no handler registered for message type {!r}".format(topic_message_type)
            ) from None
        return handler_func(topic_message)

    def register_event(self, msg_type: m.TopicMessage):
        """Decorator: register the wrapped function as handler for msg_type."""
        def decorator(fn):
            self.handlers[msg_type] = fn
            return fn
        return decorator
|
16,883 | 9602d1f7a48a519d7430eeca4e4c56953c795306 | # -*- coding=utf-8 -*-
import urllib3
from random import randint
class BDPing:
    """Submit weblogUpdates.ping XML-RPC requests to Baidu's ping service,
    optionally through proxies drawn from a local proxy-pool service on
    127.0.0.1:5010.

    NOTE(review): indentation and some in-string line breaks were
    reconstructed from a whitespace-mangled dump, and several runtime string
    literals contain mojibake from a broken source encoding; they are
    preserved exactly as found.
    """

    def __init__(self):
        # Shared pool manager with a 3-second timeout for direct requests.
        self.http = urllib3.PoolManager(timeout=3.0)

    def get_proxy(self):
        # Fetch one proxy address (raw bytes) from the local proxy pool.
        return self.http.request('GET', 'http://127.0.0.1:5010/get/').data

    def delete_proxy(self, proxy):
        # Drop a dead proxy from the local proxy pool.
        self.http.request('GET', "http://127.0.0.1:5010/delete/?proxy={}".format(proxy))

    def ping(self, url):
        """Ping a single URL through a pooled proxy.

        Returns True on a ``<int>0</int>`` success reply; on errors it
        recursively retries and eventually returns False.
        NOTE(review): the recursion has no depth limit -- confirm this is
        acceptable for long outages.
        """
        # weblogUpdates.ping request template; both params are the URL.
        xml = """
        <?xml version="1.0"?>
        <methodCall>
        <methodName>weblogUpdates.ping</methodName>
        <params>
        <param>
        <value><string>%s</string></value>
        </param><param><value><string>%s</string></value>
        </param>
        </params>
        </methodCall>
        """
        xml = (xml % (url, url))
        headers = {
            'Content-Type': 'text/xml',
            'User-Agent': 'request',
            'Content-Length': str(len(xml))
        }
        # Placeholder shown when no proxy could be fetched (mojibake text).
        proxy = '\033[31;1mๆช่ทๅๅฐไปฃ็ IP !'
        try:
            proxy = 'http://%s' % str(self.get_proxy(), encoding='utf-8').strip('')
        except:
            print('\033[31;1mๆฌๅฐ้พๆฅ่ถ
ๆถ ้่ฏ๏ผ๏ผ๏ผ๏ผ๏ผ๏ผ')
            self.ping(url)
        print('\033[32;1m่ทๅIP %s ' % proxy)
        while True:
            try:
                proxy_http = urllib3.ProxyManager(proxy)
                html = proxy_http.request('POST', 'http://ping.baidu.com/ping/RPC2',
                                          headers=headers, body=xml, timeout=3.0)
                # Access the ping endpoint through the proxy.
                if '<int>0</int>' in html.data.decode():
                    return True
            except Exception as e:
                print('\033[31;1mๅคฑ่ดฅ้่ฏ ping %s' % url)
                self.ping(url)
                return False

    def ping_all(self, urls):
        """Ping every URL in *urls* through one pooled proxy with a random
        User-Agent; logs success/failure per URL."""
        agent = BDPing.user_agent()
        try:
            proxy = 'http://%s' % str(self.get_proxy(), encoding='utf-8').strip('')
            proxy_http = urllib3.ProxyManager(proxy)
        except:
            print('\033[31;1mๆฌๅฐ้พๆฅ่ถ
ๆถ ้ๅบ')
            return
        for url in urls:
            xml = """
            <?xml version="1.0"?>
            <methodCall>
            <methodName>weblogUpdates.ping</methodName>
            <params>
            <param>
            <value><string>%s</string></value>
            </param><param><value><string>%s</string></value>
            </param>
            </params>
            </methodCall>
            """
            xml = (xml % (url, url))
            headers = {
                'Content-Type': 'text/xml',
                'User-Agent': agent,
                'Content-Length': str(len(xml)),
                'Host': 'ping.baidu.com',
                'Origin': 'http://ping.baidu.com',
            }
            try:
                html = proxy_http.request('POST', 'http://ping.baidu.com/ping/RPC2',
                                          headers=headers, body=xml, timeout=3.0)
                # Access the ping endpoint through the proxy.
                status = html.status
                if '<int>0</int>' in html.data.decode():
                    print('\033[32;1mๆๅ ping: %s status: %s ip: %s Agent: %s' % (url, status, proxy, agent))
                else:
                    print(html.data.decode())
                    print('\033[31;1mๅคฑ่ดฅ ping: %s status: %s ip: %s Agent: %s' % (url, status, proxy, agent))
            except Exception as e:
                print('\033[31;1mๅคฑ่ดฅ ping: %s status: %s ip: %s Agent: %s' % (url, 0, proxy, agent))
        return

    def ping_all_new(self, urls):
        """Ping every URL directly (no proxy), with a Referer header added;
        the proxy code is kept commented out for reference."""
        agent = BDPing.user_agent()
        try:
            # proxy = 'http://%s' % str(self.get_proxy(), encoding='utf-8').strip('')
            # proxy_http = urllib3.ProxyManager(proxy)
            pass
        except:
            print('\033[31;1mๆฌๅฐ้พๆฅ่ถ
ๆถ ้ๅบ')
            return
        for url in urls:
            xml = """
            <?xml version="1.0"?>
            <methodCall>
            <methodName>weblogUpdates.ping</methodName>
            <params>
            <param>
            <value><string>%s</string></value>
            </param><param><value><string>%s</string></value>
            </param>
            </params>
            </methodCall>
            """
            xml = (xml % (url, url))
            headers = {
                'Content-Type': 'text/xml',
                'User-Agent': agent,
                'Content-Length': str(len(xml)),
                'Host': 'ping.baidu.com',
                'Origin': 'http://ping.baidu.com',
                'Referer': 'http://ping.baidu.com/ping.html'
            }
            try:
                html = self.http.request('POST', 'http://ping.baidu.com/ping/RPC2',
                                         headers=headers, body=xml, timeout=3.0)
                # Access the ping endpoint (direct, no proxy).
                status = html.status
                if '<int>0</int>' in html.data.decode():
                    print('\033[32;1mๆๅ ping: %s status: %s Agent: %s' % (url, status, agent))
                    # print('\033[32;1mๆๅ ping: %s status: %s ip: %s Agent: %s' % (url, status, proxy, agent))
                else:
                    print('\033[31;1mๅคฑ่ดฅ ping: %s status: %s Agent: %s' % (url, status, agent))
                    # print('\033[31;1mๅคฑ่ดฅ ping: %s status: %s ip: %s Agent: %s' % (url, status, proxy, agent))
            except Exception as e:
                print('\033[31;1mๅคฑ่ดฅ ping: %s status: %s Agent: %s' % (url, 0, agent))
                # print('\033[31;1mๅคฑ่ดฅ ping: %s status: %s ip: %s Agent: %s' % (url, 0, proxy, agent))
        return

    @staticmethod
    def user_agent():
        """Return a random User-Agent string from a fixed pool.

        NOTE(review): the hard-coded randint(0,31) bound does not track the
        list length, and two entries are accidentally concatenated by a
        missing comma (the 'Opera/8.0' line) -- confirm before relying on
        even sampling.
        """
        all_agent = [
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',
            'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            'User-Agent:Mozilla/4.0(compatible;MSIE7.0;WindowsNT5.1)',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
            'Opera/8.0 (Windows NT 5.1; U; en)'
            'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
            'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
            'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50',
            'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
            'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
            'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
            'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
            'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
            'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)',
            'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)',
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)',
            'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)',
            'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0)',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0)',
        ]
        return all_agent[randint(0,31)]
16,884 | 8a434c4da79b716e176f24d32c4104d96b0a53ff | import json as js, spatial, os, numpy as np
from laspy.file import File
#start_time = time.time()
#print("--- %s seconds ---" % (time.time() - start_time))
def get_header(filename):
    """Open a LAS file and return its header serialized as a JSON string."""
    inFile = File(filename, mode="r")
    header = spatial.las_header(inFile.header)
    head_obj = js.dumps(header)
    inFile.close()
    return head_obj
def get_points_by_geom(filename, poly):
    """Return x/y/z points inside polygon `poly["rings"]` as a JSON string."""
    inFile = File(filename, mode="r")
    results = spatial.las_within(inFile, poly["rings"], ["x", "y", "z"])
    json = js.dumps(results)
    inFile.close()
    return json
def get_points_by_geom_mfn(filenames, poly):
    """Collect x/y/z points inside `poly["rings"]` across several LAS files
    and return them as one JSON string.

    Bug fix: each File handle is now closed after use (they were previously
    leaked, unlike the single-file siblings in this module).
    """
    points = []
    for fn in filenames:
        inFile = File(fn, mode="r")
        try:
            results = spatial.las_within(inFile, poly["rings"], ["x", "y", "z"], True)
        finally:
            inFile.close()
        points += results
    return js.dumps({"params": ["x", "y", "z"], "points": points})
def get_stats_by_geom(filename, poly):
    """Return z-value statistics for points inside `poly["rings"]` as JSON.

    NOTE(review): indexes results["points"], while get_points_by_geom uses
    the las_within result directly -- confirm the return shape of
    spatial.las_within.
    """
    inFile = File(filename, mode="r")
    results = spatial.las_within(inFile, poly["rings"], ["z"])
    inFile.close()
    stats = spatial.las_statistics(results["points"])
    json = js.dumps(stats)
    return json
def get_stats_by_geom_mfn(filenames, poly):
    """Collect z values inside `poly["rings"]` across several LAS files and
    return them as one JSON string.

    Bug fix: each File handle is now closed after use (previously leaked).
    NOTE(review): the sibling multi-file function calls spatial.las_within
    here; confirm spatial.get_stats_by_geom is really the intended helper.
    """
    points = []
    for fn in filenames:
        inFile = File(fn, mode="r")
        try:
            results = spatial.get_stats_by_geom(inFile, poly["rings"], ["z"], True)
        finally:
            inFile.close()
        points += results
    return js.dumps({"params": ["z"], "points": points})
|
16,885 | 9b663a5a76a5d3396cacf2d167bb39cb8d447a7a | #Hrฤek na kolesu
#Obvezna naloga------------------------------------------------------
def v_seznam(s):
    """Parse a '5,180; 5,907' style string into a list of floats.

    Readings are separated by '; ' and use a decimal comma.  An empty (or
    otherwise falsy) value yields an empty list -- the original crashed on
    None; now it is handled like the empty string.
    """
    if not s:
        return []
    # Idiom fix: comprehension instead of a manual index-while loop.
    return [float(part.replace(",", ".")) for part in s.split("; ")]
def v_niz(s):
    """Format a list of numbers back into a '123; 123,75' style string.

    Decimal points become commas; entries are joined with '; '.
    Performance/idiom fix: the original re-scanned the whole accumulated
    string with an inner while loop on every iteration (accidentally
    quadratic); str.join with per-element replace produces the same output.
    """
    return "; ".join(str(x).replace(".", ",") for x in s)
def oznaci_veljavne(s):
    """For each reading, return True iff no *different* reading lies within
    0.1 s of it (readings closer than that are sensor bounce and invalid)."""
    return [
        all(abs(cas - drug) > 0.1 for drug in s if drug != cas)
        for cas in s
    ]
def veljavne(s):
    """Return only the readings with no *different* reading within 0.1 s
    (i.e. the valid ones), preserving order."""
    return [
        cas for cas in s
        if all(abs(cas - drug) > 0.1 for drug in s if drug != cas)
    ]
def brez_napacnih_casov(s):
    """Keep only readings that strictly increase the running maximum
    (drops clock glitches that jump backwards).  Assumes s is non-empty."""
    najvecji = s[0]
    rezultat = [najvecji]
    for cas in s:
        if cas > najvecji:
            najvecji = cas
            rezultat.append(najvecji)
    return rezultat
#Dodatna naloga------------------------------------------------------------------
def odstrani_neveljavne(s):
    """Remove, in place, every reading that has a *different* reading within
    0.1 s of it.  Mutates s and returns None."""
    za_odstranit = [
        cas for cas in s
        if any(abs(cas - drug) <= 0.1 and cas != drug for drug in s)
    ]
    for cas in za_odstranit:
        s.remove(cas)
def najv_hitrost(s, o):
    """Maximum speed: circumference `o` divided by the shortest interval
    between consecutive usable readings.

    Keeps only valid readings (see veljavne), drops isolated outliers whose
    gaps to *both* neighbours exceed 2.0 s, then returns o / shortest gap.
    Implicitly returns None when fewer than two usable readings remain.
    NOTE(review): indentation reconstructed from a mangled dump (mirrors
    na_kolesu); the `i = 0` reset for exactly two readings appears to loop
    forever when those two lie within 2.0 s -- confirm against the original.
    """
    pravilne = veljavne(s)
    i = 0
    if len(pravilne) >= 2:
        while i < len(pravilne):
            if len(pravilne) == 2:
                i = 0
                # A lone pair more than 2.0 s apart is not a real rotation.
                if abs(pravilne[i]-pravilne[i+1]) > 2.0:
                    pravilne = []
                    break
            # Drop isolated outliers: far from both neighbours.
            if i != len(pravilne)-1 and abs(pravilne[i]-pravilne[i+1]) > 2.0 and abs(pravilne[i]-pravilne[i-1]) > 2.0:
                pravilne.remove(pravilne[i])
            i += 1
    if len(pravilne) >= 2:
        # Find the shortest gap between consecutive usable readings.
        najkrajsi_cas = 100
        i = 0
        while i < len(pravilne)-1:
            if najkrajsi_cas > abs(pravilne[i]-pravilne[i+1]) and i != len(pravilne)-1:
                najkrajsi_cas = abs(pravilne[i]-pravilne[i+1])
            i += 1
        max_hitrost = o/najkrajsi_cas
        return max_hitrost
def na_kolesu(s):
    """Total time actually spent riding: the sum of gaps shorter than 2.0 s
    between consecutive usable readings.

    Shares najv_hitrost's valid-reading filter and outlier-dropping loop
    (and its quirks); prints intermediate lists for debugging.  Implicitly
    returns None when fewer than two usable readings remain.
    NOTE(review): indentation reconstructed from a mangled dump.
    """
    pravilne = veljavne(s)
    print(pravilne)
    i = 0
    if len(pravilne) >= 2:
        while i < len(pravilne):
            if len(pravilne) == 2:
                i = 0
                if abs(pravilne[i] - pravilne[i + 1]) > 2.0:
                    pravilne = []
                    break
            # Drop isolated outliers: far from both neighbours.
            if i != len(pravilne) - 1 and abs(pravilne[i] - pravilne[i + 1]) > 2.0 and abs(
                    pravilne[i] - pravilne[i - 1]) > 2.0:
                pravilne.remove(pravilne[i])
            i += 1
    print(pravilne)
    if len(pravilne) >= 2:
        vsota = 0
        i = 0
        while i < len(pravilne) - 1:
            # Only gaps below 2.0 s count as continuous riding time.
            if abs(pravilne[i]-pravilne[i+1]) < 2.0:
                vsota += abs(pravilne[i] - pravilne[i + 1])
            i += 1
        return vsota
#Testi--------------------------------------
import unittest
class TestObvezna(unittest.TestCase):
    """Unit tests for the mandatory part: parsing, formatting, validity
    marking/filtering, and monotonic time cleanup."""

    def test_v_seznam(self):
        t = v_seznam("5,180; 5,907; 6,632; 7,215")
        self.assertEqual(len(t), 4)
        for e, f in zip(t, [5.180, 5.907, 6.632, 7.215]):
            self.assertAlmostEqual(e, f)
        t = v_seznam("5,180")
        self.assertEqual(len(t), 1)
        for e, f in zip(t, [5.180]):
            self.assertAlmostEqual(e, f)
        self.assertEqual(v_seznam(""), [])

    def test_v_niz(self):
        self.assertEqual(v_niz([123, 123.75, 124.5]), "123; 123,75; 124,5")
        self.assertEqual(v_niz([123.75]), "123,75")
        self.assertEqual(v_niz([]), "")

    def test_oznaci_veljavne(self):
        self.assertEqual(oznaci_veljavne([5.18, 5.907, 6.632, 7.215]), [True] * 4)
        self.assertEqual(oznaci_veljavne([132.3, 132.94]), [True] * 2)
        self.assertEqual(oznaci_veljavne([183.12]), [True])
        self.assertEqual(oznaci_veljavne([205.134, 205.182, 205.190, 205.207]), [False] * 4)
        self.assertEqual(oznaci_veljavne([308.412, 308.416]), [False] * 2)
        self.assertEqual(oznaci_veljavne([205.134, 205.182, 205.190, 205.207,
                                          250.13, 250.83, 251.6,
                                          308.412, 308.416]),
                         [False] * 4 + [True] * 3 + [False] * 2)
        self.assertEqual(oznaci_veljavne([205.134, 205.182, 308.416]),
                         [False] * 2 + [True])
        self.assertEqual(oznaci_veljavne([205.134, 205.182, 308.416, 308.999]),
                         [False] * 2 + [True] * 2)
        self.assertEqual(oznaci_veljavne([100, 100.8, 205.134, 205.182, 308.416, 308.999]),
                         [True] * 2 + [False] * 2 + [True] * 2)
        self.assertEqual(oznaci_veljavne([100, 100.8, 205.134, 205.182]),
                         [True] * 2 + [False] * 2)
        self.assertEqual(oznaci_veljavne([100,
                                          205.134, 205.182, 205.190, 205.207,
                                          250.13, 250.83, 251.6,
                                          308.412, 308.416]),
                         [True] + [False] * 4 + [True] * 3 + [False] * 2)
        self.assertEqual(oznaci_veljavne([100,
                                          205.134, 205.182, 205.190, 205.207,
                                          250.13, 250.83, 251.6,
                                          308.412, 308.416,
                                          500]),
                         [True] + [False] * 4 + [True] * 3 + [False] * 2 + [True])
        self.assertSequenceEqual(oznaci_veljavne([
            5.18, 5.907, 6.632, 7.215,
            132.3, 132.94,
            183.12,
            205.134, 205.182, 205.190, 205.207,
            308.412, 308.416,
            512.73, 513.20, 513.65,
            918.2, 918.73]),
            [True] * 4 + [True] * 2 + [True] + [False] * 4 + [False] * 2 + [True] * 3 + [True] * 2)

    def test_veljavne(self):
        self.assertEqual(veljavne([5.18, 5.907, 6.632, 7.215]), [5.18, 5.907, 6.632, 7.215])
        self.assertEqual(veljavne([132.3, 132.94]), [132.3, 132.94])
        self.assertEqual(veljavne([183.12]), [183.12])
        self.assertEqual(veljavne([205.134, 205.182, 205.190, 205.207]), [])
        self.assertEqual(veljavne([308.412, 308.416]), [])
        self.assertEqual(veljavne([205.134, 205.182, 205.190, 205.207,
                                   250.13, 250.83, 251.6,
                                   308.412, 308.416]),
                         [250.13, 250.83, 251.6])
        self.assertEqual(veljavne([205.134, 205.182, 308.416]), [308.416])
        self.assertEqual(veljavne([205.134, 205.182, 308.416, 308.999]),
                         [308.416, 308.999])
        self.assertEqual(veljavne([100, 100.8, 205.134, 205.182, 308.416, 308.999]),
                         [100, 100.8, 308.416, 308.999])
        self.assertEqual(veljavne([100, 100.8, 205.134, 205.182]), [100, 100.8])
        self.assertEqual(veljavne([100,
                                   205.134, 205.182, 205.190, 205.207,
                                   250.13, 250.83, 251.6,
                                   308.412, 308.416]),
                         [100,
                          250.13, 250.83, 251.6])
        self.assertEqual(veljavne([100,
                                   205.134, 205.182, 205.190, 205.207,
                                   250.13, 250.83, 251.6,
                                   308.412, 308.416,
                                   500]),
                         [100,
                          250.13, 250.83, 251.6,
                          500])
        self.assertSequenceEqual(veljavne([5.18, 5.907, 6.632, 7.215,
                                           132.3, 132.94,
                                           183.12,
                                           205.134, 205.182, 205.190, 205.207,
                                           308.412, 308.416,
                                           512.73, 513.20, 513.65,
                                           918.2, 918.73]),
                                 [5.18, 5.907, 6.632, 7.215,
                                  132.3, 132.94,
                                  183.12,
                                  512.73, 513.20, 513.65,
                                  918.2, 918.73])

    def test_brez_napacnih_casov(self):
        self.assertEqual(brez_napacnih_casov([1, 12, 33]), [1, 12, 33])
        self.assertEqual(brez_napacnih_casov([1, 12]), [1, 12])
        self.assertEqual(brez_napacnih_casov([12]), [12])
        self.assertEqual(brez_napacnih_casov([1, 20, 10, 30]), [1, 20, 30])
        self.assertEqual(brez_napacnih_casov([5, 20, 10, 5, 1, 30]), [5, 20, 30])
        self.assertEqual(brez_napacnih_casov([5, 20, 10, 15, 30]), [5, 20, 30])
        self.assertEqual(brez_napacnih_casov([5, 20, 10, 15]), [5, 20])
class TestDodatna(unittest.TestCase):
    """Tests for the bonus tasks: in-place filtering, top speed, cycling time."""
    def test_odstrani_neveljavne(self):
        """odstrani_neveljavne() filters the given list IN PLACE and returns None."""
        t = [5.18, 5.907, 6.632, 7.215]
        self.assertIsNone(odstrani_neveljavne(t))
        self.assertEqual(t, [5.18, 5.907, 6.632, 7.215])
        t = [123]
        self.assertIsNone(odstrani_neveljavne(t))
        self.assertEqual(t, [123])
        # Dense bursts are removed entirely.
        t = [205.134, 205.182, 205.190, 205.207]
        self.assertIsNone(odstrani_neveljavne(t))
        self.assertEqual(t, [])
        t = [308.412, 308.416]
        self.assertIsNone(odstrani_neveljavne(t))
        self.assertEqual(t, [])
        t = [205.134, 205.182, 205.190, 205.207,
             250.13, 250.83, 251.6,
             308.412, 308.416]
        self.assertIsNone(odstrani_neveljavne(t))
        self.assertEqual(t, [250.13, 250.83, 251.6])
        t = [5.18, 5.907, 6.632, 7.215,
             132.3, 132.94,
             183.12,
             205.134, 205.182, 205.190, 205.207,
             308.412, 308.416,
             512.73, 513.20, 513.65,
             918.2, 918.73]
        self.assertIsNone(odstrani_neveljavne(t))
        self.assertEqual(t, [5.18, 5.907, 6.632, 7.215,
                             132.3, 132.94,
                             183.12,
                             512.73, 513.20, 513.65,
                             918.2, 918.73])
    def test_najv_hitrost(self):
        """najv_hitrost() returns the top speed over valid readings, or None."""
        self.assertAlmostEqual(najv_hitrost([5.18, 5.907, 6.632, 7.215,
                                     132.3, 132.94,
                                     183.12,
                                     205.134, 205.182, 205.190, 205.207,
                                     308.412, 308.416,
                                     512.73, 513.20, 513.65,
                                     918.2, 918.73], 45),
                               100)
        self.assertAlmostEqual(najv_hitrost([24, 60,
                                     205.134, 205.182, 205.190, 205.207,
                                     512.73, 513.20, 513.65], 45),
                               100)
        # Not enough valid readings -> None.
        self.assertIsNone(najv_hitrost([24, 60, 205, 205.134, 205.140], 45))
        self.assertIsNone(najv_hitrost([205.134, 205.182, 205.190, 205.207,],
                                       45))
        self.assertIsNone(najv_hitrost([182.12,
                                205.134, 205.182, 205.190, 205.207,],
                                       45))
    def test_na_kolesu(self):
        """na_kolesu() returns the time spent actually cycling (valid spans only)."""
        self.assertAlmostEqual(na_kolesu([5.18, 5.907, 6.632, 7.18]), 2)
        self.assertAlmostEqual(na_kolesu([5.18, 5.907, 6.632, 7.18,
                                  205.134, 205.182, 205.190, 205.207]),
                               2)
        self.assertAlmostEqual(na_kolesu([5.18, 5.907, 6.632, 7.18,
                                  182.5,
                                  205.134, 205.182, 205.190, 205.207]),
                               2)
        self.assertAlmostEqual(na_kolesu([5.18, 5.907, 6.632, 7.18,
                                  182.5, 183,
                                  205.134, 205.182, 205.190, 205.207]),
                               2.5)
# Run the whole test suite when executed directly.
if __name__ == "__main__":
    unittest.main()
|
16,886 | ca4b9d8281e93607611bc86c8b80b90b16ec2e84 | #!/usr/bin/python
from db_conn import DB_Connector
class getnode(object):
    """Resolve the node name for a host IP from the inventory database.

    The lookup result is stored on ``self.node`` (the original ran the
    query in ``__init__`` but discarded the returned rows).
    """
    def __init__(self, host):
        self.hst = host
        # Keep the query result instead of throwing it away.
        self.node = self.loc()

    def loc(self):
        """Query ip_node for this host's node and return the result rows."""
        dbconn = DB_Connector('192.168.42.112', 'invent', 'invent#123', 'inventory')
        # NOTE(security): self.hst is interpolated straight into the SQL
        # string -- vulnerable to injection if the host value is untrusted.
        # Switch to a parameterized Execute() variant if DB_Connector has one.
        sql = "select node from ip_node where ip='%s'" % self.hst
        result = dbconn.Execute(sql)
        return result
|
16,887 | 4049e467178376a7702aaf385808107312475986 | # MoreFourCal.py
from FourCal import FourCal
class MoreFourCal(FourCal):
    """FourCal extended with exponentiation and zero-safe division."""

    def pow(self):
        """Return first raised to the power of second."""
        return self.first ** self.second

    def div(self):
        """Return first / second, or 0 when second is zero."""
        return 0 if self.second == 0 else self.first / self.second
# Demo: exercise the inherited operations and the two new ones.
a = MoreFourCal(4, 2)
print(a.sum())
print(a.mul())
print(a.sub())
print(a.div())
print(a.pow())
# Division by zero returns 0 instead of raising ZeroDivisionError.
b = MoreFourCal(4, 0)
print(b.div())
16,888 | 490117da5dd0599a899c94d8894bad41314148ee | #!/usr/bin/env python
from dataclasses import dataclass, field
from platform import python_version_tuple
@dataclass
class Point:
    """A grid position plus a compass heading (0=N, 1=E, 2=S, 3=W)."""
    x: int = 0
    y: int = 0
    direction: int = 0

    def __repr__(self):
        """Render as 'x y <compass letter>'."""
        return f"{self.x} {self.y} {('N', 'E', 'S', 'W')[self.direction]}"
class Board(Point):
    """A bounded 6x6 board the player moves on.

    BUG FIX: switch_direction() only reduced the direction modulo
    ``waypoints`` when it was too large, so left rotations accumulated
    negative values (0 -> -1 -> -2 ...).  ``__repr__`` happened to show
    the right letter via negative indexing, but change_position() then
    picked the wrong axis (e.g. -2 should be South/y-axis yet fell into
    the x-axis branch).  The direction is now always normalized into
    [0, waypoints).
    """
    _max = 5   # inclusive upper coordinate bound
    _min = 0   # inclusive lower coordinate bound
    waypoints = 4  # number of compass directions

    def change_position(self):
        """Advance one step along the current heading, clamped to the board."""
        # N (0) and S (2) move along y; E (1) and W (3) move along x.
        if self.direction in [0, 2]:
            self._change_y()
        else:
            self._change_x()

    def _change_x(self):
        # East increases x, west decreases it; never leave the board.
        if self.direction == 1:
            if self.x < self._max:
                self.x += 1
        else:
            if self.x > self._min:
                self.x -= 1

    def _change_y(self):
        # North increases y, south decreases it; never leave the board.
        if self.direction == 0:
            if self.y < self._max:
                self.y += 1
        else:
            if self.y > self._min:
                self.y -= 1

    def switch_direction(self, rotate_direction):
        """Rotate by +1 (right) or -1 (left), wrapping around the compass."""
        # Python's % always yields a non-negative result here, so both
        # overflow (>= waypoints) and negative headings are normalized.
        self.direction = (self.direction + rotate_direction) % self.waypoints
class Player:
def __init__(self, board: Board):
self.position = board
def move(self):
self.position.change_position()
def rotate(self, direction):
pos = 1 if direction == 'r' else -1
self.position.switch_direction(pos)
def play(instr: str):
    """Run one game: apply each instruction character and return the board.

    'm' moves forward, 'l'/'r' rotate; anything else raises AttributeError.
    """
    board = Board()
    player = Player(board)
    for action in instr:
        if action == 'm':
            player.move()
            continue
        if action in ('l', 'r'):
            player.rotate(action)
            continue
        msg = "Unknown action issued %s"
        raise AttributeError(msg, action)
    return player.position
def check_python_version():
    """Raise RuntimeError when running on a Python interpreter below 3.7.

    BUG FIX: the original compared the version components as *strings*,
    so minor version "10" < "7" lexicographically and Python 3.10+ was
    wrongly rejected (and any major > 3 with minor < 7 too).  Compare the
    components as integers via tuple comparison instead.
    """
    major, minor, _ = python_version_tuple()
    msg = "Not possible to use with python version below 3.7"
    if (int(major), int(minor)) < (3, 7):
        raise RuntimeError(msg)
def main():
    """Entry point: check the interpreter version, then play one game from stdin."""
    check_python_version()
    prompt = "Play the game.\nInput M to move, R to rotate right, L to rotate left: "
    moves = input(prompt)
    print(play(moves))
# Run the game only when executed as a script.
if __name__ == '__main__':
    main()
16,889 | b027e0552b1e488ce8024b2726d3c406203dafa0 | import pymongo
import time
# Connect to the three-member replica set; the driver locates the primary.
client = pymongo.MongoClient(
    ['localhost:27002', 'localhost:27001', 'localhost:27000'],
    replicaSet='repSetTest')

# Select the test collection and drop it so each run starts clean.
collection = client.test.repTest
collection.drop()

# Insert one record to read back.
collection.insert_one({'name': 'Foo', 'age': '30'})

# Poll the record a few times so a primary failover can be observed.
for _ in range(5):
    try:
        print('fetch record: %s' % collection.find_one())
    except Exception:
        print('cannot connect to primary')
    time.sleep(3)
16,890 | 40f0df607c7a11a7a4093416c18192595045ceeb | #!/usr/bin/env python
import os
import sys
import operator
import HTSeq
def gtf_to_transcript_exons(gtf, transcript_type):
    """
    Parse gtf and return a dictionary where
    key: transcript_id
    value: list of exon entries

    Only 'exon' records are considered; when transcript_type is not
    "all", records whose transcript_biotype differs are skipped.
    Exits with status 1 when an exon record lacks the required
    attributes.
    """
    gft = HTSeq.GFF_Reader(gtf)
    transcripts = {}
    for gtf_line in gft:
        if gtf_line.type != 'exon':
            continue
        try:
            tr_id = gtf_line.attr['transcript_id']
            tr_type = gtf_line.attr['transcript_biotype']
        # BUG FIX: the original bare `except:` swallowed *every* error
        # (including KeyboardInterrupt); only a missing attribute is the
        # condition we actually want to report here.
        except KeyError:
            sys.stderr.write(f"Problem with: {gtf_line}. Exiting.{os.linesep}")
            sys.exit(1)
        if transcript_type != "all" and tr_type != transcript_type:
            continue
        # setdefault replaces the manual "first exon vs append" branch.
        transcripts.setdefault(tr_id, []).append(gtf_line)
    return transcripts
def transcript_exons_to_bed12(exons_list, transcript_id):
    """
    Convert a list of exon Genomic Intervals from a transcripts (from HTseq) to bed12 line
    """
    ordered = sorted(exons_list, key=operator.attrgetter("iv.start"))
    first_iv = ordered[0].iv
    last_iv = ordered[-1].iv
    # Transcript span = extremes of the outermost exons' coordinates.
    boundary = (first_iv.start, first_iv.end, last_iv.start, last_iv.end)
    tr_start = min(boundary)
    tr_end = max(boundary)
    # Block starts are relative to the transcript start; sizes are exon lengths.
    block_starts = [str(e.iv.start - tr_start) for e in ordered]
    block_sizes = [str(e.iv.end - e.iv.start) for e in ordered]
    fields = [
        first_iv.chrom,
        str(tr_start),
        str(tr_end),
        transcript_id,
        "1",
        first_iv.strand,
        str(tr_start),
        str(tr_end),
        "0",
        str(len(ordered)),
        ",".join(block_sizes) + ",",
        ",".join(block_starts) + ",",
    ]
    return "\t".join(fields)
16,891 | 76c40754adab57ae3656428d2ab6f9646a7fec4b | def inp(x):
return x[-1]+x[1:]+x[0]
print(inp("abcd")) |
16,892 | 16c3c2469b6f31aae5220c7c2ad830d196c5c0fc | #!/usr/bin/env python
import time
import webapp2_extras.appengine.auth.models
import webapp2_extras.appengine.sessions_ndb
from google.appengine.ext import ndb
from google.appengine.api import memcache
from webapp2_extras import sessions, security
import logging
class Session(webapp2_extras.appengine.sessions_ndb.Session):
    # Integer id of the owning user so all of a user's sessions can be found.
    user_id = ndb.IntegerProperty()
    @classmethod
    def get_by_sid(cls, sid):
        """Return the stored session *data* for a session id.

        Checks memcache first and falls back to the datastore, priming
        memcache on a datastore hit.

        :param sid:
            A session id.
        :returns:
            The session's data dict, or ``None`` when nothing is stored.
        """
        data = memcache.get(sid)
        if not data:
            session = ndb.model.Key(cls, sid).get()
            if session:
                data = session.data
                memcache.set(sid, data)
        return data
    @classmethod
    def delete_by_user_id(cls, self, user_id):
        """Invalidate and delete every stored session owned by *user_id*.

        :param self:
            NOTE(review): despite being a classmethod this takes an extra
            ``self`` -- presumably the session store/factory required by
            ``sessions.SessionDict`` -- confirm against the caller.
        :param user_id:
            Integer id of the user whose sessions are purged.
        :returns:
            The (expected-empty) list of remaining sessions for the user.
        """
        usersessions = Session.query(Session.user_id == int(user_id)).fetch()
        logging.info(usersessions)
        for session in usersessions:
            sid = session._key.id()
            logging.info(sid)
            data = Session.get_by_sid(sid)
            logging.info(data)
            # NOTE(review): sessiondict is mutated but never persisted --
            # looks like dead code; the memcache.set('') and key delete
            # below do the actual invalidation.
            sessiondict = sessions.SessionDict(self, data=data)
            sessiondict['_user'] = None
            sessiondict['user_id'] = None
            sessiondict['token'] = None
            memcache.set(sid, '')
            ndb.model.Key(Session, sid).delete()
        # Re-query to report what (if anything) survived the purge.
        usersessions = Session.query(Session.user_id == int(user_id)).fetch()
        logging.info(usersessions)
        return usersessions
class DataStoreSessionFactorExtended(webapp2_extras.appengine.sessions_ndb.DatastoreSessionFactory):
    """A session factory that stores data serialized in datastore.
    To use datastore sessions, pass this class as the `factory` keyword to
    :meth:`webapp2_extras.sessions.SessionStore.get_session`::
    from webapp2_extras import sessions_ndb
    # [...]
    session = self.session_store.get_session(
    name='db_session', factory=sessions_ndb.DatastoreSessionFactory)
    See in :meth:`webapp2_extras.sessions.SessionStore` an example of how to
    make sessions available in a :class:`webapp2.RequestHandler`.
    """
    #: The session model class (our subclass carrying user_id).
    session_model = Session
    def _get_by_sid(self, sid):
        """Returns a session given a session id."""
        if self._is_valid_sid(sid):
            # get_by_sid returns the raw data dict (memcache or datastore).
            data = self.session_model.get_by_sid(sid)
            if data is not None:
                self.sid = sid
                logging.info(sid)
                logging.info(sessions.SessionDict(self, data=data))
                return sessions.SessionDict(self, data=data)
        # Unknown or invalid sid: start a brand-new session.
        logging.info('new')
        self.sid = self._get_new_sid()
        return sessions.SessionDict(self, new=True)
    def save_session(self, response):
        """Persist the session (tagged with its user id when available)
        and refresh the secure session cookie on *response*."""
        # Nothing to do when there is no session or it was not modified.
        if self.session is None or not self.session.modified:
            return
        logging.info(self.session)
        logging.info(self.sid)
        if self.session:
            try:
                # Prefer the pre-2FA user id; fall back to the logged-in
                # user tuple.  NOTE(review): the bare excepts swallow any
                # error here, not just missing keys -- consider KeyError.
                try:
                    logging.info(self.session['user_pre_2FA'])
                    userid = self.session['user_pre_2FA']['user_id']
                except:
                    userid = self.session['_user'][0]
                logging.info('new session with user_id: ' + str(self.sid))
                self.session_model(id=self.sid, data=dict(self.session), user_id=userid)._put()
            except:
                # No user id could be extracted: store the session anonymously.
                logging.info('new session no user_id: ' + str(self.sid))
                self.session_model(id=self.sid, data=dict(self.session))._put()
        else:
            # Modified-but-empty session: store without a user id.
            logging.info('new session no user_id: ' + str(self.sid))
            self.session_model(id=self.sid, data=dict(self.session))._put()
        self.session_store.save_secure_cookie(response, self.name, {'_sid': self.sid}, **self.session_args)
class User(webapp2_extras.appengine.auth.models.User):
    """Datastore user entity extended with billing/notification fields."""
    email = ndb.StringProperty()
    lastEmailSent = ndb.DateTimeProperty()
    user_name = ndb.StringProperty()
    stripeDictString = ndb.StringProperty()
    chargeIDList = ndb.StringProperty(repeated=True)
    balanceInCents = ndb.IntegerProperty(default=0)

    def set_password(self, raw_password):
        """Hash *raw_password* and store the hash on this entity."""
        self.password = security.generate_password_hash(raw_password, length=12)

    @classmethod
    def get_by_auth_token(cls, user_id, token, subject='auth'):
        """Look up a user by id plus auth token.

        Returns ``(user, token_timestamp)`` when both the token and the
        user exist, otherwise ``(None, None)``.
        """
        token_key = cls.token_model.get_key(user_id, subject, token)
        user_key = ndb.Key(cls, user_id)
        # Fetch token and user in a single batched RPC.
        token_entity, user_entity = ndb.get_multi([token_key, user_key])
        if not (token_entity and user_entity):
            return None, None
        created_ts = int(time.mktime(token_entity.created.timetuple()))
        return user_entity, created_ts
# class Order(ndb.Model):
# exchange = ndb.StringProperty()
# amoount = ndb.FloatProperty()
# currency = ndb.StringProperty()
# state = ndb.StringProperty() # choices: pending, open, close
# user_id = ndb.IntegerProperty()
class TradeSettings(ndb.Model):
    """Per-user configuration for one arbitrage trading setup."""
    created = ndb.DateTimeProperty(auto_now_add=True)
    trade_size = ndb.FloatProperty()
    required_spread = ndb.FloatProperty()
    exchange_pair = ndb.StringProperty()
    buy_on = ndb.StringProperty()
    send_to = ndb.StringProperty()
    email_notification = ndb.StringProperty()
    coin = ndb.StringProperty()
    current_order_id = ndb.StringProperty()
    user_id = ndb.IntegerProperty()

    @property
    def get_id(self):
        """Numeric datastore id of this entity."""
        return self.key.id()

    @classmethod
    def create(cls, exchange_pair, coin, trade_size, required_spread, buy_on, send_to, email_notification):
        """Create, persist and return a new TradeSettings entity."""
        entity = cls(
            exchange_pair=exchange_pair,
            coin=coin,
            trade_size=trade_size,
            required_spread=required_spread,
            buy_on=buy_on,
            send_to=send_to,
            email_notification=email_notification,
        )
        entity.put()
        return entity
class MemcacheModel(ndb.Model):
    """Tiny persistent record remembering the last link that was handled."""
    lastLink = ndb.StringProperty()

    @property
    def get_id(self):
        """Numeric datastore id of this entity."""
        return self.key.id()

    @classmethod
    def create(cls, lastLink):
        """Create, persist and return a new MemcacheModel entity."""
        record = cls(lastLink=lastLink)
        record.put()
        return record
class IpSignupCounter(ndb.Model):
    """Tracks recent request timestamps per IP address for rate limiting."""
    ipAddress = ndb.StringProperty()
    typeString = ndb.StringProperty()
    last100RequestTimes = ndb.DateTimeProperty(repeated=True)
    timesBanned = ndb.IntegerProperty(default=0)
    created = ndb.DateTimeProperty(auto_now_add=True)

    @classmethod
    def create(cls, ipAddress, typeString, last100RequestTimes):
        """Create, persist and return a new counter entity."""
        counter = cls(
            ipAddress=ipAddress,
            typeString=typeString,
            last100RequestTimes=last100RequestTimes,
        )
        counter.put()
        return counter
|
16,893 | f5d006445b21badfbc198705a417a243e213f003 | import random
from game import constants
from game.point import Point
from game.control_actors_action import ControlActorsAction
from game.draw_actors_action import DrawActorsAction
from game.handle_collisions_action import HandleCollisionsAction
from game.move_actors_action import MoveActorsAction
from game.arcade_input_service import ArcadeInputService
from game.arcade_output_service import ArcadeOutputService
from game.reticle import Reticle
from game.entity.player import Player
from game.director import Director
import arcade
def main():
    """Simulate drones mining resources and delivering them to factories.

    Reads the scenario from ``map_2.input``, loops until every mine and
    drone is empty, writes the movement log to ``out5.txt`` and prints
    summary statistics.

    Fixes vs. the original: ``closest_factories``/``all_drones`` were
    each computed twice back-to-back (only the second assignments took
    effect), and a ternary assignment to ``to_move`` was immediately
    overwritten by the if/else that follows it -- both duplicates were
    removed without changing behavior (create_closest_dicts is assumed
    side-effect free -- confirm).
    """
    _input = input("map_2.input")
    # For each mine, its factories sorted by distance.
    closest_factories = create_closest_dicts(
        _input.mines, _input.factories, _input.dict_mines_factories
    )
    all_drones = _input.miners + _input.excavators + _input.haulers
    not_done = True
    while not_done:
        print("Remaining resources: ", sum(m.quantity for m in _input.mines), end="\r")
        for drone in all_drones:
            if len(drone.carrying_elements) == drone.capacity:
                # Drone is full: deliver to the nearest factory that
                # accepts one of the carried element types.
                closest_facts = []
                for elem in drone.carrying_elements:
                    closest_facts.append(
                        closestFactory(
                            drone,
                            get_all_factories_of_type(
                                _input.dict_mines_factories, elem.upper()
                            ),
                        )
                    )
                closest_facts.sort(key=lambda x: x[0])
                drone.move_to(closest_facts[0][1], False)
            else:
                close_mine = closestMine(drone, _input.mines, closest_factories)
                closest_facts = []
                if len(drone.carrying_elements) == 0:
                    # Empty drone: head for the closest mine, if any is left.
                    if close_mine[1] is None:
                        continue
                    drone.move_to(close_mine[1])
                    continue
                for elem in drone.carrying_elements:
                    closest_facts.append(
                        closestFactory(
                            drone,
                            get_all_factories_of_type(
                                _input.dict_mines_factories, elem.upper()
                            ),
                        )
                    )
                closest_facts.sort(key=lambda x: x[0])
                if close_mine[1] is None:
                    # No mine left: deliver whatever is being carried.
                    drone.move_to(closest_facts[0][1], False)
                    continue
                # Deliver first when the factory is closer than the mine,
                # adjusted by the mine's own closest-factory distance;
                # otherwise keep mining.
                if (
                    closest_facts[0][0]
                    < close_mine[0] - closest_factories[close_mine[1]][0][1]
                ):
                    to_move = closest_facts[0][1]
                else:
                    to_move = close_mine[1]
                drone.move_to(to_move, True)
        not_done = check_if_mines_and_drones_empty(_input.mines, all_drones)
    output("out5.txt", all_drones)
    import stats
    stats.show_stats(all_drones, _input.mines, _input.factories, _input.budget)
# Run the simulation only when executed as a script.
if __name__ == "__main__":
    main()
|
16,896 | 103be797a94a84d8b1ffc353b87a28f82d755f3b | import gspread
from oauth2client.service_account import ServiceAccountCredentials
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
from apscheduler.triggers.cron import CronTrigger
import time
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QDateTime
from PyQt5.QtWidgets import *
import telegram
import calendar
class TelBot:
    """Minimal Telegram client used to broadcast attendance notifications."""
    def __init__(self):
        # NOTE(review): placeholder credentials ('token' / 'id') -- must be
        # replaced with a real bot token and chat id before use.
        self.telgm_token = 'token'
        self.bot = telegram.Bot(token=self.telgm_token)
        self.chatId = 'id'
        # Snapshot of pending updates; only their count is kept.
        self.updates = self.bot.getUpdates()
        self.msgs = len(self.updates)
    def sendMsg(self, chatId, msg):
        # Send a plain-text message to the given chat.
        self.bot.sendMessage(chat_id=chatId, text=msg)
class Time:
    """Snapshot of the current date, exposed as YEAR/MONTH/DAY integers.

    ``getDate()`` refreshes the snapshot; the constructor just takes the
    initial one.  (The original duplicated the whole parsing block in
    ``__init__`` -- it now simply delegates to ``getDate``.)
    """
    def __init__(self):
        self.getDate()

    def getDate(self):
        """Re-read the wall clock and update datetime, YEAR, MONTH and DAY."""
        self.datetime = QDateTime.currentDateTime()
        self.YEAR = int(self.datetime.toString('yyyy'))
        self.MONTH = int(self.datetime.toString('MM'))
        self.DAY = int(self.datetime.toString('dd'))
class GDrive:
    """Google-Sheets backend holding members, demerits and attendance cells.

    Depends on a module-level ``Time`` instance named ``time`` and (in
    scheduleEveryDay) on the global ``ui`` dialog -- both are created
    elsewhere in this module.
    """
    def __init__(self):
        # NOTE(review): placeholder credential values ('url', 'key', 'mail',
        # 'id', ...) must be replaced with real service-account data.
        self.spreadsheet_url = 'url'
        self.scope = [
            'https://spreadsheets.google.com/feeds',
            'https://www.googleapis.com/auth/drive',
        ]
        self. dict = {
            "type": "service_account",
            "project_id": "worktime-265901",
            "private_key_id": "key",
            "private_key": "-----BEGIN PRIVATE KEY-----\nkey",
            "client_email": "mail",
            "client_id": "id",
            "auth_uri": "https://accounts.google.com/o/oauth2/auth",
            "token_uri": "https://oauth2.googleapis.com/token",
            "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
            "client_x509_cert_url": "url"
        }
        self.credentials = ServiceAccountCredentials.from_json_keyfile_dict(self.dict, self.scope)
        self.gc = gspread.authorize(self.credentials)
        self.wb = self.gc.open_by_url(self.spreadsheet_url)
        # Worksheet for the current month (sheet name is "<month>์").
        self.actSheet = self.wb.worksheet(str(time.MONTH) + '์')
        self.memberSheet = self.wb.worksheet('members')
        self.setSheet = self.wb.worksheet('setting')
        # Required arrival time in minutes since midnight (setting!B1).
        self.WORKTIME = int(self.setSheet.cell(1, 2).value)
        self.members = self.memberSheet.col_values(1)
        self.demerits = []
        self.getDemerits()
    # Refresh the member name list (first column of the members sheet).
    def getMembers(self):
        self.members.clear()
        self.members = self.memberSheet.col_values(1)
    # Refresh the demerit list (second column, one cell read per member).
    def getDemerits(self):
        self.demerits.clear()
        for i in range(1, len(self.members) + 1):
            self.demerits.append(int(self.memberSheet.cell(i, 2).value))
    def scheduleEveryDay(self):
        # Daily reset: re-select the current month's sheet and mark every
        # member as "absent" in the UI table (uses the global `ui` dialog).
        self.actSheet = self.wb.worksheet(str(time.MONTH) + '์')
        for i in range(len(self.members)):
            item = QTableWidgetItem('๋ฏธ์ถ๊ทผ')
            item.setTextAlignment(QtCore.Qt.AlignCenter)
            ui.tableWidget.setItem(i, 1, item)
class Ui_Dialog(object):
    def __init__(self):
        """Create every child widget; they are positioned later in setupUi.

        NOTE(review): relies on a module-level ``Dialog`` existing before
        this class is instantiated -- confirm creation order.
        """
        self.tabWidget = QtWidgets.QTabWidget(Dialog)
        # Tab 1: attendance view.
        self.tab = QtWidgets.QWidget()
        self.label = QtWidgets.QLabel(self.tab)
        self.label_2 = QtWidgets.QLabel(self.tab)
        self.label_3 = QtWidgets.QLabel(self.tab)
        self.comboBox = QtWidgets.QComboBox(self.tab)
        self.pushButton = QtWidgets.QPushButton(self.tab)
        self.pushButton_refresh = QtWidgets.QPushButton(self.tab)
        self.tableWidget = QtWidgets.QTableWidget(self.tab)
        # Tab 2: settings (member add/remove, work-time configuration).
        self.tab_2 = QtWidgets.QWidget()
        self.label_4 = QtWidgets.QLabel(self.tab_2)
        self.textEdit = QtWidgets.QTextEdit(self.tab_2)
        self.pushButton_2 = QtWidgets.QPushButton(self.tab_2)
        self.pushButton_3 = QtWidgets.QPushButton(self.tab_2)
        self.label_5 = QtWidgets.QLabel(self.tab_2)
        self.textEdit_2 = QtWidgets.QTextEdit(self.tab_2)
        self.label_6 = QtWidgets.QLabel(self.tab_2)
        self.textEdit_workHour = QtWidgets.QTextEdit(self.tab_2)
        self.textEdit_workMin = QtWidgets.QTextEdit(self.tab_2)
        self.label_7 = QtWidgets.QLabel(self.tab_2)
        self.label_8 = QtWidgets.QLabel(self.tab_2)
        self.pushButton_workTime = QtWidgets.QPushButton(self.tab_2)
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(339, 630)
self.tabWidget.setGeometry(QtCore.QRect(0, 0, 341, 631))
self.tabWidget.setObjectName("tabWidget")
self.tab.setObjectName("tab")
self.label.setGeometry(QtCore.QRect(2, 0, 331, 51))
font = QtGui.QFont()
font.setFamily("๋ฐฐ๋ฌ์๋ฏผ์กฑ ์์ง๋ก์ฒด")
font.setPointSize(25)
self.label.setFont(font)
self.label.setLayoutDirection(QtCore.Qt.LeftToRight)
self.label.setLineWidth(1)
self.label.setTextFormat(QtCore.Qt.AutoText)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setIndent(-1)
self.label.setObjectName("label")
self.label_2.setGeometry(QtCore.QRect(0, 50, 71, 31))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(12)
self.label_2.setFont(font)
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName("label_2")
self.label_3.setGeometry(QtCore.QRect(85, 51, 241, 31))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(12)
self.label_3.setFont(font)
self.label_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3.setObjectName("label_3")
self.tabWidget.addTab(self.tab, "")
self.comboBox.setGeometry(QtCore.QRect(8, 91, 171, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.comboBox.setFont(font)
self.comboBox.setObjectName("comboBox")
self.comboBox.currentTextChanged.connect(self._pullComboText)
self.pushButton.setGeometry(QtCore.QRect(190, 90, 71, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.pushButton.setFont(font)
self.pushButton.setObjectName("pushButton")
self.pushButton.clicked.connect(self._pushButtonClicked)
self.pushButton_refresh.setGeometry(QtCore.QRect(265, 90, 61, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(11)
self.pushButton_refresh.setFont(font)
self.pushButton_refresh.setObjectName("pushButton_refresh")
self.pushButton_refresh.clicked.connect(self._pushButton_refreshClicked)
self.tableWidget.setGeometry(QtCore.QRect(5, 151, 321, 451))
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(3)
self.tableWidget.setRowCount(len(gdrive.members))
self.tableWidget.setHorizontalHeaderLabels(["์ด๋ฆ", "์ถ๊ทผ์๊ฐ", "๋ฒ์ "])
self.tableWidget.setEditTriggers(QtWidgets.QTableWidget.NoEditTriggers)
header = self.tableWidget.horizontalHeader()
header.setSectionResizeMode(0, QtWidgets.QHeaderView.ResizeToContents)
header.setSectionResizeMode(1, QtWidgets.QHeaderView.Stretch)
header.setSectionResizeMode(2, QtWidgets.QHeaderView.ResizeToContents)
self.tabWidget.addTab(self.tab, "")
self.tab_2.setObjectName("tab_2")
self.tabWidget.addTab(self.tab_2, "")
self.label_4.setGeometry(QtCore.QRect(10, 10, 91, 31))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.label_4.setFont(font)
self.label_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_4.setObjectName("label_4")
self.textEdit.setGeometry(QtCore.QRect(10, 50, 211, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(22)
self.textEdit.setFont(font)
self.textEdit.setObjectName("textEdit")
self.pushButton_2.setGeometry(QtCore.QRect(230, 50, 101, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.pushButton_2.setFont(font)
self.pushButton_2.setObjectName("pushButton_2")
self.pushButton_2.clicked.connect(self._pushButton2Clicked)
self.pushButton_3.setGeometry(QtCore.QRect(230, 160, 101, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.pushButton_3.setFont(font)
self.pushButton_3.setObjectName("pushButton_3")
self.pushButton_3.clicked.connect(self._pushButton3Clicked)
self.label_5.setGeometry(QtCore.QRect(10, 120, 91, 31))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.label_5.setFont(font)
self.label_5.setAlignment(QtCore.Qt.AlignCenter)
self.label_5.setObjectName("label_5")
self.textEdit_2.setGeometry(QtCore.QRect(10, 160, 211, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(22)
self.textEdit_2.setFont(font)
self.textEdit_2.setObjectName("textEdit_2")
self.tabWidget.addTab(self.tab_2, "")
self.label_6.setGeometry(QtCore.QRect(10, 240, 91, 31))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.label_6.setFont(font)
self.label_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_6.setObjectName("label_6")
self.textEdit_workHour.setGeometry(QtCore.QRect(10, 280, 71, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(22)
self.textEdit_workHour.setFont(font)
self.textEdit_workHour.setObjectName("textEdit_workHour")
self.textEdit_workMin.setGeometry(QtCore.QRect(130, 280, 71, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(22)
self.textEdit_workMin.setFont(font)
self.textEdit_workMin.setObjectName("textEdit_workMin")
self.label_7.setGeometry(QtCore.QRect(85, 285, 41, 41))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(22)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.label_8.setGeometry(QtCore.QRect(207, 285, 41, 41))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(22)
self.label_8.setFont(font)
self.label_8.setObjectName("label_8")
self.pushButton_workTime.setGeometry(QtCore.QRect(250, 280, 71, 51))
font = QtGui.QFont()
font.setFamily("๋ง์ ๊ณ ๋")
font.setPointSize(13)
self.pushButton_workTime.setFont(font)
self.pushButton_workTime.setObjectName("pushButton_workTime")
self.tabWidget.addTab(self.tab_2, "")
self.pushButton_workTime.clicked.connect(self._pushButton_workTimeClicked)
self.retranslateUi(Dialog)
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(Dialog)
    def retranslateUi(self, Dialog):
        """Set all user-visible strings (Korean UI text and default values)."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "AILab ์ถ๊ทผ ๊ธฐ๋ก๊ธฐ"))
        self.label.setText(_translate("Dialog", "AILab ์ถ๊ทผ ๊ธฐ๋ก๊ธฐ"))
        self.label_2.setText(_translate("Dialog", "ํ์ฌ์๊ฐ"))
        # Clock label: initialized with the current timestamp.
        self.label_3.setText(_translate("Dialog", time.datetime.toString('yyyy.MM.dd hh:mm:ss')))
        self.pushButton.setText(_translate("Dialog", "์ถ๊ทผ"))
        self.pushButton_refresh.setText(_translate("Dialog", "์๋ก\n๊ณ ์นจ"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("Dialog", "์ถ๊ทผ"))
        self.label_4.setText(_translate("Dialog", "์ธ์ ์ถ๊ฐ"))
        # NOTE(review): the HTML literals below contain what looks like a
        # corrupted/garbled placeholder text token split across lines --
        # verify against the original source file.
        self.textEdit.setHtml(_translate("Dialog",
                                         "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
                                         "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
                                         "p, li { white-space: pre-wrap; }\n"
                                         "</style></head><body style=\" font-family:\'๋ง์ ๊ณ ๋\'; font-size:24pt; font-weight:400; font-style:normal;\">\n"
                                         "<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Gulim\'; vertical-align:middle;\">์ด๋ฆ์
๋ ฅ</span></p></body></html>"))
        self.pushButton_2.setText(_translate("Dialog", "์ถ๊ฐ"))
        self.pushButton_3.setText(_translate("Dialog", "์ญ์ "))
        self.label_5.setText(_translate("Dialog", "์ธ์ ์ญ์ "))
        self.textEdit_2.setHtml(_translate("Dialog",
                                           "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
                                           "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
                                           "p, li { white-space: pre-wrap; }\n"
                                           "</style></head><body style=\" font-family:\'๋ง์ ๊ณ ๋\'; font-size:24pt; font-weight:400; font-style:normal;\">\n"
                                           "<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Gulim\'; vertical-align:middle;\">์ด๋ฆ์
๋ ฅ</span></p></body></html>"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("Dialog", "์ค์ "))
        self.label_6.setText(_translate("Dialog", "์ถ๊ทผ ์๊ฐ"))
        self.label_7.setText(_translate("Dialog", "์"))
        self.label_8.setText(_translate("Dialog", "๋ถ"))
        self.pushButton_workTime.setText(_translate("Dialog", "๋ณ๊ฒฝ"))
        # Work-time edits pre-filled as zero-padded hours/minutes.
        self.textEdit_workHour.setText("%02d" % (gdrive.WORKTIME // 60))
        self.textEdit_workMin.setText("%02d" % (gdrive.WORKTIME % 60))
        # NOTE(review): the 3-argument translate("Form", ..., None) calls
        # below use the Qt4-style signature and a different context
        # ("Form") than the rest of this method -- verify against PyQt5.
        item = self.tableWidget.horizontalHeaderItem(0)
        item.setText(_translate("Form", "์ด๋ฆ", None))
        item = self.tableWidget.horizontalHeaderItem(1)
        item.setText(_translate("Form", "์ถ๊ทผ์๊ฐ", None))
        item = self.tableWidget.horizontalHeaderItem(2)
        item.setText(_translate("Form", "๋ฒ์ ", None))
        self.comboBox.addItems(gdrive.members)
def initTable(self):
today_rcd = gdrive.actSheet.col_values(time.DAY + 1)
del today_rcd[0]
for i in range(len(gdrive.members)):
item = QTableWidgetItem(gdrive.members[i])
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tableWidget.setItem(i, 0, item)
item = QTableWidgetItem(today_rcd[i])
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tableWidget.setItem(i, 1, item)
item = QTableWidgetItem(str(gdrive.demerits[i]))
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tableWidget.setItem(i, 2, item)
def setTime(self):
_translate = QtCore.QCoreApplication.translate
self.label_3.setText(_translate("Dialog", time.datetime.toString('yyyy.MM.dd hh:mm:ss')))
    def _pushButtonClicked(self):
        """Clock in the member currently selected in the combo box.

        Only acts when today's table cell still shows the "not clocked in"
        marker.  Late arrivals (past gdrive.WORKTIME, in minutes since
        midnight) earn demerit points; the arrival time is written to the
        table and the spreadsheet, and a Telegram message is sent.
        """
        if self.name in gdrive.members:
            index = gdrive.members.index(self.name)
            if self.tableWidget.item(index, 1).text() == '๋ฏธ์ถ๊ทผ':
                # Current time as minutes since midnight.
                comHour = int(time.datetime.toString('hh'))
                comeMin = int(time.datetime.toString('mm'))
                comeTime = 60 * comHour + comeMin
                if comeTime - gdrive.WORKTIME > 0:
                    # Demerits grow with lateness: floor((late_minutes - 1) / 10),
                    # i.e. roughly one point per completed 10-minute block.
                    gdrive.demerits[index] = gdrive.demerits[index] + (comeTime - gdrive.WORKTIME - 1) // 10
                    item = QTableWidgetItem(str(gdrive.demerits[index]))
                    item.setTextAlignment(QtCore.Qt.AlignCenter)
                    self.tableWidget.setItem(index, 2, item)
                # Record the arrival time in the table ...
                item = QTableWidgetItem(time.datetime.toString('hh:mm:ss'))
                item.setTextAlignment(QtCore.Qt.AlignCenter)
                self.tableWidget.setItem(index, 1, item)
                # ... announce it via the Telegram bot ...
                telBot.sendMsg(telBot.chatId, time.datetime.toString('hh:mm:ss') + '\n' + self.name + '๋์ด ์ถ๊ทผํ์ต๋๋ค.\n')
                # ... and persist both the arrival and the demerits to the sheets.
                cell = gdrive.actSheet.find(self.name)
                gdrive.actSheet.update_cell(cell.row, time.DAY + 1, time.datetime.toString('hh:mm:ss'))
                cell = gdrive.memberSheet.find(self.name)
                gdrive.memberSheet.update_cell(cell.row, 2, gdrive.demerits[index])
    def _pushButton2Clicked(self):
        """Add a new member to the member/attendance sheets and to the table.

        Does nothing (shows a warning box) when the name box still contains
        the placeholder text.
        """
        newMember = self.textEdit.toPlainText()
        num_members = len(gdrive.members)
        # Skip the untouched placeholder text in the name box.
        if newMember != '์ด๋ฆ์
๋ ฅ':
            # Member sheet: name in column 1, demerits start at 0.
            gdrive.memberSheet.update_cell(num_members + 1, 1, newMember)
            gdrive.memberSheet.update_cell(num_members + 1, 2, 0)
            lastday = calendar.monthrange(time.YEAR, time.MONTH)[1]
            gdrive.actSheet.update_cell(num_members + 2, 1, newMember)
            # Copy the holiday pattern from the reference row (row 2); every
            # non-holiday day starts as "not clocked in".
            cell_tmp = gdrive.actSheet.row_values(2)
            del cell_tmp[0]
            cell_list = gdrive.actSheet.range(num_members + 2, 2, num_members + 2, lastday + 1)
            for i in range(len(cell_list)):
                if cell_tmp[i] == 'ํด์ผ':
                    cell_list[i].value = 'ํด์ผ'
                else:
                    cell_list[i].value = '๋ฏธ์ถ๊ทผ'
            gdrive.actSheet.update_cells(cell_list)
            # Re-sync the cached member/demerit lists, then refresh the UI.
            gdrive.getMembers()
            gdrive.getDemerits()
            num_members = len(gdrive.members)
            self.comboBox.addItems(gdrive.members)
            self.tableWidget.setRowCount(num_members)
            row = self.tableWidget.rowCount()
            item = QTableWidgetItem(gdrive.members[-1])
            item.setTextAlignment(QtCore.Qt.AlignCenter)
            self.tableWidget.setItem(row - 1, 0, item)
            item = QTableWidgetItem(str(gdrive.demerits[-1]))
            item.setTextAlignment(QtCore.Qt.AlignCenter)
            self.tableWidget.setItem(row - 1, 2, item)
            item = QTableWidgetItem('๋ฏธ์ถ๊ทผ')
            item.setTextAlignment(QtCore.Qt.AlignCenter)
            self.tableWidget.setItem(row - 1, 1, item)
            QMessageBox.about(Dialog, '์ถ๊ฐ ์๋ฃ', "'" + newMember + "'" + '์(๋ฅผ) ์ถ๊ฐํ์ต๋๋ค.')
        else:
            QMessageBox.about(Dialog, '์ถ๊ฐ ์คํจ', '์ด๋ฆ์ ์
๋ ฅํด์ฃผ์ธ์')
    def _pushButton3Clicked(self):
        """Delete a member from all worksheets and from the table.

        Shows a warning box when the name box still contains the placeholder
        text or when the name is unknown (gspread CellNotFound).
        """
        delMember = self.textEdit_2.toPlainText()
        # Skip the untouched placeholder text in the name box.
        if delMember != '์ด๋ฆ์
๋ ฅ':
            try:
                cell = gdrive.memberSheet.find(delMember)
                gdrive.memberSheet.delete_row(cell.row)
                cell = gdrive.actSheet.find(delMember)
                gdrive.actSheet.delete_row(cell.row)
                # Also drop the row from the last worksheet when it differs
                # from the active one.
                tmpSheet = gdrive.wb.get_worksheet(-1)
                if gdrive.actSheet.title != tmpSheet.title:
                    cell = tmpSheet.find(delMember)
                    tmpSheet.delete_row(cell.row)
                # Remove the matching table row, then re-sync the caches.
                for i in range(self.tableWidget.rowCount()):
                    if self.tableWidget.item(i, 0).text() == delMember:
                        self.tableWidget.removeRow(i)
                        break
                gdrive.getMembers()
                gdrive.getDemerits()
                self.comboBox.addItems(gdrive.members)
                self.tableWidget.setRowCount(len(gdrive.members))
                QMessageBox.about(Dialog, '์ญ์ ์๋ฃ', "'" + delMember + "'" + '์(๋ฅผ) ์ญ์ ํ์ต๋๋ค.')
            except gspread.exceptions.CellNotFound:
                QMessageBox.about(Dialog, '์ญ์ ์คํจ', '์๋ ์ฌ๋ ์
๋๋ค.')
        else:
            QMessageBox.about(Dialog, '์ญ์ ์คํจ', '์ด๋ฆ์ ์
๋ ฅํด์ฃผ์ธ์')
def _pushButton_workTimeClicked(self):
hour = int(self.textEdit_workHour.toPlainText())
min = int(self.textEdit_workMin.toPlainText())
gdrive.WORKTIME = 60 * hour + min
gdrive.setSheet.update_cell(1, 2, gdrive.WORKTIME)
msg = '์ถ๊ทผ์๊ฐ์ %02d์ %02d๋ถ์ผ๋ก ๋ณ๊ฒฝํ์ต๋๋ค.' % (hour, min)
QMessageBox.about(Dialog, '๋ณ๊ฒฝ ์๋ฃ', msg)
    def _pushButton_refreshClicked(self):
        """Reload demerits and today's attendance column into the table."""
        gdrive.getDemerits()
        datas = gdrive.actSheet.col_values(time.DAY + 1)
        del datas[0]  # first cell is the sheet header, not a member row
        for row in range(len(datas)):
            item = QTableWidgetItem(datas[row])
            item.setTextAlignment(QtCore.Qt.AlignCenter)
            self.tableWidget.setItem(row, 1, item)
            item = QTableWidgetItem(str(gdrive.demerits[row]))
            item.setTextAlignment(QtCore.Qt.AlignCenter)
            self.tableWidget.setItem(row, 2, item)
        QMessageBox.about(Dialog, '์๋ฃ', '์๋ก๊ณ ์นจ ๋์์ต๋๋ค.')
    def _pullComboText(self, text):
        """Combo-box handler: remember the currently selected member name."""
        self.name = text
if __name__ == "__main__":
    # Wire up the global helpers used throughout the dialog, show the UI,
    # then start the background jobs.
    telBot = TelBot()
    time = Time()
    gdrive = GDrive()
    app = QtWidgets.QApplication(sys.argv)
    Dialog = QtWidgets.QDialog()
    ui = Ui_Dialog()
    ui.setupUi(Dialog)
    Dialog.show()
    ui.initTable()
    # Background scheduler: tick the clock/date every second and roll the
    # sheets over one minute after midnight.
    sched = BackgroundScheduler()
    sched.add_job(ui.setTime, IntervalTrigger(seconds=1), id='setTime')
    sched.add_job(time.getDate, IntervalTrigger(seconds=1), id='getDate')
    sched.add_job(gdrive.scheduleEveryDay, CronTrigger(hour='0', minute='1', second='0'), id='scheduleEveryDay')
    sched.start()
    sys.exit(app.exec_())
16,897 | 244117abc7ef1ff846acce723363020f0c1e286b | import requests
import json
import time
import os
import base64
import cv2 as cv
from requests.exceptions import Timeout
def encode_img(str_img_path):
    """Read an image file and return its JPEG bytes base64-encoded as ASCII.

    Raises ValueError when the file cannot be read or encoded, instead of
    letting a silent None from cv.imread crash later in cv.imencode.
    """
    img = cv.imread(str_img_path)
    if img is None:  # cv.imread returns None on failure rather than raising
        raise ValueError("cannot read image: %s" % str_img_path)
    ok, imdata = cv.imencode('.JPG', img)
    if not ok:
        raise ValueError("JPEG encoding failed: %s" % str_img_path)
    return base64.b64encode(imdata).decode('ascii')
def main():
    """Stress-test the DL server: collect every .jpg under the dataset
    folder and POST each one forever, printing timing and timeout stats."""
    strpath = "C:/Users/41162395/Desktop/Dataset/"
    print("path image : "+strpath)
    list_imgfile = []
    for root, directory, file in os.walk(strpath):
        for file_selected in file:
            if '.jpg' in file_selected:
                # BUGFIX: `root` has no trailing separator for subdirectories,
                # so the original `root+file_selected` built broken paths.
                list_imgfile.append(os.path.join(root, file_selected))
    while (True):
        errCnt = 0
        gttl = 0
        addr = 'http://10.151.22.202/DL_Server'
        #addr = 'http://4320cf5fd8d3.ngrok.io/DL_Server'
        connection_url = addr + '/api/dl'
        r = requests.get(connection_url, timeout=5)
        for f in list_imgfile: #while (True):
            sum_ct = 0
            for cnt in range(1):
                try:
                    st_time = time.time()
                    # NOTE(review): this POST has no timeout=, so the Timeout
                    # handler below can only fire for the GET above -- consider
                    # adding timeout=5 here if timeouts should be counted.
                    response = requests.post(connection_url,data={'Image' : encode_img(f),
                                                                  'BOM':'ST',
                                                                  'Operation' : 'cut',
                                                                  'Process' : 'hairline',
                                                                  'ProcessNO':'223',
                                                                  'Client_ID':'MC-09'})
                    cycle_time = time.time() - st_time
                    sum_ct = sum_ct + cycle_time
                    avg_ct = sum_ct / (cnt+1)
                    gttl = gttl + 1
                except Timeout:
                    errCnt = errCnt + 1
                    print(errCnt,'Timed Out')
                else:
                    print (gttl,json.loads(response.text),f,' cyltime(mS):',round(avg_ct*1e3),round(cycle_time*1e3),r)
                #cv2.waitKey(1000)
        print('Error Time out: %, hit',errCnt/50000*100,errCnt)
# Script entry point.
if __name__ == '__main__':
    main()
|
16,898 | 48fe96efa3016bde5245bc98bbec66fd0e2eae67 | import cv2
import numpy as np
# Detect "good features to track" (Shi-Tomasi corners) and display them.
img = cv2.imread('./img/house.jpg')
if img is None:  # cv2.imread returns None instead of raising on failure
    raise FileNotFoundError('cannot read ./img/house.jpg')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gftt = cv2.GFTTDetector_create()
keypoints = gftt.detect(gray, None)
img_draw = cv2.drawKeypoints(img, keypoints, None)
# Window title typo fixed: was 'GFTTDectector'.
cv2.imshow('GFTTDetector', img_draw)
cv2.waitKey(0)
cv2.destroyAllWindows()
16,899 | bb08a0b6c131393c91d97d0ce843813df1030afb | #!/usr/bin/env python
# $Id: report_client.py 174393 2018-03-13 16:40:03Z kbu $
#
# Copyright (C) 1990 - 2009 CONTACT Software GmbH
# All rights reserved.
# http://www.contact.de/
"""
report client implementation
"""
import io
import os
import pickle
import shutil
import traceback
from cdb import CADDOK
from cdb import cdbuuid
from cdb import misc
from cdb import mq
from cdb import ue
from cdb.objects import ByID
from cdb.objects.pdd import Sandbox
from cs.tools.powerreports import XMLReportTemplate
from cs.tools.powerreports.reportserver.report_proxy import ReportProxy
from cs.tools.powerreports.reportserver.reportlib import getConfValue, log
__all__ = ['ReportClient',
'ReportClientRemote',
'ReportClientMQ']
class ReportClient(object):
    """Base report client.

    Checks out the XML report template and exports the report data into a
    private sandbox, then generates the report locally.  Subclasses override
    create_report() to delegate the generation step (remote proxy, queue).
    """
    def __init__(self, tmpl_cdb_object_id, cdb_file_cdb_object_id, **args):
        """Create a sandbox for the given template/file object ids.

        ``args`` is expected to carry a ``__sys_args__`` dict (source object
        id, context objects, report format, target name, ...); without it no
        report can be produced (only logged, not raised).
        """
        self.tmpl_cdb_object_id = tmpl_cdb_object_id
        self.cdb_file_cdb_object_id = cdb_file_cdb_object_id
        self.args = args
        self.sys_args = args.get("__sys_args__", None)
        if not self.sys_args:
            log("__sys_args__ missing in args. Cannot create report", 0, misc.kLogErr)
        # Unique name so parallel clients never share a sandbox.
        self.sandbox_name = "report_client__%s" % (cdbuuid.create_uuid())
        # create an explicit sandbox to control the removal
        self.sandbox = Sandbox(self.sandbox_name)
        self.workdir = self.sandbox._location
        log("ReportClient.__init__(): sandbox is '%s'" % self.workdir)
        self.xml_fname = None
    def __del__(self):
        # Best-effort sandbox cleanup on garbage collection.
        log("ReportClient.__del__ (base)")
        self._cleanup()
    def _get_xml_source(self, args):
        """Resolve args['source'] to an object via ByID; None if missing."""
        source = None
        if "source" in args:
            source = ByID(args["source"])
        else:
            log("ReportClient: XML Source not found in args.", 0, misc.kLogErr)
        return source
    def _cleanup(self):
        # Clear and drop the sandbox exactly once (guards repeated __del__).
        if self.sandbox:
            self.sandbox.clear()
            self.sandbox = None
    def _get_context_objects(self, args):
        """Resolve args['objects'] (object ids) to objects; may be empty."""
        objects = []
        if "objects" in args and args["objects"]:
            objects = [ByID(o) for o in args["objects"]]
        return objects
    def _export_xml_data(self):
        """Export the report data as XML via the configured source object.

        Returns the exported XML file name, or None when no source is
        configured in sys_args.
        """
        # Export data from xml source
        objects = self._get_context_objects(self.sys_args)
        xml_source_obj = self._get_xml_source(self.sys_args)
        if not xml_source_obj:
            log("source missing in sys_args. Cannot create report.", 0, misc.kLogErr)
            return
        log("ReportClient: START EXPORT")
        xml_fname = xml_source_obj.export_ex(objects, self.template_fname,
                                             self.tmpl_cdb_object_id,
                                             self.cdb_file_cdb_object_id, "0",
                                             **self.args)
        log("ReportClient: END EXPORT")
        return xml_fname
    def _checkout_template(self):
        """Check the report template file out into the sandbox.

        Raises ue.Exception when the template or its file cannot be found.
        Returns the local path of the checked-out template file.
        """
        tmpl = XMLReportTemplate.KeywordQuery(cdb_object_id=self.tmpl_cdb_object_id)
        if not tmpl:
            raise ue.Exception("powerreports_tmpl_not_found", self.tmpl_cdb_object_id)
        tmpl = tmpl[0]
        cdbfile = tmpl.Files[0]
        if not cdbfile:
            raise ue.Exception("powerreports_tmpl_file_not_found", self.cdb_file_cdb_object_id)
        self.sandbox.add(cdbfile)
        if self.sandbox.status(cdbfile) == Sandbox.NEEDS_UPDATE:
            self.sandbox.checkout(cdbfile)
        template_fname = self.sandbox.pathname(cdbfile)
        log("ReportClient: Checked out report template: %s" % template_fname)
        return template_fname
    def _pickle_args(self, args_basename):
        """Pickle self.args to <workdir>/<args_basename>; return its path."""
        args_fname = misc.jail_filename(self.workdir, args_basename)
        # NOTE(review): `unicode` exists only on Python 2 -- this module
        # appears to be Python-2 only; verify before porting.
        assert isinstance(args_fname, unicode)
        with io.open(args_fname, "wb") as f:
            p = pickle.Pickler(f)
            p.dump(self.args)
        return args_fname
    # extraction of report template and report data
    def _prepare_report(self):
        # Fetch the template, then export the XML data alongside it.
        self.template_fname = self._checkout_template()
        self.xml_fname = self._export_xml_data()
    def _copy_results(self, dst_path, gen_ret):
        """Copy generated xls/pdf results to dst_path, if given and existing.

        Which files are copied depends on sys_args['report_format'].
        Returns gen_ret with the copied destination paths patched in.
        """
        ret = gen_ret
        def _copy(src_fname, dst_path):
            # Copy one result file into dst_path, keeping its base name.
            dst_fname = os.path.join(dst_path, os.path.basename(src_fname))
            log("Copying '%s' to '%s'" % (src_fname, dst_fname))
            shutil.copyfile(src_fname, dst_fname)
            return dst_fname
        if dst_path:
            if not os.path.exists(dst_path):
                log("Destination path '%s' does not exist" % dst_path, 0, misc.kLogErr)
            else:
                report_format = self.sys_args["report_format"]
                if any(f in report_format for f in ["Excel", "E-Link"]):
                    ret["xls"] = _copy(gen_ret["xls"], dst_path)
                if any(f in report_format for f in ["PDF", "E-Link"]):
                    ret["pdf"] = _copy(gen_ret["pdf"], dst_path)
        return ret
    def create_report(self, target_path=None):
        """Generate the report locally; optionally copy results to target_path.

        Returns a dict with keys 'status', 'xls' and 'pdf'; on failure
        'status' carries the error text.  Never raises: the `return` in the
        `finally` clause also suppresses in-flight exceptions.
        """
        # IMPORTANT: import of report_generator requires win32!
        from cs.tools.powerreports.reportserver.report_generator import ReportGenerator
        ret = {"status": "",
               "xls": None,
               "pdf": None}
        try:
            log("ReportRemote.create_report(): workdir: '%s', target_path '%s'" %
                (self.workdir, target_path))
            self._prepare_report()
            # NOTE(review): `Message` is not imported in this module --
            # presumably injected globally at runtime; verify.
            ret = ReportGenerator().create_report(
                self.workdir, self.template_fname,
                self.xml_fname,
                self.sys_args.get("target",
                                  "%s Report" % Message.GetMessage("branding_product_name")),
                ("PDF" in self.sys_args["report_format"]))
            if ret["status"] == "OK":
                ret = self._copy_results(target_path, ret)
        except Exception as ex:
            if ret["status"] in ["", "OK"]:
                ret["status"] = "%s" % ex
            log("%s" % traceback.format_exc())
        finally:
            log("ReportRemote.create_report(): result is '%s'" % (ret))
            return ret
class ReportClientRemote(ReportClient):
    """Report client that delegates generation to a remote report proxy."""

    def create_report(self, target_path=None):
        """Prepare the report inputs and have the remote proxy build the report.

        Returns a dict with keys 'status', 'xls' and 'pdf'.  Never raises:
        returning from `finally` also suppresses in-flight exceptions,
        matching the base-class behaviour.
        """
        result = {"status": "", "xls": None, "pdf": None}
        try:
            log("ReportClientRemote.create_report(): workdir: '%s', target_path '%s'" %
                (self.workdir, target_path))
            proxy = ReportProxy()
            self._prepare_report()
            result = proxy.create(self.workdir, self.template_fname, self.xml_fname, self.args)
            if result["status"] == "OK":
                result = self._copy_results(target_path, result)
        except Exception as ex:
            if result["status"] in ["", "OK"]:
                result["status"] = "%s" % ex
            log("%s" % traceback.format_exc())
        finally:
            log("ReportClientRemote.create_report(): result is '%s'" % (result))
            return result
class ReportClientMQ(ReportClient):
    """Report client that enqueues report generation as a message-queue job."""

    def create_report(self):
        """Queue a report job carrying the pickled call arguments as payload."""
        # Payload directory for the queue, configurable via REPORT_QUEUE_TEMPFOLDER.
        payload_dir = os.path.join(
            CADDOK.TMPDIR,
            getConfValue("REPORT_QUEUE_TEMPFOLDER", "report_queue_payload"))
        queue = mq.Queue("xsd_reports", payloaddir=payload_dir)
        job = queue.new(cdbf_object_id=self.tmpl_cdb_object_id)
        # Pickle the report arguments and ship them with the job.
        args_basename = "report_args"
        job.add_attachment(args_basename, self._pickle_args(args_basename))
        job.start()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.