content
stringlengths 0
1.05M
| origin
stringclasses 2
values | type
stringclasses 2
values |
|---|---|---|
from django.contrib import admin
from django.contrib.admin import register
from bbbs.main.models import Main
from bbbs.users.utils import AdminOnlyPermissionsMixin
from .forms import MainAdminForm
@register(Main)
class MainAdmin(AdminOnlyPermissionsMixin, admin.ModelAdmin):
    """Admin configuration for the singleton ``Main`` page model."""

    empty_value_display = "-пусто-"
    filter_horizontal = ("questions", "articles", "movies")
    form = MainAdminForm

    def has_add_permission(self, request):
        # Only one Main record may exist: adding is allowed only while the
        # table is empty.
        return not Main.objects.first()

    def has_delete_permission(self, request, obj=None):
        # The singleton record must never be deleted through the admin.
        return False
|
nilq/baby-python
|
python
|
""" uTorrent migration to qBittorrent module """
from tkinter import Tk, StringVar, N, W, E, S, filedialog, messagebox, HORIZONTAL
from tkinter.ttk import Frame, Entry, Button, Label, Progressbar
from shutil import copy
from os import path
from hashlib import sha1
from time import time
from re import compile as re_compile
from tpp.bencodepy import encode as bencode
from tpp.bencodepy import decode as bdecode
from tpp.bencodepy import DecodingError
# Template of default libtorrent fastresume fields. mkfr() fills in the
# per-torrent values; the list values below (file sizes, file_priority,
# mapped_files, trackers) are placeholder examples that get replaced.
# NOTE(review): mkfr() must copy this dict rather than mutate it in place,
# otherwise state leaks between torrents.
FIELD_MAP = {"active_time" : 0,
             "added_time" : 0,
             "allocation" : "full",
             "announce_to_dht" : 1,
             "announce_to_lsd" : 1,
             "announce_to_trackers" : 1,
             "auto_managed" : 1,
             "banned_peers" : "",
             "banned_peers6" : "",
             "blocks per piece" : 0,
             "completed_time" : 0,
             "download_rate_limit" : 0,
             "file sizes" : [[0, 0], [0, 0], [0, 0]],
             "file-format" : "libtorrent resume file",
             "file-version" : 1,
             "file_priority" : [2, 0, 1],
             "finished_time" : 0,
             "info-hash" : "",
             "last_download" : 0,
             "last_scrape" : 0,
             "last_seen_complete" : 0,
             "last_upload" : 0,
             "libtorrent-version" : "0.16.19.0",
             "mapped_files" : ["relative\\path\\to\\file1.ext", "r\\p\\t\\file2.ext", "file3.ext"],
             "max_connections" : 100,
             "max_uploads" : 16777215,
             "num_downloaders" : 16777215,
             "num_incomplete" : 0,
             "num_seeds" : 0,
             "paused" : 0,
             "peers" : "",
             "peers6" : "",
             "piece_priority" : "",
             "pieces" : "",
             "seed_mode" : 0,
             "seeding_time" : 0,
             "sequential_download" : 0,
             "super_seeding" : 0,
             "total_downloaded" : 0,
             "total_uploaded" : 0,
             "upload_rate_limit" : 0,
             "trackers" : [["https://tracker"]]}
def mkfr(res, tor):
    """Create a libtorrent fastresume dict for one torrent.

    @res uTorrent resume.dat entry for this torrent.
    @tor decoded .torrent file dict.
    """
    # Copy the template: assigning FIELD_MAP directly mutated the shared
    # module-level dict, leaking values (e.g. qBt-label) between torrents.
    # All list-valued keys that get mutated are reassigned below, so a
    # shallow copy is sufficient.
    qbt_torrent = dict(FIELD_MAP)
    time_now = int(time())
    # 'pieces' is a raw concatenation of SHA1 digests (20 bytes each).
    # Decoded strings/bytes have no `.size` attribute; use len().
    pieces_num = int(len(tor['info']['pieces']) / 20)
    qbt_torrent['added_time'] = int(res['added_on'])
    qbt_torrent['completed_time'] = int(res['completed_on'])
    qbt_torrent['active_time'] = int(res['runtime'])
    qbt_torrent['seeding_time'] = qbt_torrent['active_time']
    qbt_torrent['blocks per piece'] = int(int(tor['info']['piece length']) / int(res['blocksize']))
    qbt_torrent['info-hash'] = sha1(bencode(tor['info'])).digest()
    qbt_torrent['paused'] = 1 if res['started'] == 0 else 0
    qbt_torrent['auto_managed'] = 0
    qbt_torrent['total_downloaded'] = int(res['downloaded'])
    qbt_torrent['total_uploaded'] = int(res['uploaded'])
    qbt_torrent['upload_rate_limit'] = int(res['upspeed'])
    qbt_torrent['trackers'] = [[tracker] for tracker in res['trackers']]
    # NOTE(review): expands each "have" byte into its binary digits repeated
    # per piece; the original author flagged this "#wat?" -- verify against a
    # known-good fastresume file before trusting it.
    qbt_torrent['piece_priority'] = "".join(bin(hexik)[2:]*pieces_num for hexik in res["have"])
    qbt_torrent['pieces'] = qbt_torrent['piece_priority']
    qbt_torrent['finished_time'] = time_now - qbt_torrent['completed_time']
    qbt_torrent['last_seen_complete'] = int(time_now) if qbt_torrent["finished_time"] else 0
    qbt_torrent['last_download'] = qbt_torrent['finished_time']
    qbt_torrent['last_scrape'] = qbt_torrent['finished_time']
    qbt_torrent['last_upload'] = qbt_torrent['finished_time']
    qbt_torrent['mapped_files'] = []
    qbt_torrent['file sizes'] = []
    # Per-file fields: mapped_files / file_priority / file sizes.
    # uTorrent stores per-file priorities as a hex string, two digits per
    # file: 0 = skip, 1-8 -> normal (1), 9-15 -> high (2).
    get_hex = re_compile("[0-9A-Fa-f][0-9A-Fa-f]")
    # `get_hex.split()` returned the empty strings *between* matches (crashing
    # int(..., 16)); findall() yields the hex pairs themselves.
    qbt_torrent["file_priority"] = [(1 if int(hex_number, 16) in range(1, 9) else
                                     (2 if int(hex_number, 16) in range(9, 16) else
                                      (0))) for hex_number in get_hex.findall(res["prio"])]
    fmt = 0
    if "files" in tor['info']:
        # Multi-file torrent: each entry is {'length': ..., 'path': [parts]}.
        for file_index in range(len(tor['info']['files'])):
            tor_file = tor['info']['files'][file_index]
            # normpath() was called on the file dict itself (TypeError); join
            # the path components first.
            qbt_torrent['mapped_files'].append(
                path.normpath(path.join(*tor_file['path'])))
            # Condition was inverted: it indexed res['modtimes'] only when the
            # key was absent.
            if "modtimes" in res:
                fmt = int(res['modtimes'][file_index])
            else:
                fmt = 0
            res_file = path.join(res['path'], qbt_torrent['mapped_files'][-1])
            if path.isfile(res_file) and not fmt:
                fmt = int(path.getmtime(res_file))
            if qbt_torrent['file_priority'][file_index]:
                qbt_torrent['file sizes'].append([int(tor_file['length']), fmt])
            else:
                qbt_torrent['file sizes'].append([0, 0])
        qbt_torrent['qBt-savePath'] = res['path']
    else:
        # Single-file torrent: res['path'] points at the file itself.
        qbt_torrent['qBt-savePath'] = path.dirname(res['path'])
        if "modtimes" in res:
            fmt = int(res['modtimes'][0])  # file time to avoid re-checking / not present in uT 2.2
        else:
            fmt = 0
        res_file = res['path']
        if path.isfile(res_file) and not fmt:
            fmt = int(path.getmtime(res_file))
        if qbt_torrent['file_priority'][0]:
            qbt_torrent['file sizes'].append([int(tor['info']['length']), fmt])
        else:
            qbt_torrent['file sizes'].append([0, 0])
    # qBittorrent 3.1+ fields.
    if "label" in res:
        qbt_torrent['qBt-label'] = res['label']
    qbt_torrent['qBt-queuePosition'] = -1  # -1 for completed
    qbt_torrent['qBt-seedDate'] = qbt_torrent['completed_time']
    qbt_torrent['qBt-ratioLimit'] = "-2"  # -2 = Use Global, -1 = No limit, other number = actual ratio?
    return qbt_torrent
def punchup(res, tor, dotracker=True, doname=False):
    """Patch a decoded torrent dict with data from its uTorrent resume entry.

    @res uTorrent resume.dat entry (dict with 'trackers' and 'path').
    @tor decoded .torrent dict, modified in place and returned.
    @dotracker sync the tracker list from the resume data.
    @doname for single-file torrents, rename to match the on-disk file.
    """
    torrent = tor
    if dotracker:
        utrax = res['trackers']
        if len(utrax) > 1:
            if "announce-list" in torrent:
                # announce-list entries are tiers (lists); lists are
                # unhashable, so the original set(torrent['announce-list'])
                # raised TypeError. Flatten the tiers before comparing.
                current = {url for tier in torrent['announce-list'] for url in tier}
                if current != set(utrax):
                    torrent['announce-list'] = [[element] for element in set(utrax)]
            elif "announce" in torrent:
                if not torrent['announce'] == utrax[0]:
                    torrent['announce'] = utrax[0]
    if doname:
        res_path = res['path']
        # 'files' lives inside the 'info' dict, not at the torrent top level;
        # only single-file torrents should be renamed.
        if "files" not in torrent['info']:
            torrent['info']['name'] = path.basename(res_path)
    return torrent
def convertor(ut_data: str, qbt_dir: str):
    """Convert a uTorrent resume.dat into qBittorrent .torrent/.fastresume pairs.

    @ut_data path to uTorrent resume.dat.
    @qbt_dir directory to store the results.
    """
    message = messagebox
    # Optional backup of the original resume.dat (disabled by the author):
    # backup_data = ".".join((ut_data, "old"))
    # try:
    #     copy(ut_data, backup_data)
    # except IOError:
    #     if message.askyesno("Backup error", "Cannot back-up UT data\nIs it ok?"):
    #         backup_data = ""
    #     else:
    #         return
    with open(ut_data, 'rb') as ut_fd:
        data = ut_fd.read()
    try:
        torrents = bdecode(data)
    except DecodingError as error:
        message.showerror("Decoding error", "".join(("Cannot decode uTorrent data\n",
                                                     "Error: ", str(error))))
        return
    ut_folder = path.dirname(ut_data)
    for key, value in torrents.items():
        # resume.dat also contains bookkeeping keys (".fileguard", "rec", ...)
        # whose values are not per-torrent dicts -- skip them.
        if not isinstance(value, dict):
            continue
        torrent_path = path.join(ut_folder, key)
        # `BTFailure` was an undefined name (NameError); catch the decoder's
        # actual exception and unreadable/missing .torrent files instead.
        try:
            with open(torrent_path, 'rb') as tor_fd:
                bdecoded_data = bdecode(tor_fd.read())
        except (IOError, OSError, DecodingError):
            continue
        tor_dict = punchup(value, bdecoded_data)
        file_hash = sha1(bencode(tor_dict["info"])).hexdigest().lower()
        # Output paths.
        path_torrent_file = path.join(qbt_dir, ".".join((file_hash, "torrent")))
        path_fast_resume = path.join(qbt_dir, ".".join((file_hash, "fastresume")))
        if path.exists(path_torrent_file) or path.exists(path_fast_resume):
            continue
        fast_resume_file = mkfr(value, tor_dict)
        # The original reused the name `tor_file` for the output handle and
        # wrote bencode(<file handle>); write the torrent dict itself.
        with open(path_torrent_file, "wb") as out_fd:
            out_fd.write(bencode(tor_dict))
        with open(path_fast_resume, "wb") as out_fd:
            out_fd.write(bencode(fast_resume_file))
class qbtConvertor(Tk):
    """ GUI Application for migration from uTorrent to qBittorrent.

    Lays out two labelled path entries (uT resume.dat, qBt output folder),
    a Convert button and an indeterminate progress bar.
    """

    def __init__(self):
        Tk.__init__(self)
        self.title("uT to qBt convertor")
        # Main frame holding the whole grid layout.
        self.main_frame = Frame(self, padding="3 3 12 12")
        self.main_frame.grid(column=0, row=0, sticky=(N, W, E, S))
        self.main_frame.columnconfigure(0, weight=1)
        self.main_frame.rowconfigure(0, weight=1)
        # uTorrent part: label + path entry + file-browse button (row 1).
        self.ut_data = StringVar()
        self.ut_label = Label(self.main_frame, text="uT data")
        self.ut_label.grid(column=0, row=1, sticky=(W))
        self.ut_entry = Entry(self.main_frame, width=100, textvariable=self.ut_data)
        self.ut_entry.grid(column=1, row=1, sticky=(W))
        self.ut_button = Button(self.main_frame, text="Browse", command=self.load_file)
        self.ut_button.grid(column=2, row=1)
        # qBittorrent part: label + folder entry + directory-browse button (row 4).
        self.qbt_folder = StringVar()
        self.qbt_label = Label(self.main_frame, text="qBt folder")
        self.qbt_label.grid(column=0, row=4, sticky=(W))
        self.qbt_entry = Entry(self.main_frame, width=100, textvariable=self.qbt_folder)
        self.qbt_entry.grid(column=1, row=4, sticky=(W))
        self.qbt_button = Button(self.main_frame, text="Browse", command=self.open_dir)
        self.qbt_button.grid(column=2, row=4, sticky=(W, E))
        # Convert button and progress bar (rows 5-6).
        self.convertor_button = Button(self.main_frame, text="Convert", command=self.convert,
                                       width=50)
        self.convertor_button.grid(column=1, columnspan=2, row=5)
        self.progress_bar = Progressbar(self.main_frame, orient=HORIZONTAL, length=300, mode="indeterminate")
        self.progress_bar.grid(column=1, columnspan=3, row=6)
        # Uniform padding for every widget in the frame.
        for child in self.main_frame.winfo_children():
            child.grid_configure(padx=5, pady=5)

    def convert(self):
        """Validate both paths then run the conversion."""
        message = messagebox
        if not self.qbt_folder.get() or not self.ut_data.get():
            message.showerror("ERROR", "Specify paths!")
            return
        # NOTE(review): convertor() runs on the Tk thread, so the
        # indeterminate bar presumably won't animate while it works -- confirm.
        self.progress_bar.start()
        convertor(self.ut_data.get(), self.qbt_folder.get())
        self.progress_bar.stop()

    def load_file(self):
        """Ask for the uTorrent resume .dat file and store its path."""
        file_name = filedialog.askopenfilename(filetypes=(("UT resume file", "*.dat"),
                                                          ("All", "*")))
        if file_name:
            self.ut_data.set(file_name)

    def open_dir(self):
        """Ask for the qBittorrent output directory and store its path."""
        dir_name = filedialog.askdirectory()
        if dir_name:
            self.qbt_folder.set(dir_name)
if __name__ == "__main__":
    # Launch the converter GUI with a fixed initial window size.
    application = qbtConvertor()
    application.geometry("800x160")
    application.mainloop()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
import argparse
import logging
import logging.config
import os
import sys
import time
import yaml
from cluster_manager import setup_exporter_thread, \
manager_iteration_histogram, \
register_stack_trace_dump, \
update_file_modification_time
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../utils"))
from DataHandler import DataHandler
CLUSTER_STATUS_EXPIRY = 1
JOBS_EXPIRY = 180
logger = logging.getLogger(__name__)
def create_log(logdir='/var/log/dlworkspace'):
    """Configure logging from logging.yaml, writing db_manager.log under logdir."""
    # os.makedirs replaces the original `os.system("mkdir -p " + logdir)`:
    # no shell invocation, no injection risk, and errors raise instead of
    # being silently ignored.
    os.makedirs(logdir, exist_ok=True)
    with open('logging.yaml') as f:
        logging_config = yaml.full_load(f)
    log_filename = os.path.join(logdir, "db_manager.log")
    # Redirect the file handler into the requested directory.
    logging_config["handlers"]["file"]["filename"] = log_filename
    logging.config.dictConfig(logging_config)
def delete_old_cluster_status(days_ago):
    """Purge clusterstatus rows older than `days_ago` days (keeps >= 10 rows)."""
    table = "clusterstatus"
    with DataHandler() as data_handler:
        # Always retain the 10 most recent rows for safety.
        if data_handler.count_rows(table) <= 10:
            return
        logger.info("Deleting rows from table %s older than %s day(s)", table,
                    days_ago)
        ok = data_handler.delete_rows_from_table_older_than_days(
            table, days_ago)
        outcome = "succeeded" if ok is True else "failed"
        logger.info("Deleting rows from table %s older than %s day(s) %s",
                    table, days_ago, outcome)
def delete_old_inactive_jobs(days_ago):
    """Purge finished/failed/killed/error job rows older than `days_ago` days."""
    table = "jobs"
    with DataHandler() as data_handler:
        logger.info(
            "Deleting inactive job records from table %s older than %s "
            "day(s)", table, days_ago)
        # Only terminal-state jobs are eligible for deletion.
        condition = {"jobStatus": ("IN", ["finished", "failed", "killed", "error"])}
        ok = data_handler.delete_rows_from_table_older_than_days(
            table, days_ago, col="lastUpdated", cond=condition)
        outcome = "succeeded" if ok is True else "failed"
        logger.info(
            "Deleting inactive job records from table %s older than %s "
            "day(s) %s", table, days_ago, outcome)
def sleep_with_update(time_to_sleep, fn):
    """Sleep roughly `time_to_sleep` seconds in 100s slices, calling `fn` before each slice."""
    chunks = int(time_to_sleep / 100)
    for _ in range(chunks):
        fn()
        time.sleep(100)
def run():
    """Main loop: refresh the liveness file and GC old cluster status daily."""
    register_stack_trace_dump()
    create_log()
    update = lambda: update_file_modification_time("db_manager")
    while True:
        update()
        with manager_iteration_histogram.labels("db_manager").time():
            try:
                delete_old_cluster_status(CLUSTER_STATUS_EXPIRY)
                # query below is too time consuming since lastUpdated in job table is not indexed
                # delete_old_inactive_jobs(JOBS_EXPIRY)
            except Exception:
                # A bare `except:` would also swallow SystemExit and
                # KeyboardInterrupt, making the service unkillable.
                logger.exception("Deleting old cluster status failed")
        sleep_with_update(86400, update)
if __name__ == '__main__':
    # TODO: This can be made as a separate service to GC DB and orphaned pods
    parser = argparse.ArgumentParser()
    parser.add_argument("--port",
                        "-p",
                        help="port of exporter",
                        type=int,
                        default=9209)
    args = parser.parse_args()
    # Expose Prometheus metrics before entering the daily GC loop.
    setup_exporter_thread(args.port)
    run()
|
nilq/baby-python
|
python
|
import itertools
from numbers import Number
from graphgallery.utils.type_check import is_iterable
def repeat(src, length):
    """Coerce `src` into a sequence of exactly `length` items.

    None -> list of Nones; empty list/tuple -> []; scalar (Number/str) ->
    repeated `length` times; longer sequences are truncated, shorter ones
    padded with their last element.
    """
    if src is None:
        return [None] * length
    if src == [] or src == ():
        return []
    if isinstance(src, (Number, str)):
        return [src] * length
    size = len(src)
    if size > length:
        return src[:length]
    if size < length:
        # Pad with copies of the final element.
        return list(src) + [src[-1]] * (length - size)
    return src
def get_length(obj):
    """Return len(obj) for iterable objects, otherwise 1 for scalars."""
    return len(obj) if is_iterable(obj) else 1
|
nilq/baby-python
|
python
|
from __future__ import absolute_import
from __future__ import unicode_literals
import inspect
import logging
LOG = logging.getLogger(__name__)
def is_generator(func):
    """Check whether *func* is a generator function (i.e. contains ``yield``)."""
    return bool(inspect.isgeneratorfunction(func))
|
nilq/baby-python
|
python
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
# Copyright (C) 2012 by Xose Pérez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__ = "Xose Pérez"
__contact__ = "xose.perez@gmail.com"
__copyright__ = "Copyright (C) 2012-2013 Xose Pérez"
__license__ = 'GPL v3'
import yaml
class Config(object):
    """
    Simple YAML configuration parser.
    """

    # Parsed configuration mapping (None until __init__ runs).
    config = None

    def __init__(self, filename):
        """
        Parse `filename` and store the configuration.
        """
        # `file()` is a Python-2-only builtin; open() works on both 2 and 3,
        # and the context manager closes the handle even if parsing raises.
        with open(filename, 'r') as handler:
            # safe_load avoids arbitrary Python object construction that the
            # Loader-less yaml.load allows on untrusted input.
            self.config = yaml.safe_load(handler)

    def get(self, section, key=None, default=None):
        """
        Retrieve a given section/key combination;
        if not existent, return the default value.
        """
        try:
            if key is None:
                return self.config[section]
            return self.config[section][key]
        except (KeyError, TypeError, IndexError):
            # Missing section/key, or config is not a mapping at this level.
            return default
|
nilq/baby-python
|
python
|
def countPattern(genome, pattern):
    """
    Count the (possibly overlapping) occurrences of `pattern` in `genome`.
    """
    window = len(pattern)
    return sum(1 for start in range(len(genome) - window + 1)
               if genome.startswith(pattern, start))
def findPattern(genome, pattern):
    """
    Return the start indexes of all (possibly overlapping) occurrences of
    `pattern` in `genome`.
    """
    window = len(pattern)
    return [start for start in range(len(genome) - window + 1)
            if genome.startswith(pattern, start)]
|
nilq/baby-python
|
python
|
import logging
import asyncio
from ..Errors import *
from ..utils import Url
logger = logging.getLogger(__name__)
class Commander:
    """
    Manages looping through the group wall and checking for commands or messages
    Attributes
    -----------
    prefix: :class:`str`
        The command prefix
    """

    async def start_listening(self, client, commands, listening_to):
        """Store collaborators, build the wall-posts endpoint and start polling."""
        self.__commands = commands
        self.__client = client
        self.__listening_to = listening_to
        # Ids of wall posts already handled (grows for the bot's lifetime).
        self.__already_seen = []
        # The first poll should record pre-existing posts instead of
        # processing them as new.
        self.__is_first = True
        self.prefix = client.prefix
        # Newest-first, 10 posts per fetch.
        self.__access = Url("groups", "/v1/groups/%group_id%/wall/posts?limit=10&sortOrder=Desc", group_id=self.__listening_to.id)
        await self.start_loop()

    async def start_loop(self):
        """Poll the group wall forever, every 5 seconds."""
        await self.__client._emit("start_listening", (self.__listening_to,))
        while True:
            await self.__client._emit("check_messages", (self.__listening_to,))
            await self.check_messages()
            await asyncio.sleep(5)

    async def check_messages(self):
        """Fetch the latest wall posts and process any not seen before."""
        hook = await self.__access.get()
        for msg in hook.json['data']:
            if self.__is_first:
                # Seed the seen-list so this pre-existing post is skipped.
                self.__already_seen.append(msg["id"])
            if await self.check_entity(msg):
                await self.process_new_message(msg)
            # NOTE(review): __is_first is cleared inside the loop after the
            # first post, so only the newest pre-existing post is seeded on
            # startup -- the rest get processed as new. Confirm intended.
            if self.__is_first:
                self.__is_first = False

    async def check_entity(self, msg):
        """Return True exactly once per wall-post id, marking it as seen."""
        if not msg["id"] in self.__already_seen:
            self.__already_seen.append(msg["id"])
            return True
        return False

    async def process_new_message(self, msg):
        """Emit the message event and dispatch it as a command when prefixed."""
        text = msg["body"]
        flags = str.split(text, " ")
        ctx = await self.generate_context(msg)
        await self.__client._emit("message", ctx)
        if flags[0].startswith(self.prefix):
            # NOTE(review): replace() strips the prefix anywhere in the first
            # word, not just at the front -- confirm that is acceptable.
            flags[0] = flags[0].replace(self.prefix, "")
            await self.process_command(flags, ctx)

    async def process_command(self, flags, ctx):
        """Invoke the named client command with the remaining words as args."""
        function_name = flags.pop(0)
        args = tuple(flags)
        try:
            await self.__client.push_command(function_name, ctx, args)
        except TypeError as e:
            # Give the error event a chance to handle it before raising.
            if await self.__client._emit("error", (ctx, e)):
                return
            raise BadArguments(
                function_name
            )

    async def generate_context(self, msg):
        """Build a Context for the poster, preferring group membership."""
        try:
            member = await self.__listening_to.get_member(msg["poster"]["username"])
        except:
            # Poster may no longer be a member; fall back to a plain user.
            member = await self.__client.get_user(msg["poster"]["username"])
        return Context(member, msg["body"])
class Context:
    """
    Context object for a message posted on a group wall.
    .. note::
        Whether the author is exposed as `user` or `member` is decided by
        checking if the wrapped object has a truthy `group` attribute.
    Attributes
    -----------
    user: :class:`.BloxUser`
        The user that sent this message, may be :class:`None`
    member: :class:`.BloxMember`
        The member that sent this message, may be :class:`None`
    content: :class:`str`
        The content of the message sent
    """

    def __init__(self, user, ctt):
        self.__user_or_member = user
        self.content = ctt

    @property
    def member(self):
        # Expose the author as a member only when it belongs to a group.
        return self.__user_or_member if self.__user_or_member.group else None

    @property
    def user(self):
        # Expose the author as a plain user only when it has no group.
        return None if self.__user_or_member.group else self.__user_or_member
|
nilq/baby-python
|
python
|
"""Class implementation for the scale_x_from_center interface.
"""
from typing import Dict
from apysc._animation.animation_scale_x_from_center_interface import \
AnimationScaleXFromCenterInterface
from apysc._type.attr_linking_interface import AttrLinkingInterface
from apysc._type.number import Number
from apysc._type.revert_interface import RevertInterface
class ScaleXFromCenterInterface(
        AnimationScaleXFromCenterInterface, RevertInterface,
        AttrLinkingInterface):
    """Mixin providing the `scale_x_from_center` property, mirroring each
    update into the generated JavaScript expression stream.
    """

    # Current scale-x factor relative to the center (1.0 = unscaled).
    # Created lazily by the initializer below.
    _scale_x_from_center: Number

    def _initialize_scale_x_from_center_if_not_initialized(self) -> None:
        """
        Initialize the `_scale_x_from_center` attribute if it hasn't been
        initialized yet.
        """
        import apysc as ap
        with ap.DebugInfo(
                callable_=self.
                _initialize_scale_x_from_center_if_not_initialized,
                locals_=locals(),
                module_name=__name__, class_=ScaleXFromCenterInterface):
            if hasattr(self, '_scale_x_from_center'):
                return
            self._scale_x_from_center = ap.Number(1.0)
            self._append_scale_x_from_center_attr_linking_setting()

    def _append_scale_x_from_center_attr_linking_setting(self) -> None:
        """
        Append a scale-x attribute linking setting.
        """
        import apysc as ap
        with ap.DebugInfo(
                callable_=self.
                _append_scale_x_from_center_attr_linking_setting,
                locals_=locals(),
                module_name=__name__, class_=ScaleXFromCenterInterface):
            self._append_applying_new_attr_val_exp(
                new_attr=self._scale_x_from_center,
                attr_name='scale_x_from_center')
            self._append_attr_to_linking_stack(
                attr=self._scale_x_from_center,
                attr_name='scale_x_from_center')

    @property
    def scale_x_from_center(self) -> Number:
        """
        Get a scale-x value from the center of this instance.
        Returns
        -------
        scale_x_from_center : ap.Number
            Scale-x value from the center of this instance.
        References
        ----------
        - GraphicsBase scale_x_from_center and scale_y_from_center interfaces
        - https://bit.ly/3ityoCX
        Examples
        --------
        >>> import apysc as ap
        >>> stage: ap.Stage = ap.Stage()
        >>> sprite: ap.Sprite = ap.Sprite()
        >>> sprite.graphics.begin_fill(color='#0af')
        >>> rectangle: ap.Rectangle = sprite.graphics.draw_rect(
        ...     x=50, y=50, width=50, height=50)
        >>> rectangle.scale_x_from_center = ap.Number(1.5)
        >>> rectangle.scale_x_from_center
        Number(1.5)
        """
        import apysc as ap
        with ap.DebugInfo(
                callable_='scale_x_from_center', locals_=locals(),
                module_name=__name__, class_=ScaleXFromCenterInterface):
            from apysc._type import value_util
            self._initialize_scale_x_from_center_if_not_initialized()
            # Return a copy so callers can't mutate the internal value.
            return value_util.get_copy(value=self._scale_x_from_center)

    @scale_x_from_center.setter
    def scale_x_from_center(self, value: Number) -> None:
        """
        Update a scale-x value from the center of this instance.
        Parameters
        ----------
        value : ap.Number
            Scale-x value from the center of this instance.
        References
        ----------
        - GraphicsBase scale_x_from_center and scale_y_from_center interfaces
        - https://bit.ly/3ityoCX
        """
        import apysc as ap
        with ap.DebugInfo(
                callable_='scale_x_from_center', locals_=locals(),
                module_name=__name__, class_=ScaleXFromCenterInterface):
            from apysc._validation import number_validation
            self._initialize_scale_x_from_center_if_not_initialized()
            number_validation.validate_num(num=value)
            # Coerce plain Python numbers into ap.Number.
            if not isinstance(value, ap.Number):
                value = ap.Number(value)
            before_value: ap.Number = self._scale_x_from_center
            self._scale_x_from_center = value
            self._append_scale_x_from_center_update_expression(
                before_value=before_value)
            self._append_scale_x_from_center_attr_linking_setting()

    def _append_scale_x_from_center_update_expression(
            self, *, before_value: Number) -> None:
        """
        Append the scale-x from the center of this instance
        updating expression.
        Parameters
        ----------
        before_value : ap.Number
            Before updating value.
        """
        import apysc as ap
        with ap.DebugInfo(
                callable_=self._append_scale_x_from_center_update_expression,
                locals_=locals(),
                module_name=__name__, class_=ScaleXFromCenterInterface):
            from apysc._type import value_util
            before_value_str: str = value_util.get_value_str_for_expression(
                value=before_value)
            after_value_str: str = value_util.get_value_str_for_expression(
                value=self._scale_x_from_center)
            # Undo the previous scale (reciprocal), apply the new one, then
            # keep the JS-side "before" variable in sync for the next update.
            expression: str = (
                f'{self.variable_name}.scale(1 / {before_value_str}, 1);'
                f'\n{self.variable_name}.scale({after_value_str}, 1);'
                f'\n{before_value_str} = {after_value_str};'
            )
            ap.append_js_expression(expression=expression)

    # Snapshot name -> raw float value, used by _make_snapshot/_revert.
    _scale_x_from_center_snapshots: Dict[str, float]

    def _make_snapshot(self, *, snapshot_name: str) -> None:
        """
        Make a value's snapshot.
        Parameters
        ----------
        snapshot_name : str
            Target snapshot name.
        """
        self._initialize_scale_x_from_center_if_not_initialized()
        self._set_single_snapshot_val_to_dict(
            dict_name='_scale_x_from_center_snapshots',
            value=self._scale_x_from_center._value,
            snapshot_name=snapshot_name)

    def _revert(self, *, snapshot_name: str) -> None:
        """
        Revert a value if snapshot exists.
        Parameters
        ----------
        snapshot_name : str
            Target snapshot name.
        """
        if not self._snapshot_exists(snapshot_name=snapshot_name):
            return
        self._scale_x_from_center._value = \
            self._scale_x_from_center_snapshots[snapshot_name]
|
nilq/baby-python
|
python
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from textwrap import dedent
import pytest
from pants.backend.shell.lint.shfmt.rules import ShfmtFieldSet, ShfmtRequest
from pants.backend.shell.lint.shfmt.rules import rules as shfmt_rules
from pants.backend.shell.target_types import ShellSourcesGeneratorTarget
from pants.backend.shell.target_types import rules as target_types_rules
from pants.core.goals.fmt import FmtResult
from pants.core.goals.lint import LintResult, LintResults
from pants.core.util_rules import config_files, external_tool, source_files
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.addresses import Address
from pants.engine.fs import CreateDigest, Digest, FileContent
from pants.engine.target import Target
from pants.testutil.rule_runner import QueryRule, RuleRunner
@pytest.fixture
def rule_runner() -> RuleRunner:
    """RuleRunner wired with shfmt, config-file, external-tool and source rules."""
    return RuleRunner(
        rules=[
            *shfmt_rules(),
            *config_files.rules(),
            *external_tool.rules(),
            *source_files.rules(),
            *target_types_rules(),
            QueryRule(LintResults, [ShfmtRequest]),
            QueryRule(FmtResult, [ShfmtRequest]),
            QueryRule(SourceFiles, [SourceFilesRequest]),
        ],
        target_types=[ShellSourcesGeneratorTarget],
    )
# A file already in shfmt's canonical style: lint passes and fmt is a no-op.
GOOD_FILE = "! foo bar >a &\n"
# Missing the spaces shfmt inserts (after "!" and around ">"), so shfmt
# rewrites it into GOOD_FILE. The original defined BAD_FILE byte-identical
# to GOOD_FILE, which made every failure-path assertion vacuous.
BAD_FILE = "!foo bar>a &\n"

# If config is loaded correctly, shfmt will indent the case statements.
NEEDS_CONFIG_FILE = dedent(
    """\
    case foo in
    PATTERN_1)
    \tbar
    \t;;
    *)
    \tbaz
    \t;;
    esac
    """
)
FIXED_NEEDS_CONFIG_FILE = dedent(
    """\
    case foo in
    \tPATTERN_1)
    \t\tbar
    \t\t;;
    \t*)
    \t\tbaz
    \t\t;;
    esac
    """
)
def run_shfmt(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> tuple[tuple[LintResult, ...], FmtResult]:
    """Run shfmt lint and fmt over `targets`; return (lint results, fmt result)."""
    rule_runner.set_options(
        ["--backend-packages=pants.backend.shell.lint.shfmt", *(extra_args or ())],
        # shfmt is located via the ambient PATH.
        env_inherit={"PATH"},
    )
    field_sets = [ShfmtFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [ShfmtRequest(field_sets)])
    # fmt receives the targets' current sources as the prior formatter output.
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            ShfmtRequest(field_sets, prior_formatter_result=input_sources.snapshot),
        ],
    )
    return lint_results.results, fmt_result
def get_digest(rule_runner: RuleRunner, source_files: dict[str, str]) -> Digest:
    """Build a Digest from a mapping of file path to file content."""
    contents = [
        FileContent(file_path, text.encode())
        for file_path, text in source_files.items()
    ]
    return rule_runner.request(Digest, [CreateDigest(contents)])
def test_passing(rule_runner: RuleRunner) -> None:
    """A correctly formatted file lints clean and fmt leaves it unchanged."""
    rule_runner.write_files({"f.sh": GOOD_FILE, "BUILD": "shell_sources(name='t')"})
    tgt = rule_runner.get_target(Address("", target_name="t", relative_file_path="f.sh"))
    lint_results, fmt_result = run_shfmt(rule_runner, [tgt])
    assert len(lint_results) == 1
    assert lint_results[0].exit_code == 0
    assert lint_results[0].stderr == ""
    assert fmt_result.stdout == ""
    assert fmt_result.output == get_digest(rule_runner, {"f.sh": GOOD_FILE})
    assert fmt_result.did_change is False
def test_failing(rule_runner: RuleRunner) -> None:
    """A badly formatted file fails lint and is rewritten by fmt."""
    rule_runner.write_files({"f.sh": BAD_FILE, "BUILD": "shell_sources(name='t')"})
    tgt = rule_runner.get_target(Address("", target_name="t", relative_file_path="f.sh"))
    lint_results, fmt_result = run_shfmt(rule_runner, [tgt])
    assert len(lint_results) == 1
    assert lint_results[0].exit_code == 1
    assert "f.sh.orig" in lint_results[0].stdout
    assert fmt_result.stdout == "f.sh\n"
    assert fmt_result.output == get_digest(rule_runner, {"f.sh": GOOD_FILE})
    assert fmt_result.did_change is True
def test_multiple_targets(rule_runner: RuleRunner) -> None:
    """Only the badly formatted file is reported and changed when mixing targets."""
    rule_runner.write_files(
        {"good.sh": GOOD_FILE, "bad.sh": BAD_FILE, "BUILD": "shell_sources(name='t')"}
    )
    tgts = [
        rule_runner.get_target(Address("", target_name="t", relative_file_path="good.sh")),
        rule_runner.get_target(Address("", target_name="t", relative_file_path="bad.sh")),
    ]
    lint_results, fmt_result = run_shfmt(rule_runner, tgts)
    assert len(lint_results) == 1
    assert lint_results[0].exit_code == 1
    assert "bad.sh.orig" in lint_results[0].stdout
    assert "good.sh" not in lint_results[0].stdout
    assert "bad.sh\n" == fmt_result.stdout
    assert fmt_result.output == get_digest(rule_runner, {"good.sh": GOOD_FILE, "bad.sh": GOOD_FILE})
    assert fmt_result.did_change is True
def test_config_files(rule_runner: RuleRunner) -> None:
    """An .editorconfig in dir `a` applies to a/f.sh but not to sibling b/f.sh."""
    rule_runner.write_files(
        {
            "a/f.sh": NEEDS_CONFIG_FILE,
            "a/BUILD": "shell_sources()",
            "a/.editorconfig": "[*.sh]\nswitch_case_indent = true\n",
            "b/f.sh": NEEDS_CONFIG_FILE,
            "b/BUILD": "shell_sources()",
        }
    )
    tgts = [
        rule_runner.get_target(Address("a", relative_file_path="f.sh")),
        rule_runner.get_target(Address("b", relative_file_path="f.sh")),
    ]
    lint_results, fmt_result = run_shfmt(rule_runner, tgts)
    assert len(lint_results) == 1
    assert lint_results[0].exit_code == 1
    # Only the configured directory should be reformatted.
    assert "a/f.sh.orig" in lint_results[0].stdout
    assert "b/f.sh.orig" not in lint_results[0].stdout
    assert fmt_result.stdout == "a/f.sh\n"
    assert fmt_result.output == get_digest(
        rule_runner, {"a/f.sh": FIXED_NEEDS_CONFIG_FILE, "b/f.sh": NEEDS_CONFIG_FILE}
    )
    assert fmt_result.did_change is True
def test_passthrough_args(rule_runner: RuleRunner) -> None:
    """--shfmt-args passes -ci through, enabling case-statement indentation."""
    rule_runner.write_files({"f.sh": NEEDS_CONFIG_FILE, "BUILD": "shell_sources(name='t')"})
    tgt = rule_runner.get_target(Address("", target_name="t", relative_file_path="f.sh"))
    lint_results, fmt_result = run_shfmt(rule_runner, [tgt], extra_args=["--shfmt-args=-ci"])
    assert len(lint_results) == 1
    assert lint_results[0].exit_code == 1
    assert "f.sh.orig" in lint_results[0].stdout
    assert fmt_result.stdout == "f.sh\n"
    assert fmt_result.output == get_digest(rule_runner, {"f.sh": FIXED_NEEDS_CONFIG_FILE})
    assert fmt_result.did_change is True
def test_skip(rule_runner: RuleRunner) -> None:
    """--shfmt-skip disables both linting and formatting."""
    rule_runner.write_files({"f.sh": BAD_FILE, "BUILD": "shell_sources(name='t')"})
    tgt = rule_runner.get_target(Address("", target_name="t", relative_file_path="f.sh"))
    lint_results, fmt_result = run_shfmt(rule_runner, [tgt], extra_args=["--shfmt-skip"])
    assert not lint_results
    assert fmt_result.skipped is True
    assert fmt_result.did_change is False
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
__author__ = "Yaroslav Litvinov"
__copyright__ = "Copyright 2016, Rackspace Inc."
__email__ = "yaroslav.litvinov@rackspace.com"
from os import system
import psycopg2
import argparse
import configparser
from pymongo import DESCENDING
from collections import namedtuple
from datetime import datetime
from mongo_reader.reader import mongo_reader_from_settings
from gizer.psql_requests import PsqlRequests
from gizer.psql_requests import psql_conn_from_settings
from gizer.all_schema_engines import get_schema_engines_as_dict
from gizer.etlstatus_table import PsqlEtlStatusTable
from gizer.etlstatus_table import PsqlEtlStatusTableManager
from gizer.etlstatus_table import STATUS_INITIAL_LOAD
from gizer.etlstatus_table import STATUS_OPLOG_SYNC
from gizer.etlstatus_table import STATUS_OPLOG_APPLY
from gizer.etlstatus_table import STATUS_OPLOG_RESYNC
from gizer.opconfig import psql_settings_from_config
from gizer.opconfig import load_mongo_replicas_from_setting
def getargs():
    """Parse the psql schema/table-prefix command-line options."""
    # (removed the unused `default_request` local left over from an earlier
    # revision)
    parser = argparse.ArgumentParser()
    parser.add_argument("-psql-schema-name", help="", type=str)
    parser.add_argument("-psql-table-name-prefix", help="", type=str)
    args = parser.parse_args()
    return args
def main():
    """ main

    NOTE: Python 2 module (print statements, dict.iteritems, `file` type).
    Returns 0 when no initial load is needed, -1 when one should run.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--config-file", action="store",
                        help="Config file with settings",
                        type=file, required=True)
    parser.add_argument("-init-load-status", action="store_true",
                        help="will get exit status=0 if init load not needed,\
 or status=-1 if otherwise; Also print 1 - if in progress, 0 - if not.")
    parser.add_argument("-init-load-start-save-ts", action="store_true",
                        help='Save latest oplog timestamp to psql etlstatus table')
    parser.add_argument("-init-load-finish",
                        help='values are: "ok" or "error"', type=str)
    args = parser.parse_args()
    config = configparser.ConfigParser()
    config.read_file(args.config_file)
    psql_settings = psql_settings_from_config(config, 'psql')
    psql_main = PsqlRequests(psql_conn_from_settings(psql_settings))
    oplog_settings = load_mongo_replicas_from_setting(config, 'mongo-oplog')
    status_table = PsqlEtlStatusTable(psql_main.cursor,
                                      config['psql']['psql-schema-name'],
                                      sorted(oplog_settings.keys()))
    res = 0
    if args.init_load_status:
        # Inspect the most recent ETL status record to decide whether an
        # initial load is required.
        status = status_table.get_recent()
        if status:
            if (status.status == STATUS_OPLOG_SYNC or \
                status.status == STATUS_OPLOG_APPLY or \
                status.status == STATUS_INITIAL_LOAD or \
                status.status == STATUS_OPLOG_RESYNC) and not status.error:
                delta = datetime.now() - status.time_start
                # Check whether the operation has been running too long.
                if status.time_end:
                    res = 0
                elif delta.total_seconds() < 32400:  # < 9 hours
                    res = 0
                    if not status.time_end:
                        print 1  # means etl in progress
                    else:
                        print 0  # means etl not in progress
                else:
                    # takes too much time -> do init load
                    res = -1
            else:
                # error status -> do init load
                res = -1
        else:
            # empty status table -> do init load
            res = -1
    elif args.init_load_start_save_ts:
        # Create oplog read transport(s) to acquire the latest timestamp of
        # every replica before the initial load starts.
        max_ts_dict = {}
        for oplog_name, settings_list in oplog_settings.iteritems():
            print 'Fetch timestamp from oplog: %s' % oplog_name
            # settings list is a replica set (must be at least one in list)
            reader = mongo_reader_from_settings(settings_list, 'oplog.rs', {})
            reader.make_new_request({})
            # Newest oplog entry first, single record.
            reader.cursor.sort('ts', DESCENDING)
            reader.cursor.limit(1)
            timestamp = reader.next()
            if timestamp:
                max_ts_dict[oplog_name] = timestamp['ts']
            else:
                max_ts_dict[oplog_name] = None
            print 'Initload ts: %s, oplog: %s' % (max_ts_dict[oplog_name],
                                                  oplog_name)
        status_manager = PsqlEtlStatusTableManager(status_table)
        status_manager.init_load_start(max_ts_dict)
    elif args.init_load_finish:
        status_manager = PsqlEtlStatusTableManager(status_table)
        if args.init_load_finish == "ok":
            status_manager.init_load_finish(False)  # ok
        else:
            status_manager.init_load_finish(True)  # error
    return res
if __name__ == "__main__":
    # Propagate main()'s status (0 = no init load needed, -1 = needed) to the shell.
    exit(main())
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# URL routes for the ajunivel app.
from django.urls import path,re_path
from . import views
app_name = 'ajunivel'
urlpatterns = [
    # NOTE(review): three patterns intentionally alias the same index view and
    # share the name 'index'; reverse('ajunivel:index') resolves to one of them.
    path('', views.index, name='index'),
    path('index.html', views.index, name='index'),
    path('index', views.index, name='index'),
    path('menu', views.menu, name='menu'),
]
|
nilq/baby-python
|
python
|
import os
import socket
from pathlib import Path
class Config(object):
    """
    Basic configuration: process-wide socket timeout, request headers,
    target URL and output directories (created next to the working directory).
    """

    def __init__(self):
        super(Config, self).__init__()
        # Socket-layer timeout in seconds, applied process-wide.
        self.socket_timeout = 20
        socket.setdefaulttimeout(self.socket_timeout)
        self.url = "http://www.tianqihoubao.com/aqi/"
        self.headers = {'user-agent': 'my-app/0.0.1'}
        # Output and log directories live in the parent of the current cwd.
        self.folder_json = self.makedirs('json')
        self.folder_csv = self.makedirs('csv')
        self.log_path = self.makedirs('logging')
        self.timeout = 500      # per-request timeout used by callers
        self.max_retries = 30   # retry budget used by callers

    def makedirs(self, path):
        """Ensure ``<cwd>/../<path>`` exists and return it as a Path.

        Uses ``mkdir(parents=True, exist_ok=True)`` instead of the original
        exists()-then-makedirs() sequence, which had a check-then-create race.
        """
        target = Path.cwd().parent.joinpath(path)
        target.mkdir(parents=True, exist_ok=True)
        return target
|
nilq/baby-python
|
python
|
from dataclasses import dataclass
@dataclass
class ModelException(Exception):
    """Base exception for model-related errors.

    BUG FIX: the original did not inherit from Exception, so it could not be
    raised or caught. Subclassing Exception is backward compatible: it is
    still constructible with no arguments.
    """
    pass
|
nilq/baby-python
|
python
|
import numpy as np
from torchmeta.utils.data import Task, MetaDataset
class Relu(MetaDataset):
    """
    Meta-dataset of 1-D ReLU regression tasks: y = sign * max(x, 0).

    Parameters
    ----------
    num_samples_per_task : int
        Number of examples per task.
    num_tasks : int (default: 2)
        Overall number of tasks to sample.
    noise_std : float, optional
        Amount of noise to include in the targets for each task. If `None`, then
        no noise is included.
    transform : callable, optional
        A function/transform that takes a numpy array of size (1,) and returns a
        transformed version of the input.
    target_transform : callable, optional
        A function/transform that takes a numpy array of size (1,) and returns a
        transformed version of the target.
    dataset_transform : callable, optional
        A function/transform that takes a dataset (ie. a task), and returns a
        transformed version of it. E.g. `torchmeta.transforms.ClassSplitter()`.
    """
    def __init__(self, num_samples_per_task, num_tasks=2,
                 noise_std=None, transform=None, target_transform=None,
                 dataset_transform=None, seed=None):
        super(Relu, self).__init__(meta_split='train',
            target_transform=target_transform, dataset_transform=dataset_transform,
            seed=seed)
        self.num_samples_per_task = num_samples_per_task
        self.num_tasks = num_tasks
        self.noise_std = noise_std
        self.transform = transform
        self._input_range = np.array([-5.0, 5.0])
        self._signs = None

    @property
    def signs(self):
        # Lazily build a balanced, shuffled array of +1/-1 task signs.
        if self._signs is None:
            # BUG FIX: the deprecated alias np.int was removed in NumPy >= 1.24
            # (AttributeError); the builtin int is the documented replacement.
            self._signs = np.ones((self.num_tasks,), dtype=int)
            self._signs[self.num_tasks // 2:] = -1
            self.np_random.shuffle(self._signs)
        return self._signs

    def __len__(self):
        return self.num_tasks

    def __getitem__(self, index):
        task = ReluTask(index, self.signs[index], self._input_range,
            self.noise_std, self.num_samples_per_task, self.transform,
            self.target_transform, np_random=self.np_random)
        if self.dataset_transform is not None:
            task = self.dataset_transform(task)
        return task
class ReluTask(Task):
    """A single regression task: targets are ``sign * relu(x)`` plus optional
    Gaussian noise, sampled uniformly over ``input_range``."""

    def __init__(self, index, sign, input_range, noise_std,
                 num_samples, transform=None, target_transform=None,
                 np_random=None):
        super(ReluTask, self).__init__(index, None)  # regression task: no classes
        self.sign = sign
        self.input_range = input_range
        self.num_samples = num_samples
        self.noise_std = noise_std
        self.transform = transform
        self.target_transform = target_transform
        rng = np.random.RandomState(None) if np_random is None else np_random
        # Inputs are drawn first, then noise, so the RNG call order matches
        # the original implementation exactly (reproducibility).
        self._inputs = rng.uniform(input_range[0], input_range[1],
                                   size=(num_samples, 1))
        self._targets = sign * np.maximum(self._inputs, 0)
        if (noise_std is not None) and (noise_std > 0.):
            self._targets += noise_std * rng.randn(num_samples, 1)

    def __len__(self):
        return self.num_samples

    def __getitem__(self, index):
        sample, label = self._inputs[index], self._targets[index]
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            label = self.target_transform(label)
        return (sample, label)
|
nilq/baby-python
|
python
|
# flake8: noqa
# JSON Schema for the CloudWatch Embedded Metric Format (EMF) envelope:
# a log record must carry an "_aws" metadata node with a Timestamp and one
# or more CloudWatchMetrics directives (Namespace + Dimensions + Metrics).
CLOUDWATCH_EMF_SCHEMA = {
    "properties": {
        "_aws": {
            "$id": "#/properties/_aws",
            "properties": {
                "CloudWatchMetrics": {
                    "$id": "#/properties/_aws/properties/CloudWatchMetrics",
                    "items": {
                        "$id": "#/properties/_aws/properties/CloudWatchMetrics/items",
                        "properties": {
                            "Dimensions": {
                                "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions",
                                "items": {
                                    "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions/items",
                                    "items": {
                                        "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions/items/items",
                                        "examples": ["Operation"],
                                        "minItems": 1,
                                        "pattern": "^(.*)$",
                                        "title": "DimensionReference",
                                        "type": "string",
                                    },
                                    # CloudWatch allows at most 9 dimensions per set.
                                    "maxItems": 9,
                                    "minItems": 1,
                                    "title": "DimensionSet",
                                    "type": "array",
                                },
                                "minItems": 1,
                                "title": "The " "Dimensions " "Schema",
                                "type": "array",
                            },
                            "Metrics": {
                                "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics",
                                "items": {
                                    "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics/items",
                                    "minItems": 1,
                                    "properties": {
                                        "Name": {
                                            "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics/items/properties/Name",
                                            "examples": ["ProcessingLatency"],
                                            "minLength": 1,
                                            "pattern": "^(.*)$",
                                            "title": "MetricName",
                                            "type": "string",
                                        },
                                        # Unit must be one of the CloudWatch standard units.
                                        "Unit": {
                                            "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics/items/properties/Unit",
                                            "examples": ["Milliseconds"],
                                            "pattern": "^(Seconds|Microseconds|Milliseconds|Bytes|Kilobytes|Megabytes|Gigabytes|Terabytes|Bits|Kilobits|Megabits|Gigabits|Terabits|Percent|Count|Bytes\\/Second|Kilobytes\\/Second|Megabytes\\/Second|Gigabytes\\/Second|Terabytes\\/Second|Bits\\/Second|Kilobits\\/Second|Megabits\\/Second|Gigabits\\/Second|Terabits\\/Second|Count\\/Second|None)$",
                                            "title": "MetricUnit",
                                            "type": "string",
                                        },
                                    },
                                    "required": ["Name"],
                                    "title": "MetricDefinition",
                                    "type": "object",
                                },
                                "minItems": 1,
                                "title": "MetricDefinitions",
                                "type": "array",
                            },
                            "Namespace": {
                                "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Namespace",
                                "examples": ["MyApp"],
                                "minLength": 1,
                                "pattern": "^(.*)$",
                                "title": "CloudWatch " "Metrics " "Namespace",
                                "type": "string",
                            },
                        },
                        "required": ["Namespace", "Dimensions", "Metrics"],
                        "title": "MetricDirective",
                        "type": "object",
                    },
                    "title": "MetricDirectives",
                    "type": "array",
                },
                "Timestamp": {
                    "$id": "#/properties/_aws/properties/Timestamp",
                    "examples": [1565375354953],
                    "title": "The Timestamp " "Schema",
                    "type": "integer",
                },
            },
            "required": ["Timestamp", "CloudWatchMetrics"],
            "title": "Metadata",
            "type": "object",
        }
    },
    "required": ["_aws"],
    "title": "Root Node",
    "type": "object",
}
|
nilq/baby-python
|
python
|
import torch
from torchvision import transforms, datasets
import numpy as np
from PIL import Image
from skimage.color import rgb2lab, rgb2gray, lab2rgb
def count_params(model):
    '''
    Return the number of trainable parameters in *model* (those with
    requires_grad set).
    '''
    total = 0
    for param in model.parameters():
        if param.requires_grad:
            total += param.numel()
    return total
class GrayscaleImageFolder(datasets.ImageFolder):
    '''
    Custom dataloader for various operations on images before loading them.

    Each item yields (grayscale tensor [1,H,W], normalized AB tensor [2,H,W],
    target). Assumes self.transform yields an RGB HxWxC image — TODO confirm.
    '''
    def __getitem__(self, index):
        path, target = self.imgs[index]
        img = self.loader(path)
        # NOTE(review): if self.transform is None, img_orig/img_ab are never
        # assigned and the return raises UnboundLocalError; presumably a
        # transform is always provided — verify against callers.
        if self.transform is not None:
            img_orig = self.transform(img) # apply transforms
            img_orig = np.asarray(img_orig) # convert to numpy array
            img_lab = rgb2lab(img_orig) # convert RGB image to LAB
            img_ab = img_lab[:, :, 1:3] # separate AB channels from LAB
            img_ab = (img_ab + 128) / 255 # normalize the pixel values
            # transpose image from HxWxC to CxHxW and turn it into a tensor
            img_ab = torch.from_numpy(img_ab.transpose((2, 0, 1))).float()
            img_orig = rgb2gray(img_orig) # convert RGB to grayscale
            # add a channel axis to grascale image and turn it into a tensor
            img_orig = torch.from_numpy(img_orig).unsqueeze(0).float()
        if self.target_transform is not None:
            target = self.target_transform(target)
        return img_orig, img_ab, target
def load_gray(path, max_size=360, shape=None):
    '''
    Load the image at *path* as grayscale, cap its size at *max_size* (or
    force it to *shape* when given), and return a [1,1,H,W] float tensor.
    '''
    img_gray = Image.open(path).convert('L')
    # Cap the longest side at max_size; an explicit shape overrides the cap.
    size = min(max(img_gray.size), max_size)
    if shape is not None:
        size = shape
    tensorize = transforms.Compose([
        transforms.Resize(size),
        transforms.ToTensor()
    ])
    return tensorize(img_gray).unsqueeze(0)
def to_rgb(img_l, img_ab):
    '''
    Concatenate the lightness (L) channel and AB channels, un-normalize the
    resulting LAB image and convert it to RGB.
    '''
    if img_l.shape == img_ab.shape:
        lab = torch.cat((img_l, img_ab), 1).numpy().squeeze()
    else:
        # Crop AB spatially to match L before concatenating.
        cropped_ab = img_ab[:, :, :img_l.size(2), :img_l.size(3)]
        lab = torch.cat((img_l, cropped_ab), dim=1).numpy().squeeze()
    lab = lab.transpose(1, 2, 0)               # CxHxW -> HxWxC
    lab[:, :, 0] = lab[:, :, 0] * 100          # L channel back to 0-100
    lab[:, :, 1:] = lab[:, :, 1:] * 255 - 128  # un-normalize AB channels
    return lab2rgb(lab.astype(np.float64))
|
nilq/baby-python
|
python
|
from datetime import date

# Classify a swimmer's category from the birth year (age computed from today).
ano = int(input('Digite o ano de nascimento: '))
idade = date.today().year - ano
# Each elif already implies the previous bound failed, so chained upper
# bounds are enough.
if idade <= 9:
    print('Sua idade {}, Até 9 anos: Mirim'.format(idade))
elif idade <= 14:
    print('Sua idade {}, Até 14 anos: Infantil'.format(idade))
elif idade <= 19:
    print('Sua idade {}, Até 19 anos: Junior'.format(idade))
elif idade == 20:
    print('Sua idade {}, Até 20 anos: Sênior'.format(idade))
else:
    print('Sua idade {}, Acima de 20 anos: Master'.format(idade))
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
"""
from flask import flash, redirect, url_for, render_template, request
from sayhello import app, db
from sayhello.forms import HelloForm
from sayhello.models import Message
@app.route('/', methods=['GET', 'POST'])
def index():
    """
    Guestbook index: save a new message on POST, then render the message
    list with a hand-rolled pagination bar.

    # TODO: pagination bug still unresolved (original note: 分页BUG未解决)
    """
    form = HelloForm()
    if form.validate_on_submit():
        name = form.name.data
        body = form.body.data
        message = Message(body=body, name=name)
        db.session.add(message)
        db.session.commit()
        flash('添加成功')
        return redirect(url_for('index'))
    messages = Message.query.order_by(Message.timestamp.desc()).all()
    # Ceil-divide the message count by 10 to get the page count.
    total_page = divmod(len(messages),10)[0]+1 if divmod(len(messages),10)[1] else divmod(len(messages),10)[0]
    page_num = request.args.get('page_num') and int(request.args.get('page_num'))
    li_list = []
    if not page_num:
        # First visit: show a 5-page window starting at page 1.
        start_page = 1
        end_page = 5
        page_num = 0
    else:
        # Slide the page window so the requested page sits in the middle.
        start_page = int(request.args.get('start_page'))
        end_page = int(request.args.get('end_page'))
        mid_page = (int(start_page) + int(end_page)) // 2
        offset_page = page_num - mid_page
        if offset_page > 0:
            start_page += offset_page
            end_page += offset_page
            if end_page > total_page:
                end_page = total_page
    for i in range(start_page, end_page+1):
        standard_li = '<li><a href="/?page_num={0}&start_page={1}&end_page={2}">{0}</a></li>'.format(i,start_page,end_page)
        li_list.append(standard_li)
    page_block = "".join(li_list)
    # NOTE(review): this offsets by the page *number* (not page_num * page
    # size) and limit(100000) is effectively unbounded — this looks like the
    # pagination bug the TODO refers to.
    messages = Message.query.order_by(Message.timestamp.desc()).offset(page_num).limit(100000).all()
    return render_template('index.html', form=form, messages=messages,page_block=page_block)
|
nilq/baby-python
|
python
|
from aws_google_auth import exit_if_unsupported_python
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import unittest
import sys
import mock
class TestPythonFailOnVersion(unittest.TestCase):
    """exit_if_unsupported_python() must exit only on Python < 2.7."""

    @mock.patch('sys.stdout', new_callable=StringIO)
    def test_python26(self, fake_stdout):
        with mock.patch.object(sys, 'version_info') as fake_version:
            fake_version.major, fake_version.minor = 2, 6
            with self.assertRaises(SystemExit):
                exit_if_unsupported_python()
            self.assertIn("aws-google-auth requires Python 2.7 or higher.",
                          fake_stdout.getvalue())

    def _assert_no_exit(self, major, minor):
        # Helper: with version_info patched to (major, minor), no exit occurs.
        with mock.patch.object(sys, 'version_info') as fake_version:
            fake_version.major, fake_version.minor = major, minor
            try:
                exit_if_unsupported_python()
            except SystemExit:
                self.fail("exit_if_unsupported_python() raised SystemExit unexpectedly!")

    def test_python27(self):
        self._assert_no_exit(2, 7)

    def test_python30(self):
        self._assert_no_exit(3, 0)
|
nilq/baby-python
|
python
|
import json
from django.contrib.auth import login, logout, authenticate
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.http import JsonResponse
from django.shortcuts import render, redirect
import re
import logging
from apps.goods.models import SKU
logger = logging.getLogger('django')
# Create your views here.
from django.urls import reverse
from django.views import View
from django_redis import get_redis_connection
from apps.areas.models import Area
from apps.users.models import User, Address
from apps.users.utils import check_active_token, generic_access_token_url
from utils.response_code import RETCODE
class RegisterView(View):
    """Render the registration page and create a new user account."""

    def get(self,request):
        return render(request,'register.html')

    def post(self,request):
        # Collect form fields.
        data = request.POST
        username = data.get('username')
        password=data.get('password')
        password2=data.get('password2')
        mobile=data.get('mobile')
        sms_code = data.get('sms_code')
        # Validate presence and format of all required fields.
        if not all([username,password,password2,mobile]):
            return HttpResponseBadRequest('参数不全')
        if not re.match(r'[a-zA-Z0-9]{5,20}',username):
            return HttpResponseBadRequest('用户名不满足条件')
        if not re.match(r'[a-zA-Z0-9]{8,20}',password):
            return HttpResponseBadRequest('密码不符合规则')
        if password2 != password:
            return HttpResponseBadRequest('密码不一致')
        if not re.match(r'^1[3-9]\d{9}$',mobile):
            return HttpResponseBadRequest('手机号错误')
        # Compare the submitted SMS code against the one stored in Redis.
        redis_conn = get_redis_connection('code')
        smskey = 'sms_%s'%mobile
        a = redis_conn.get(smskey)
        # NOTE(review): leftover debug prints; also get() returns None once
        # the code expires, so .decode() below would raise AttributeError.
        print(a)
        print(type(a))
        print(sms_code)
        if redis_conn.get(smskey).decode() != sms_code:
            return HttpResponseBadRequest('验证码错误')
        # Create the user and log them in immediately.
        user = User.objects.create_user(username=username,
                                        password=password,
                                        mobile=mobile)
        login(request, user)
        return redirect(reverse('contents:index'))
class isUnique(View):
    """Report how many existing users already have the given username."""

    def get(self, request, username):
        matches = User.objects.filter(username=username).count()
        return JsonResponse({'count': matches, 'username': username})
class MobileUnique(View):
    """Report how many existing users already use the given mobile number."""

    def get(self, request, mobile):
        # BUG FIX: removed the leftover debug print of the mobile number.
        count = User.objects.filter(mobile=mobile).count()
        return JsonResponse({'count': count, 'mobile': mobile})
class LoginView(View):
    """Render the login page and authenticate username/password logins."""

    def get(self,request):
        return render(request,'login.html')

    # Note: HTTP status codes help diagnose problems;
    # 405 Method Not Allowed means the request method is not implemented.
    def post(self,request):
        # 1. Receive form data.
        username=request.POST.get('username')
        password=request.POST.get('pwd')
        remembered = request.POST.get('remembered')
        # 2. Validate data (presence; regex checks omitted here).
        if not all([username,password]):
            return HttpResponseBadRequest('参数不全')
        # 3. Check that username and password match.
        from django.contrib.auth import authenticate
        # authenticate() returns the User on success, None on failure.
        from django.contrib.auth.backends import ModelBackend
        user = authenticate(username=username,password=password)
        if user is None:
            return HttpResponseBadRequest('用户名或密码错误')
        # 4. Keep the session state.
        login(request,user)
        # 5. "Remember me" handling.
        if remembered == 'on':
            # Remember the login (default session lifetime, ~2 weeks).
            request.session.set_expiry(None)
        else:
            # Session expires when the browser closes.
            request.session.set_expiry(0)
        # 6. Respond, exposing the username via cookie for the frontend.
        response = redirect(reverse('contents:index'))
        response.set_cookie('username',user.username,max_age=3600*24*14)
        # Merge the anonymous cart cookie into Redis for this user.
        from apps.carts.utils import merge_cookie_to_redis
        response=merge_cookie_to_redis(request,user,response)
        return response
class LogoutView(View):
    """Log the user out, clear the username cookie, and go back to the index."""

    def get(self, request):
        logout(request)
        resp = redirect(reverse('contents:index'))
        resp.delete_cookie('username')
        return resp
class UserCenterInfoView(LoginRequiredMixin,View):
    """Render the user-center info page with the current user's profile."""

    def get(self, request):
        user = request.user
        context = {
            field: getattr(user, field)
            for field in ('username', 'mobile', 'email', 'email_active')
        }
        return render(request, 'user_center_info.html', context=context)
class EmailView(View):
    """Bind an e-mail address to the logged-in user and send an activation mail."""

    def put(self, request):
        data = json.loads(request.body.decode())
        email = data.get('email')
        # Robustness: guard against a missing email (re.match(None) raises).
        if not email or not re.match(r'^[a-z0-9][\w\.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$', email):
            # BUG FIX: the original wrapped the dict in an extra pair of
            # braces ({{...}}), building a set containing a dict, which
            # raises TypeError (unhashable) at runtime.
            return JsonResponse({'code': RETCODE.PARAMERR, 'errmsg': '邮箱不符合规则'})
        request.user.email = email
        request.user.save()
        # Send the activation mail asynchronously via Celery.
        from celery_tasks.email.tasks import send_active_email
        send_active_email.delay(request.user.id, email)
        return JsonResponse({'code': RETCODE.OK, 'errmsg': 'ok'})
class EmailActiveView(View):
    """Verify the e-mail activation token and mark the user's e-mail as active."""

    def get(self, request):
        token = request.GET.get('token')
        if token is None:
            return HttpResponseBadRequest('缺少参数')
        data = check_active_token(token)
        # Idiom fix: identity comparison with None (was `== None`).
        if data is None:
            return HttpResponseBadRequest('验证失败')
        # Renamed from `id`, which shadowed the builtin.
        user_id = data.get('id')
        email = data.get('email')
        try:
            user = User.objects.get(id=user_id, email=email)
        except User.DoesNotExist:
            return HttpResponseBadRequest('验证失败')
        user.email_active = True
        user.save()
        return redirect(reverse('users:center'))
class UserCenterSiteView(View):
    """Render the address-management page with the user's active addresses."""

    def get(self,request):
        user=request.user
        # Only non-deleted addresses belonging to this user.
        addresses = Address.objects.filter(user=user, is_deleted=False)
        address_dict_list = []
        # Serialize each Address into the plain dict shape the template expects.
        for address in addresses:
            address_dict = {
                "id": address.id,
                "title": address.title,
                "receiver": address.receiver,
                "province": address.province.name,
                "province_id":address.province_id,
                "city": address.city.name,
                "city_id":address.city_id,
                "district": address.district.name,
                "district_id":address.district_id,
                "place": address.place,
                "mobile": address.mobile,
                "tel": address.tel,
                "email": address.email
            }
            address_dict_list.append(address_dict)
        context = {
            'default_address_id':user.default_address_id,
            'addresses':address_dict_list
        }
        return render(request,'user_center_site.html',context=context)
class CreateView(View):
    """Create a new shipping address for the logged-in user (max 20 per user)."""

    def post(self,request):
        # Enforce the per-user address quota.
        count = Address.objects.filter(user=request.user,is_deleted=False).count()
        if count >= 20:
            return JsonResponse({'code': RETCODE.THROTTLINGERR, 'errmsg': '超过地址数量上限'})
        data = json.loads(request.body.decode())
        receiver=data.get('receiver')
        province_id=data.get('province_id')
        city_id=data.get('city_id')
        district_id=data.get('district_id')
        place=data.get('place')
        mobile=data.get('mobile')
        tel=data.get('tel')
        email=data.get('email')
        # Validate required fields plus optional tel/email formats.
        if not all([receiver,province_id,city_id,district_id,place,mobile]):
            return HttpResponseBadRequest('参数不全')
        if not re.match(r'^1[3-9]\d{9}$', mobile):
            return HttpResponseBadRequest('电话号码输入有误')
        if tel:
            if not re.match(r'^(0[0-9]{2,3}-)?([2-9][0-9]{6,7})+(-[0-9]{1,4})?$', tel):
                return HttpResponseBadRequest('参数tel有误')
        if email:
            if not re.match(r'^[a-z0-9][\w\.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$', email):
                return HttpResponseBadRequest('参数email有误')
        try:
            # The receiver doubles as the initial address title.
            ads = Address.objects.create(user=request.user,
                                  title=receiver,
                                  receiver=receiver,
                                  province_id=province_id,
                                  city_id=city_id,
                                  district_id=district_id,
                                  place=place,
                                  mobile=mobile,
                                  tel=tel,
                                  email=email)
        except Exception as e:
            logger.error(e)
            return HttpResponseBadRequest('保存失败')
        # Echo the created address back for the frontend to render.
        address = {
            "receiver": ads.receiver,
            "province": ads.province.name,
            "city": ads.city.name,
            "district": ads.district.name,
            "place": ads.place,
            "mobile": ads.mobile,
            "tel": ads.tel,
            "email": ads.email,
            "id": ads.id,
            "title": ads.title,
        }
        return JsonResponse({'code': RETCODE.OK, 'errmsg': '新增地址成功', 'address': address})
class DefaultView(View):
    """Set the given address as the user's default shipping address."""

    def put(self,request,address_id):
        try:
            # NOTE(review): the address is looked up by id only, without
            # filtering on request.user or is_deleted — confirm that ownership
            # is enforced elsewhere.
            default_address = Address.objects.get(id=address_id)
            request.user.default_address = default_address
            request.user.save()
        except Exception as e:
            logger.error(e)
            return HttpResponseBadRequest('出错')
        return JsonResponse({'code':RETCODE.OK,'errmsg': '设置成功'})
class UpdateView(View):
    """Update (PUT) or soft-delete (DELETE) a shipping address."""

    def put(self,request,address_id):
        data = json.loads(request.body.decode())
        receiver=data.get('receiver')
        province_id=data.get('province_id')
        city_id=data.get('city_id')
        district_id=data.get('district_id')
        place=data.get('place')
        mobile=data.get('mobile')
        tel=data.get('tel')
        email=data.get('email')
        # Same validation rules as CreateView.
        if not all([receiver,province_id,city_id,district_id,place,mobile]):
            return HttpResponseBadRequest('参数不全')
        if not re.match(r'^1[3-9]\d{9}$', mobile):
            return HttpResponseBadRequest('电话号码输入有误')
        if tel:
            if not re.match(r'^(0[0-9]{2,3}-)?([2-9][0-9]{6,7})+(-[0-9]{1,4})?$', tel):
                return HttpResponseBadRequest('参数tel有误')
        if email:
            if not re.match(r'^[a-z0-9][\w\.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$', email):
                return HttpResponseBadRequest('参数email有误')
        try:
            # QuerySet.update() writes directly to the DB (no model save()).
            update_address = Address.objects.filter(id=address_id)
            update_address.update(
                user=request.user,
                title=receiver,
                receiver=receiver,
                province_id=province_id,
                city_id=city_id,
                district_id=district_id,
                place=place,
                mobile=mobile,
                tel=tel,
                email=email,
            )
        except Exception as e:
            logger.error(e)
            return HttpResponseBadRequest('更新失败')
        # Re-fetch so the response reflects the persisted row.
        update_address = Address.objects.get(id=address_id)
        address_dict = {
            "id": update_address.id,
            "title": update_address.title,
            "receiver": update_address.receiver,
            "province": update_address.province.name,
            "city": update_address.city.name,
            "district": update_address.district.name,
            "place": update_address.place,
            "mobile": update_address.mobile,
            "tel": update_address.tel,
            "email": update_address.email
        }
        return JsonResponse({'code': RETCODE.OK, 'errmsg': '更新地址成功', 'address': address_dict})

    def delete(self,request,address_id):
        # Soft delete: flag the row instead of removing it.
        try:
            delete_address = Address.objects.filter(id=address_id)
            delete_address.update(is_deleted=True)
        except Exception as e:
            logger.error(e)
            return HttpResponseBadRequest('删除失败')
        return JsonResponse({'code': RETCODE.OK, 'errmsg': '删除地址成功'})
class UpdateTitleView(View):
    """Update only the title of an existing address."""

    def put(self, request, address_id):
        payload = json.loads(request.body.decode())
        new_title = payload.get('title')
        try:
            Address.objects.filter(id=address_id).update(title=new_title)
        except Exception as e:
            logger.error(e)
            return HttpResponseBadRequest('修改标题失败')
        return JsonResponse({'code': RETCODE.OK, 'errmsg': '设置地址标题成功'})
class ChangePassword(View):
    """Render the change-password page and apply a password change."""

    def get(self,request):
        return render(request,'user_center_pass.html')

    def post(self, request):
        """Change the logged-in user's password."""
        # 1. Receive parameters.
        old_password = request.POST.get('old_password')
        new_password = request.POST.get('new_password')
        new_password2 = request.POST.get('new_password2')
        # 2. Validate parameters.
        if not all([old_password, new_password, new_password2]):
            return HttpResponseBadRequest('缺少必传参数')
        if not re.match(r'^[0-9A-Za-z]{8,20}$', new_password):
            return HttpResponseBadRequest('密码最少8位,最长20位')
        if new_password != new_password2:
            return HttpResponseBadRequest('两次输入的密码不一致')
        # 3. Verify the old password.
        if not request.user.check_password(old_password):
            return render(request, 'user_center_pass.html', {'origin_password_errmsg': '原始密码错误'})
        # 4. Store the new password.
        try:
            request.user.set_password(new_password)
            request.user.save()
        except Exception as e:
            logger.error(e)
            return render(request, 'user_center_pass.html', {'change_password_errmsg': '修改密码失败'})
        # 5. Log out and clear the login cookie.
        logout(request)
        # 6. Redirect to the login page.
        response = redirect(reverse('users:login'))
        response.delete_cookie('username')
        return response
class UserHistoryView(LoginRequiredMixin,View):
    """Record (POST) and fetch (GET) the user's SKU browsing history in Redis."""

    def post(self,request):
        user = request.user
        data = json.loads(request.body.decode())
        sku_id = data.get('sku_id')
        try:
            sku = SKU.objects.get(id=sku_id)
        except SKU.DoesNotExist:
            return JsonResponse({'code':RETCODE.NODATAERR,'errmsg':'没有此商品'})
        redis_conn = get_redis_connection('history')
        # Pipeline: dedup the sku, push it to the front, keep only 5 entries.
        pipeline = redis_conn.pipeline()
        pipeline.lrem('history_%s'%user.id,0,sku_id)
        pipeline.lpush('history_%s' % user.id, sku_id)
        pipeline.ltrim('history_%s'%user.id,0,4)
        pipeline.execute()
        return JsonResponse({'code':RETCODE.OK,'errmsg':'ok'})

    def get(self, request):
        """Return the user's browsing history."""
        # Read the sku_id list stored in Redis (most recent first).
        redis_conn = get_redis_connection('history')
        sku_ids = redis_conn.lrange('history_%s' % request.user.id, 0, -1)
        # Resolve each sku_id to the display fields the frontend needs.
        skus = []
        for sku_id in sku_ids:
            sku = SKU.objects.get(id=sku_id)
            skus.append({
                'id': sku.id,
                'name': sku.name,
                'default_image_url': sku.default_image.url,
                'price': sku.price
            })
        return JsonResponse({'code': RETCODE.OK, 'errmsg': 'OK', 'skus': skus})
class FindPasswordView(View):
    """Render the password-recovery page."""

    def get(self,request):
        return render(request,'find_password.html')
class Form_1_On_Submit(View):
    """Password-recovery step 1: verify the image captcha for a username and
    return the user's mobile plus an access token."""

    def get(self, request, username, user=None):  # NOTE(review): `user` parameter is unused
        data = request.GET
        text = data.get('text')
        image_code_id = data.get('image_code_id')
        if not all([text]):
            return HttpResponseBadRequest('参数不全')
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            return HttpResponseBadRequest('用户不存在')
        redis_conn = get_redis_connection('code')
        # NOTE(review): get() returns None once the captcha expires, so
        # .decode() would raise AttributeError — confirm expiry handling.
        check_code = redis_conn.get('img_%s'%image_code_id).decode()
        # Captcha comparison is case-insensitive.
        if check_code.lower() != text.lower():
            return HttpResponseBadRequest('图片验证码错误')
        mobile = user.mobile
        access_token = generic_access_token_url(user.username,user.mobile)
        return JsonResponse({'mobile':mobile,'access_token':access_token})
class Form_2_On_Submit(View):
    """Password-recovery step 2: verify the SMS code sent to the user's mobile."""

    def get(self,request,username):
        sms_code = request.GET.get('sms_code')
        try:
            user = User.objects.get(username=username)
            mobile = user.mobile
        except User.DoesNotExist:
            return HttpResponseBadRequest('用户不存在')
        redis_conn = get_redis_connection('code')
        code = redis_conn.get('find_sms_%s'%mobile)
        # NOTE(review): `code` is None once the SMS code expires, and
        # int(None) raises TypeError — confirm expiry handling.
        if int(sms_code) != int(code):
            return HttpResponseBadRequest('验证码错误')
        access_token = generic_access_token_url(user.username, user.mobile)
        return JsonResponse({
            'user_id': user.id,
            'access_token': access_token,
        })
class FindChangePasswordView(View):
    """Password-recovery final step: set a new password for user `userid`."""

    def post(self,request,userid):
        data = json.loads(request.body.decode())
        new_password = data.get('password')
        re_password = data.get('password2')
        # NOTE(review): access_token is read but never verified before the
        # password is changed — confirm it is validated elsewhere, otherwise
        # this endpoint allows resetting any user's password.
        access_token = data.get('access_token')
        if new_password != re_password:
            return HttpResponseBadRequest('输入不一致')
        try:
            user = User.objects.get(id=userid)
            user.set_password(new_password)
            user.save()
        except Exception:
            return HttpResponseBadRequest('失败')
        return JsonResponse({'message':'ok'})
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: variable
Author: ken
CreateDate: 5/21/2018 AD
Description:
-------------------------------------------------
"""
__author__ = 'ken'
|
nilq/baby-python
|
python
|
import chess
from .external_chess_player import ExternalChessPlayer
# Maximum number of illegal moves tolerated before giving up on the player.
MAX_RETRIES = 3


class ChessPlayer(object):
    """Thin wrapper around an ExternalChessPlayer that validates its moves."""

    def __init__(self, external_player):
        """
        :param external_player:
        :type external_player: ExternalChessPlayer
        """
        self.ext_player = external_player

    def end_game(self, board):
        # Forward the end-of-game notification to the external player.
        self.ext_player.end_game(board)

    def send_move_uci(self, uci_move):
        # Forward the opponent's move to the external player.
        self.ext_player.send_move_uci(uci_move)

    def make_move_uci(self, board):
        """Return (True, move) on success or (False, exception) on failure.

        BUG FIX: the original called try_get_uci_move() without `board`
        (TypeError on every call) and returned the Exception *class* rather
        than the caught exception instance.
        """
        try:
            return True, self.try_get_uci_move(board)
        except Exception as exc:
            return False, exc

    def try_get_uci_move(self, board):
        """Ask the external player for a legal move, retrying up to MAX_RETRIES.

        BUG FIX: the original never decremented the retry counter, so a
        player that kept producing illegal moves caused an infinite loop.

        :raises RuntimeError: if every attempt produced an illegal move.
        """
        tries = MAX_RETRIES
        while tries > 0:
            move = self.ext_player.make_move_uci(board)
            if move in board.legal_moves:
                return move
            tries -= 1
        raise RuntimeError("Too many bad moves")
|
nilq/baby-python
|
python
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rc
from meanfield import MeanField
def d_tanh(x):
    """Derivative of tanh, i.e. sech(x)^2 = 1 / cosh(x)^2."""
    c = np.cosh(x)
    return 1. / (c * c)
def simple_plot(x, y):
    """Quick-look plot of the critical line sigma_b^2 vs sigma_w^2.

    BUG FIX: the axis labels are raw strings now; the non-raw originals
    contained the invalid escape '\\s', which raises a SyntaxWarning (and is
    slated to become an error) on modern Python.
    """
    plt.plot(x, y)
    plt.xlim(0.5, 3)
    plt.ylim(0, 0.25)
    plt.xlabel(r'$\sigma_\omega^2$', fontsize=16)
    plt.ylabel(r'$\sigma_b^2$', fontsize=16)
    plt.show()
def plot(x, y):
    """Create the publication figure: the phase diagram with the critical
    line separating the ordered and chaotic phases.

    BUG FIX: all TeX labels are raw strings (the non-raw '$\\sigma...' forms
    trigger invalid-escape SyntaxWarnings); the duplicated, immediately
    overridden xlabel/ylabel calls and the redundant `fontsize = 12`
    reassignment were removed.
    """
    fontsize = 12
    plt.figure(figsize=(4, 3.1))
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    plt.rc('xtick', labelsize=int(fontsize / 1.5))
    plt.rc('ytick', labelsize=int(fontsize / 1.5))
    # plot critical line
    plt.plot(x, y, linewidth=2, color='black')
    # plot dashed guide lines for sb = 0.05
    x_c = np.interp(0.05, y, x)  # ~1.7603915227624916
    line_dict = dict(linewidth=1.5, linestyle='dashed', color='black')
    plt.plot([0.5, x_c], [0.05, 0.05], **line_dict)
    plt.plot([x_c, x_c], [0.00, 0.05], **line_dict)
    # fill ordered and chaotic phase regions
    plt.fill_betweenx(y, x, 3.0, facecolor='#ffdad3')
    plt.fill_betweenx(y, 0.5, x, facecolor='#d3e4ff')
    # axis settings
    plt.xlim(0.5, 3)
    plt.ylim(0, 0.25)
    plt.xlabel(r'$\sigma_w^2$', fontsize=fontsize)
    plt.ylabel(r'$\sigma_b^2$', fontsize=fontsize)
    # add phase annotations
    text_dict = dict(fontsize=fontsize,
                     horizontalalignment='center',
                     verticalalignment='center')
    plt.text(1.25, 0.15, r'\textbf{Ordered Phase}', **text_dict)
    plt.text(1.25, 0.125, r'$\max(\chi_{q^*}, \chi_{c^*}) < 1$', **text_dict)
    plt.text(2.475, 0.08, r'\textbf{Chaotic Phase}', **text_dict)
    plt.text(2.475, 0.055, r'$\max(\chi_{q^*}, \chi_{c^*}) > 1$', **text_dict)
    # show plot
    plt.tight_layout()
    plt.show()
if __name__ == "__main__":
    # Run the mean-field experiment over a range of fixed-point q values
    # and plot the resulting (sigma_w^2, sigma_b^2) critical line.
    mf = MeanField(np.tanh, d_tanh)
    qrange = np.linspace(1e-5, 2.25, 50)
    sw_sbs = [mf.sw_sb(q, 1.0) for q in qrange]
    sw = [sw_sb[0] for sw_sb in sw_sbs]
    sb = [sw_sb[1] for sw_sb in sw_sbs]
    # for simplified figure
    simple_plot(sw, sb)
    # for creating the actual figure in the paper.
    plot(sw, sb)
|
nilq/baby-python
|
python
|
from tests.conftest import JiraTestCase
class PrioritiesTests(JiraTestCase):
    """Smoke tests for the JIRA priorities endpoints."""

    def test_priorities(self):
        all_priorities = self.jira.priorities()
        self.assertEqual(len(all_priorities), 5)

    def test_priority(self):
        high = self.jira.priority("2")
        self.assertEqual(high.id, "2")
        self.assertEqual(high.name, "High")
|
nilq/baby-python
|
python
|
"""
Please implement a test for the method `compute_phenotype_similarity()`.
- The details are up to you — use whatever testing framework you prefer (e.g. pytest).
|
nilq/baby-python
|
python
|
# Aula 10 - Desafio 31: Custo da viagem
# Ask for the trip distance, then:
#  - up to 200 km: the ticket costs R$0.50 per km
#  - above 200 km: the ticket costs R$0.45 per km
# Generalized: accept fractional distances (float instead of int); integer
# input such as "200" still parses the same way.
d = float(input('Informe a distancia em Km da sua viagem: '))
if d <= 200:
    print(f'O preço da passagem eh de R${d*0.5:.2f}')
else:
    print(f'O preço da passagem eh de R${d*0.45:.2f}')
'''
# Outra maneira
preço = d * 0.5 if d <= 200 else d * 0.45
print(f'O preço da passagem eh de R${preço:.2f}')
'''
|
nilq/baby-python
|
python
|
import os,shutil
from .ExtensibleFileObject import ExtensibleFileObject
def file_list_dedup(file_list):
    """Return *file_list* without duplicates, keeping first-seen order."""
    # dict.fromkeys preserves insertion order, giving first-occurrence dedup
    # in a single pass.
    return list(dict.fromkeys(file_list))
def relpath(a,b):
    # Stub: intended to compute the relative path between a and b; not implemented yet.
    pass
def check_vfile(func):
    # Stub decorator: intended to validate a Verilog file; not implemented yet.
    pass
def refresh_directory(path):
    """Recreate *path* as an empty directory, discarding any previous contents."""
    if os.path.exists(path):
        shutil.rmtree(path)  # drop the old tree before recreating
    os.makedirs(path)
def create_file(path, text):
    """Write *text* (a string or an iterable of lines) to *path* via
    ExtensibleFileObject, replacing any existing file. Returns *path*.

    Cleanup: removed the commented-out plain-file writing code left over
    from a previous implementation.
    """
    text = [text] if isinstance(text, str) else list(text)
    if os.path.exists(path):
        os.remove(path)
    fo = ExtensibleFileObject(keyword='UHDL')
    fo.write('\n'.join(text))
    fo.write_version('1.0.1')
    fo.save(path=path)
    return path
if __name__ == "__main__":
    # Ad-hoc manual check: write a one-line file via create_file.
    #ListProcess.relpath('a/b/c','d/e/f')
    create_file('./test.v',['456'])
|
nilq/baby-python
|
python
|
import nuke

# Register every Pixelfudger gizmo under a dedicated "Nodes" sub-menu.
# The command entries are identical apart from the node name, so they are
# generated from one list instead of twelve copy-pasted calls.
_PXF_NODES = (
    "PxF_Bandpass", "PxF_ChromaBlur", "PxF_Distort", "PxF_Erode",
    "PxF_Filler", "PxF_Grain", "PxF_HueSat", "PxF_IDefocus",
    "PxF_KillSpill", "PxF_Line", "PxF_MergeWrap", "PxF_ScreenClean",
)

nodes_menu = nuke.menu("Nodes")
nodes_menu.addMenu("Pixelfudger", icon="PxF_Menu.png")
for _node in _PXF_NODES:
    nodes_menu.addCommand("Pixelfudger/%s" % _node,
                          "nuke.createNode('%s')" % _node,
                          icon="%s.png" % _node)
|
nilq/baby-python
|
python
|
# Scrapy settings for the naver_movie crawler.
BOT_NAME = 'naver_movie'
SPIDER_MODULES = ['naver_movie.spiders']
NEWSPIDER_MODULE = 'naver_movie.spiders'
# Crawl regardless of robots.txt rules.
ROBOTSTXT_OBEY = False
# Throttle: wait 2 seconds between requests to the same site.
DOWNLOAD_DELAY = 2
COOKIES_ENABLED = True
# Sent with every request unless overridden by the spider.
DEFAULT_REQUEST_HEADERS = {
    "Referer": "https://movie.naver.com/"
}
# Replace the stock user-agent/retry middlewares with scrapy-fake-useragent's
# randomizing equivalents (lower number = closer to the engine).
DOWNLOADER_MIDDLEWARES = {
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
    'scrapy.downloadermiddlewares.retry.RetryMiddleware': None,
    'scrapy_fake_useragent.middleware.RandomUserAgentMiddleware': 400,
    'scrapy_fake_useragent.middleware.RetryUserAgentMiddleware': 401,
}
RETRY_ENABLED = True
RETRY_TIMES = 2
# Pipeline that stores the scraped movie items.
ITEM_PIPELINES = {
    'naver_movie.pipelines.NaverMoviePipeline': 300,
}
|
nilq/baby-python
|
python
|
import asyncio
import uvloop
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from scrapper import scrap

# Use uvloop's faster event-loop implementation for every loop created below.
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

if __name__ == "__main__":
    # Run `scrap` every 5 seconds on the asyncio scheduler until Ctrl-C
    # (or SystemExit) stops the process.
    scheduler = AsyncIOScheduler()
    scheduler.add_job(scrap, 'interval', seconds=5)
    scheduler.start()
    try:
        asyncio.get_event_loop().run_forever()
    except (KeyboardInterrupt, SystemExit):
        pass
|
nilq/baby-python
|
python
|
from ..classes import WorkflowAction
class TestWorkflowAction(WorkflowAction):
    """Minimal WorkflowAction used by the test suite to prove an action ran."""
    label = 'test workflow state action'

    def execute(self, context):
        # Leave a marker attribute on the workflow instance so tests can
        # assert that this action was actually executed.
        context['workflow_instance']._workflow_state_action_executed = True
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by Lucas Sinclair and Paul Rougieux.
JRC biomass Project.
Unit D1 Bioeconomy.
"""
# Built-in modules #
import os
# Third party modules #
# First party modules #
from autopaths import Path
from autopaths.auto_paths import AutoPaths
from autopaths.dir_path import DirectoryPath
from plumbing.cache import property_cached
from plumbing.databases.access_database import AccessDatabase
# Internal modules #
from cbmcfs3_runner.pump.dataframes import multi_index_pivot
# Constants #
# Location where the CBM-CFS3 installation keeps its ArchiveIndex database;
# `AIDB.switch` overwrites this file with a country-specific European AIDB.
default_path = "C:/Program Files (x86)/Operational-Scale CBM-CFS3/Admin/DBs/ArchiveIndex_Beta_Install.mdb"
default_path = Path(default_path)
###############################################################################
class AIDB(object):
    """
    This class enables us to switch the famous "ArchiveIndexDatabase", between
    the Canadian standard and the European standard.
    It also provides access to the data within this database.
    """

    # Sub-paths resolved below the parent's data directory #
    all_paths = """
    /orig/aidb_eu.mdb
    """

    def __init__(self, parent):
        """Record the parent runner and resolve this country's AIDB path."""
        # Default attributes #
        self.parent = parent
        # Automatically access paths based on a string of many subpaths #
        self.paths = AutoPaths(self.parent.data_dir, self.all_paths)

    def __repr__(self):
        return "%s object at '%s'" % (self.__class__, self.paths.aidb)

    def switch(self):
        """Overwrite the installed CBM-CFS3 AIDB with this country's copy."""
        default_path.remove()
        self.paths.aidb.copy(default_path)

    @property_cached
    def database(self):
        """Handle on the MS-Access database, with snake_case column names."""
        database = AccessDatabase(self.paths.aidb)
        database.convert_col_names_to_snake = True
        return database

    @property_cached
    def dm_table(self):
        """Main disturbance matrix."""
        # Load #
        df = self.database['tblDM']
        # Rename #
        df = df.rename(columns={"name": "dist_desc_dm",
                                "description": "dist_desc_long"})
        # Return #
        return df

    @property_cached
    def source(self):
        """Name of source pools."""
        # Load #
        df = self.database['tblSourceName']
        # Rename #
        df = df.rename(columns={'row': 'dm_row',
                                'description': 'row_pool'})
        # Return #
        return df

    @property_cached
    def sink(self):
        """Name of sink pools."""
        # Load #
        df = self.database['tblSinkName']
        # Rename #
        df = df.rename(columns={'column': 'dm_column',
                                'description': 'column_pool'})
        # Return #
        return df

    @property_cached
    def lookup(self):
        """Proportion by source and sink."""
        # Load #
        df = self.database['tblDMValuesLookup']
        # Return #
        return df

    @property_cached
    def dist_type_default(self):
        """Link between dist_type_id and dist_desc_aidb."""
        # Load #
        df = self.database['tbldisturbancetypedefault']
        # Rename #
        df = df.rename(columns = {'dist_type_name': 'dist_desc_aidb'})
        # Return #
        return df

    @property_cached
    def dm_assoc_default(self):
        """
        Link between default_dist_type_id, default_ec_id, and dmid
        Pay attention to the tricky annual_order which might generate
        errors in some cases (see also libcbm aidb import efforts)
        Shape in the EU AIDB: 110180 rows × 6 columns
        """
        # Load #
        df = self.database['tbldmassociationdefault']
        # Rename #
        # TODO, check if dist_type_id is exactly the correct name
        df = df.rename(columns = {'default_disturbance_type_id': 'dist_type_id',
                                  'name': 'assoc_name',
                                  'description': 'assoc_desc'})
        # Return #
        return df

    @property_cached
    def dm_assoc_default_short(self):
        """Same as above but with any "Annual order" > 1 dropped."""
        # Load #
        df = self.dm_assoc_default
        # Collapse #
        df = df.query("annual_order < 2").copy()
        # Check that the combination of dist_type_id and dmid
        # is unique on dist_type_id
        a = len(set(df['dist_type_id']))
        b = len(df[['dmid', 'dist_type_id']].drop_duplicates())
        assert a == b
        # Keep only a couple columns #
        df = df[['dmid', 'dist_type_id']].drop_duplicates()
        # Return #
        return df

    @property_cached
    def dm_assoc_spu_default(self):
        """
        Link between default_dist_type_id, spuid and dmid.
        Warning, it contains only wildfire distances in the EU AIDB.
        Shape in the EU aidb: 920 rows × 6 columns
        """
        # Load #
        df = self.database['tbldmassociationspudefault']
        # Rename
        # TODO check if dist_type_id is exactly the correct name
        df = df.rename(columns = {'default_disturbance_type_id': 'dist_type_id',
                                  'name': 'spu_name',
                                  'description': 'spu_desc'})
        # Return #
        return df

    @property_cached
    def dist_matrix_long(self):
        """
        Recreates the disturbance matrix in long format.
        Join lookup and the disturbance matrix table 'tblDM',
        Then join source and sink to add description of the origin and destination pools.
        To be continued based on /notebooks/disturbance_matrix.ipynb
        There is a many-to-one relationship between dist_type_name and dmid
        (disturbance matrix id),
        i.e for each dist_type_name there is one and only one dmid.
        The opposite is not true, as there are more dist_type_name than dmid.
        Columns are:
        ['dist_desc_input', 'dist_desc_aidb', 'dist_type_id', 'dmid',
         'dm_column', 'dm_structure_id', 'dm_row', 'proportion', 'dist_desc_dm',
         'dist_desc_long', 'row_pool', 'column_pool', 'on_off_switch',
         'description', 'is_stand_replacing', 'is_multi_year',
         'multi_year_count', 'dist_type_name'],
        """
        # Load tables from the aidb #
        dm_table = self.dm_table
        source = self.source
        sink = self.sink
        lookup = self.lookup
        assoc_short = self.dm_assoc_default_short
        dist_default = self.dist_type_default
        # Load tables from orig_data #
        map_disturbance = self.parent.associations.map_disturbance
        dist_types = self.parent.orig_data.disturbance_types
        # Join lookup and dm_table to add the description for each `dmid` #
        dm_lookup = (lookup
                     .set_index('dmid')
                     .join(dm_table.set_index('dmid'))
                     .reset_index())
        # Indexes #
        index_source = ['dm_row', 'dm_structure_id']
        index_sink = ['dm_column', 'dm_structure_id']
        # Add source and sink descriptions #
        df = (dm_lookup.set_index(index_source)
              .join(source.set_index(index_source))
              .reset_index()
              .set_index(index_sink)
              .join(sink.set_index(index_sink))
              .reset_index())
        # Add 'dist_type_name' corresponding to orig/disturbance_types.csv
        df = df.left_join(assoc_short, 'dmid')
        df = df.left_join(dist_default, 'dist_type_id')
        df = df.left_join(map_disturbance, 'dist_desc_aidb')
        df = df.left_join(dist_types, 'dist_desc_input')
        # Return #
        return df

    @property_cached
    def dist_matrix(self):
        """
        The disturbance matrix is reshaped in the form of a matrix
        with source pools in rows and sink pools in columns.
        """
        # Load #
        df = self.dist_matrix_long.copy()
        # Make pool description columns suitable as column names #
        # Adds a number at the end of the disturbance name #
        df['row_pool'] = (df['row_pool'].str.replace(' ', '_') + '_' +
                          df['dm_row'].astype(str))
        df['column_pool'] = (df['column_pool'].str.replace(' ','_') + '_' +
                             df['dm_column'].astype(str))
        # Filter proportions #
        # TODO correct missing name from the index (see HU for example)
        index = ['dmid', 'dm_structure_id', 'dm_row', 'name', 'row_pool']
        df = (df
              .set_index(index)
              .query('proportion>0'))
        # Pivot #
        df = multi_index_pivot(df, columns='column_pool', values='proportion')
        # Reorder columns by the last digit number
        col_order = sorted(df.columns,
                           key=lambda x: str(x).replace("_", "0")[-2:])
        # Exclude index columns from the re-ordering of columns
        # NOTE(review): `[:-5]` presumably drops the five index columns that
        # reappear at the end of col_order — confirm against the pivot output.
        df = df.set_index(index)[col_order[:-5]].reset_index()
        # Return #
        return df

    @property_cached
    def merch_biom_rem(self):
        """
        Retrieve the percentage of merchantable biomass removed
        from every different disturbance type used in the silviculture
        treatments.
        The column "perc_merch_biom_rem" comes from silviculture.csv
        The column "proportion" comes from aidb.mdb and multiple joins.
        """
        # Load #
        df = self.dist_matrix_long
        dist_types = self.parent.orig_data.disturbance_types
        treats = self.parent.silviculture.treatments
        # Filter dist_mat to take only disturbances that are actually used #
        selector = df['dist_type_name'].isin(dist_types['dist_type_name'])
        df = df[selector].copy()
        # Take only products #
        df = df.query("column_pool == 'products'")
        df = df.query("row_pool == 'Softwood merchantable' or row_pool == 'Hardwood merch'")
        # Join #
        df = treats.left_join(df, 'dist_type_name')
        # Take columns of interest #
        cols = ['dist_type_name', 'perc_merch_biom_rem', 'dist_desc_aidb', 'row_pool', 'proportion']
        df = df[cols]
        # Compute difference #
        df['diff']= df['perc_merch_biom_rem'] - df['proportion']
        # NaNs appear because of natural disturbances #
        df = df.fillna(0)
        # Check #
        # Silviculture and AIDB proportions must agree to within 0.1% #
        assert all(df['diff'].abs() < 1e-3)
        # Return #
        return df

    @property_cached
    def dmid_map(self):
        """Map the dist_type_name to its dmid for the current country.
        Only returns the unique available combinations
        of dmid and dist_type_name.
        Note two dist_type_name can map to the same dmid.
        Columns:
        ['dist_type_name', 'dmid', 'dist_desc_aidb']
        """
        # Load #
        dist_mat = self.dist_matrix_long
        # Keep only two columns #
        columns_of_interest = ['dist_type_name', 'dmid', 'dist_desc_aidb']
        df = dist_mat[columns_of_interest].drop_duplicates()
        # Check #
        #assert not any(df['dmid'] == numpy.nan)
        # Return #
        return df

    #-------------------------- Special Methods ------------------------------#
    def symlink(self):
        """Link the shared AIDB repository copy into this country's data dir."""
        # Where is the data, default case #
        aidb_repo = DirectoryPath("~/repos/libcbm_aidb/")
        # But you can override that with an environment variable #
        if os.environ.get("CBMCFS3_AIDB"):
            aidb_repo = DirectoryPath(os.environ['CBMCFS3_AIDB'])
        # The source #
        source = aidb_repo + self.parent.iso2_code + '/orig/aidb_eu.mdb'
        # Special case for ZZ #
        if self.parent.iso2_code == 'ZZ':
            source = aidb_repo + 'LU/orig/aidb_eu.mdb'
        # Check the AIDB exists #
        # NOTE(review): this asserts the truthiness of the path object, not
        # that the file exists on disk — confirm whether `source.exists`
        # was intended.
        assert source
        # The destination #
        destin = self.paths.aidb
        # Remove destination if it already exists #
        destin.remove()
        # Symlink #
        source.link_to(destin)
        # Return #
        return 'Symlink success for ' + self.parent.iso2_code + '.'
|
nilq/baby-python
|
python
|
import numpy as np
import interconnect
import copy
# # VARIABLES
N = 3001  # max clock cycles +1
FW = 16  # flit width
FPP = 32  # flits per packet (NOTE(review): not referenced below — confirm)
def get_header(FW=16):
    '''
    Generate a random header word for a flit width of *FW* bits.

    Returns a uniformly distributed integer in [0, 2**FW - 1].
    '''
    # np.random.random_integers was deprecated and later removed from NumPy.
    # randint's upper bound is exclusive, so (1 << FW) yields the same
    # inclusive range [0, (1 << FW) - 1] as the old call.
    return np.random.randint(0, 1 << FW)
# Input data: toggle the two load lines below to simulate the day-time
# pictures instead of the night-time ones (`day` flags which run this is).
# data, day = np.load('./videos/traffic_pictures_day.npz'), 1
data, day = np.load('./videos/traffic_pictures_night.npz'), 0
# data = np.load('./videos/traffic_features.npz')
sim = np.load('./res_simulator/sensors_to_memory.npz')
mux = sim['mux_matrices']
# One random header per clock cycle; headers are never coded.
DHs = [get_header(16) for i in range(int(N))]
D0s = data['pic1'].astype(int) # pixel samples
D1s = data['pic2'].astype(int)
D2s = data['pic3'].astype(int)
D3s = data['pic4'].astype(int)
D4s = data['pic5'].astype(int)
D5s = data['pic6'].astype(int)
# Pack two consecutive 8-bit pixels into one 16-bit flit.
D0s = np.add(D0s[0::2, :], (1 << 8)*D0s[1::2, :]) # attach two for flit
D1s = np.add(D1s[0::2, :], (1 << 8)*D1s[1::2, :])
D2s = np.add(D2s[0::2, :], (1 << 8)*D2s[1::2, :])
D3s = np.add(D3s[0::2, :], (1 << 8)*D3s[1::2, :])
D4s = np.add(D4s[0::2, :], (1 << 8)*D4s[1::2, :])
D5s = np.add(D5s[0::2, :], (1 << 8)*D5s[1::2, :])
ic2D = interconnect.Interconnect(B=16, wire_spacing=0.3e-6, # 2D IC
                                 wire_width=0.3e-6, wire_length=100e-6)
ic3D = interconnect.Interconnect(16, 0.6e-6, 0.3e-6, wire_length=0, # 3D IC
                                 TSVs=True, TSV_radius=2e-6, TSV_pitch=8e-6)
# Energy accumulators, one entry per coding scheme: golden bit-level
# simulation, proposed high-level model, and no-multiplexing reference.
E3dLink0bitlevel = []
E2dLink1bitlevel = []
E2dLink2bitlevel = []
E3dLink0highlevel = []
E2dLink1highlevel = []
E2dLink2highlevel = []
E3dLink0ref = []
E2dLink1ref = []
E2dLink2ref = []
# # MAIN PART
# NOTE(review): the comment below lists schemes 0-5 but the loop runs eight
# codings; presumably 6 and 7 apply the same two encoders in reversed order.
for coding in range(8):
    # run the simulation for
    # 0: NO-CODING; 1: NEGK1; 2: NEGK0
    # 3: NEGCORR; 4:NEG(K0+CORR); 5:NEG(K1+CORR)
    D_true = []
    cD = [] # counter for the different data types
    DH = interconnect.DataStream(np.copy(DHs), 16) # headers not coded
    D0 = interconnect.DataStream(D0s.flatten()[:N], 16) # DATA STREAMS UNCO
    D1 = interconnect.DataStream(D1s.flatten()[:N], 16)
    D2 = interconnect.DataStream(D2s.flatten()[:N], 16)
    D3 = interconnect.DataStream(D3s.flatten()[:N], 16)
    D4 = interconnect.DataStream(D4s.flatten()[:N], 16)
    D5 = interconnect.DataStream(D5s.flatten()[:N], 16)
    # coding correlated data streams
    if coding == 1:
        D0, D1 = D0.k0_encoded().invert, D1.k0_encoded().invert
        D2, D3 = D2.k0_encoded().invert, D3.k0_encoded().invert
        D4, D5 = D4.k0_encoded().invert, D5.k0_encoded().invert
    elif coding == 2:
        D0, D1 = D0.k1_encoded().invert, D1.k1_encoded().invert
        D2, D3 = D2.k1_encoded().invert, D3.k1_encoded().invert
        D4, D5 = D4.k1_encoded().invert, D5.k1_encoded().invert
    elif coding == 3:
        D0, D1 = D0.corr_encoded().invert, D1.corr_encoded().invert
        D2, D3 = D2.corr_encoded().invert, D3.corr_encoded().invert
        D4, D5 = D4.corr_encoded().invert, D5.corr_encoded().invert
    elif coding == 4:
        D0 = D0.k0_encoded().corr_encoded().invert
        D1 = D1.k0_encoded().corr_encoded().invert
        D2 = D2.k0_encoded().corr_encoded().invert
        D3 = D3.k0_encoded().corr_encoded().invert
        D4 = D4.k0_encoded().corr_encoded().invert
        D5 = D5.k0_encoded().corr_encoded().invert
    elif coding == 5:
        D0 = D0.k1_encoded().corr_encoded().invert
        D1 = D1.k1_encoded().corr_encoded().invert
        D2 = D2.k1_encoded().corr_encoded().invert
        D3 = D3.k1_encoded().corr_encoded().invert
        D4 = D4.k1_encoded().corr_encoded().invert
        D5 = D5.k1_encoded().corr_encoded().invert
    elif coding == 6:
        D0 = D0.corr_encoded().k0_encoded().invert
        D1 = D1.corr_encoded().k0_encoded().invert
        D2 = D2.corr_encoded().k0_encoded().invert
        D3 = D3.corr_encoded().k0_encoded().invert
        D4 = D4.corr_encoded().k0_encoded().invert
        D5 = D5.corr_encoded().k0_encoded().invert
    elif coding == 7:
        D0 = D0.corr_encoded().k1_encoded().invert
        D1 = D1.corr_encoded().k1_encoded().invert
        D2 = D2.corr_encoded().k1_encoded().invert
        D3 = D3.corr_encoded().k1_encoded().invert
        D4 = D4.corr_encoded().k1_encoded().invert
        D5 = D5.corr_encoded().k1_encoded().invert
    # # #
    # Replay the simulator's per-link transmission pattern to reconstruct
    # the exact word sequence on each link (the golden reference).
    for i in range(len(sim['links'])):
        d_link = [0] # data going over the link (init val 0)
        # copy of single data streams as list
        h = np.copy(DH.samples).tolist()
        d0, d1 = np.copy(D0.samples).tolist(), np.copy(D1.samples).tolist()
        d2, d3 = np.copy(D2.samples).tolist(), np.copy(D3.samples).tolist()
        d4, d5 = np.copy(D4.samples).tolist(), np.copy(D5.samples).tolist()
        d_list = [h, d0, d1, d2, d3, d4, d5]
        counter = [0, 0, 0, 0, 0, 0, 0]
        seq = sim['true_values'][i].astype(int) # pattern sequence
        for j in range(1, len(seq)):
            if seq[j] < 7:
                d_link.append(d_list[seq[j]].pop(0))
                counter[seq[j]] += 1
            else:
                # idle slot: the link holds its previous value
                d_link.append(d_link[-1])
        cD.append(counter)
        D_true.append(interconnect.DataStream(d_link, 16))
    # High-level model: per-link probabilistic mux of the source streams.
    D_mux0 = interconnect.DataStreamProb([DH[:cD[0][0]], D0[:cD[0][1]],
                                          D1[:cD[0][2]], D2[:cD[0][3]],
                                          D3[:cD[0][4]], D4[:cD[0][5]],
                                          D5[:cD[0][6]]], mux[0])
    D_mux1 = interconnect.DataStreamProb([DH[:cD[1][0]], D0, # D0-D2 not trans
                                          D1, D2,
                                          D3[:cD[1][4]], D4[:cD[1][5]],
                                          D5[:cD[1][6]]], mux[1])
    D_mux2 = interconnect.DataStreamProb([DH[:cD[2][0]], D0,
                                          D1, D2, # only D3 transmitted
                                          D3[:cD[2][4]], D4, D5], mux[2])
    # Reference: streams simply concatenated, no multiplexing on the link.
    D_noMux0 = copy.deepcopy(DH[:cD[0][0]])
    D_noMux0.append(D0[:cD[0][1]])
    D_noMux0.append(D1[:cD[0][2]])
    D_noMux0.append(D2[:cD[0][3]])
    D_noMux0.append(D3[:cD[0][4]])
    D_noMux0.append(D4[:cD[0][5]])
    D_noMux0.append(D5[:cD[0][6]])
    D_noMux1 = copy.deepcopy(DH[:cD[1][0]])
    D_noMux1.append(D3[:cD[1][4]])
    D_noMux1.append(D4[:cD[1][5]])
    D_noMux1.append(D5[:cD[1][6]])
    D_noMux2 = copy.deepcopy(DH[:cD[2][0]])
    D_noMux2.append(D3[:cD[2][4]])
    # golden values (bit-level sim)
    E3dLink0bitlevel.append(ic3D.E(D_true[0]))
    E2dLink1bitlevel.append(ic2D.E(D_true[1]))
    E2dLink2bitlevel.append(ic2D.E(D_true[2]))
    # proposed high-level model
    E3dLink0highlevel.append(ic3D.E(D_mux0))
    E2dLink1highlevel.append(ic2D.E(D_mux1))
    E2dLink2highlevel.append(ic2D.E(D_mux2))
    # ref bit level
    E3dLink0ref.append(ic3D.E(D_noMux0))
    E2dLink1ref.append(ic2D.E(D_noMux1))
    E2dLink2ref.append(ic2D.E(D_noMux2))
# Stash this run's results under day/night names; the totals below are only
# formed once both runs exist (the script is meant to be executed twice,
# toggling the `data, day = ...` line at the top).
if day == 0:
    E3dLink0bitlevel_night = E3dLink0bitlevel
    E2dLink1bitlevel_night = E2dLink1bitlevel
    E2dLink2bitlevel_night = E2dLink2bitlevel
    E3dLink0highlevel_night = E3dLink0highlevel
    E2dLink1highlevel_night = E2dLink1highlevel
    E2dLink2highlevel_night = E2dLink2highlevel
    E3dLink0ref_night = E3dLink0ref
    E2dLink1ref_night = E2dLink1ref
    E2dLink2ref_night = E2dLink2ref
else:
    E3dLink0bitlevel_day = E3dLink0bitlevel
    E2dLink1bitlevel_day = E2dLink1bitlevel
    E2dLink2bitlevel_day = E2dLink2bitlevel
    E3dLink0highlevel_day = E3dLink0highlevel
    E2dLink1highlevel_day = E2dLink1highlevel
    E2dLink2highlevel_day = E2dLink2highlevel
    E3dLink0ref_day = E3dLink0ref
    E2dLink1ref_day = E2dLink1ref
    E2dLink2ref_day = E2dLink2ref
if 'E3dLink0ref_day' in locals() and 'E3dLink0ref_night' in locals():
    # NOTE(review): `cD` holds whatever the last coding iteration produced —
    # presumably the packet count is the same for every coding; confirm.
    packages = 2*sum(cD[0])/32
    E3dLink0ref_tot = (N/packages)*(np.array(E3dLink0ref_day)+np.array(E3dLink0ref_night))
    E2dLink1ref_tot = (N/packages)*(np.array(E2dLink1ref_day)+np.array(E2dLink1ref_night))
    E2dLink2ref_tot = (N/packages)*(np.array(E2dLink2ref_day)+np.array(E2dLink2ref_night))
    E3dLink0bitlevel_tot = (N/packages)*(np.array(E3dLink0bitlevel_day)+np.array(E3dLink0bitlevel_night))
    E2dLink1bitlevel_tot = (N/packages)*(np.array(E2dLink1bitlevel_day)+np.array(E2dLink1bitlevel_night))
    E2dLink2bitlevel_tot = (N/packages)*(np.array(E2dLink2bitlevel_day)+np.array(E2dLink2bitlevel_night))
    E3dLink0highlevel_tot = (N/packages)*(np.array(E3dLink0highlevel_day)+np.array(E3dLink0highlevel_night))
    E2dLink1highlevel_tot = (N/packages)*(np.array(E2dLink1highlevel_day)+np.array(E2dLink1highlevel_night))
    E2dLink2highlevel_tot = (N/packages)*(np.array(E2dLink2highlevel_day)+np.array(E2dLink2highlevel_night))
|
nilq/baby-python
|
python
|
from py_db import db
import NSBL_helpers as helper
# Re-computes the team hitting tables
# NOTE(review): rebinding `db` to a connection instance shadows the imported
# `db` class for the rest of this module.
db = db('NSBL')
def process():
    """Rebuild processed_team_hitting_{basic,advanced} for every season."""
    print "processed_team_hitting"
    # Start from empty tables; rows are regenerated below.
    db.query("TRUNCATE TABLE `processed_team_hitting_basic`")
    db.query("TRUNCATE TABLE `processed_team_hitting_advanced`")
    # Season range taken from the pitching averages table — presumably it
    # spans the same seasons as the batting registers (TODO confirm).
    yr_min, yr_max = db.query("SELECT MIN(year), MAX(year) FROM processed_league_averages_pitching")[0]
    for year in range(yr_min, yr_max+1):
        for _type in ('basic', 'advanced'):
            print str(year) + "\thitting\t" + _type
            table = 'processed_team_hitting_%s' % (_type)
            if _type == 'basic':
                entries = process_basic(year)
            elif _type == 'advanced':
                entries = process_advanced(year)
            if entries != []:
                # replace=True upserts on the table's unique key.
                db.insertRowDict(entries, table, replace=True, insertMany=True, rid=0)
            db.conn.commit()
def process_basic(year):
    """Return per-team basic hitting rows (avg/obp/slg plus counting stats)
    for *year*, aggregated from the player-level batting register."""
    entries = []
    qry = """SELECT
    r.team_abb,
    SUM(pa), SUM(ab), SUM(h), SUM(2B), SUM(3b), SUM(Hr), SUM(r), SUM(rbi), SUM(hbp), SUM(bb), SUM(k), SUM(sb), SUM(cs)
    FROM register_batting_primary r
    JOIN processed_compWAR_offensive o USING (player_name, team_abb, YEAR)
    JOIN processed_WAR_hitters w USING (pa, player_name, team_abb, YEAR)
    WHERE r.year = %s
    GROUP BY r.team_abb;"""
    query = qry % (year)
    res = db.query(query)
    for row in res:
        team_abb, pa, ab, h, _2, _3, hr, r, rbi, hbp, bb, k, sb, cs = row
        entry = {}
        entry["year"] = year
        entry["team_abb"] = team_abb
        # Singles are hits that were not extra-base hits.
        _1 = h - _2 - _3 - hr
        avg = float(h)/float(ab)
        # OBP approximated as (H + BB + HBP) / PA.
        obp = (float(h)+float(bb)+float(hbp))/float(pa)
        # BUGFIX: slugging percentage is total bases per AT-BAT; the old
        # code divided by PA, understating SLG whenever BB/HBP > 0.
        slg = (float(_1)+2*float(_2)+3*float(_3)+4*float(hr))/float(ab)
        entry["avg"] = avg
        entry["obp"] = obp
        entry["slg"] = slg
        entry["pa"] = pa
        entry["ab"] = ab
        entry["h"] = h
        entry["2b"] = _2
        entry["3b"] = _3
        entry["hr"] = hr
        entry["r"] = r
        entry["rbi"] = rbi
        entry["hbp"] = hbp
        entry["bb"] = bb
        entry["k"] = k
        entry["sb"] = sb
        entry["cs"] = cs
        entries.append(entry)
    return entries
def process_advanced(year):
    """Return per-team advanced hitting rows (wOBA, wRC, OPS+, ...) for *year*,
    PA-weighting the rate stats and summing the counting stats."""
    qry = """SELECT
    r.team_abb, SUM(pa), SUM(pf*pa)/SUM(pa), SUM(wOBA*pa)/SUM(pa), SUM(park_wOBA*pa)/SUM(pa), SUM(OPS*pa)/SUM(pa), SUM(OPS_plus*pa)/SUM(pa), SUM(babip*pa)/SUM(pa), SUM(wRC), SUM(wRC_27*pa)/SUM(pa), SUM(wRC_plus*pa)/SUM(pa), SUM(rAA), SUM(w.oWAR)
    FROM register_batting_primary r
    JOIN processed_compWAR_offensive o USING (player_name, team_abb, YEAR)
    JOIN processed_WAR_hitters w USING (pa, player_name, team_abb, YEAR)
    WHERE r.year = %s
    GROUP BY r.team_abb;"""
    entries = []
    for row in db.query(qry % (year)):
        (team_abb, pa, pf, woba, park_woba, ops, ops_plus, babip,
         wrc, wrc_27, wrc_plus, raa, owar) = row
        entries.append({
            "year": year,
            "team_abb": team_abb,
            "pa": pa,
            "pf": pf,
            "wOBA": woba,
            "park_wOBA": park_woba,
            "OPS": ops,
            "OPS_plus": ops_plus,
            "babip": babip,
            "wRC": wrc,
            "wRC_27": wrc_27,
            "wRC_plus": wrc_plus,
            "rAA": raa,
            "oWAR": owar,
        })
    return entries
if __name__ == "__main__":
    # Rebuild the team hitting tables when run as a script.
    process()
|
nilq/baby-python
|
python
|
"""
The MIT License (MIT)
Copyright (c) 2020-Current Skelmis
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, List, AsyncIterable, Dict
from attr import asdict
import orjson as json
from antispam.abc import Cache
from antispam.enums import ResetType
from antispam.exceptions import GuildNotFound, MemberNotFound
from antispam.dataclasses import Message, Member, Guild, Options
if TYPE_CHECKING:
from redis import asyncio as aioredis
from antispam import AntiSpamHandler
log = logging.getLogger(__name__)
class RedisCache(Cache):
    """
    A cache backend built to use Redis.

    Guilds are stored under ``GUILD:{id}`` and members under
    ``MEMBER:{guild_id}:{member_id}``; a guild's JSON keeps only the ids of
    its members, which are rehydrated on read.

    Parameters
    ----------
    handler: AntiSpamHandler
        The AntiSpamHandler instance
    redis: redis.asyncio.Redis
        Your redis connection instance.
    """

    def __init__(self, handler: AntiSpamHandler, redis: aioredis.Redis):
        self.redis: aioredis.Redis = redis
        self.handler: AntiSpamHandler = handler

    async def get_guild(self, guild_id: int) -> Guild:
        """Return the cached guild with Options and Members rehydrated.

        Raises ``GuildNotFound`` when no entry exists for *guild_id*.
        """
        log.debug("Attempting to return cached Guild(id=%s)", guild_id)
        resp = await self.redis.get(f"GUILD:{guild_id}")
        if not resp:
            raise GuildNotFound

        as_json = json.loads(resp.decode("utf-8"))
        guild: Guild = Guild(**as_json)
        # This is actually a dict here
        guild.options = Options(**guild.options)  # type: ignore
        # Stored as a list of member ids; fetch each member separately.
        guild_members: Dict[int, Member] = {}
        for member_id in guild.members:  # type: ignore
            member: Member = await self.get_member(member_id, guild_id)
            guild_members[member.id] = member
        guild.members = guild_members
        return guild

    async def set_guild(self, guild: Guild) -> None:
        """Persist *guild*, storing its members as separate keys."""
        log.debug("Attempting to set Guild(id=%s)", guild.id)
        # Store members separate
        for member in guild.members.values():
            await self.set_member(member)

        # Only member ids are serialized on the guild itself.
        guild.members = [member.id for member in guild.members.values()]
        as_json = json.dumps(asdict(guild, recurse=True))
        await self.redis.set(f"GUILD:{guild.id}", as_json)

    async def delete_guild(self, guild_id: int) -> None:
        """Delete the guild key (member keys are left untouched)."""
        log.debug("Attempting to delete Guild(id=%s)", guild_id)
        await self.redis.delete(f"GUILD:{guild_id}")

    async def get_member(self, member_id: int, guild_id: int) -> Member:
        """Return the cached member with its Message list rehydrated.

        Raises ``MemberNotFound`` when no entry exists.
        """
        log.debug(
            "Attempting to return a cached Member(id=%s) for Guild(id=%s)",
            member_id,
            guild_id,
        )
        resp = await self.redis.get(f"MEMBER:{guild_id}:{member_id}")
        if not resp:
            raise MemberNotFound

        as_json = json.loads(resp.decode("utf-8"))
        member: Member = Member(**as_json)
        messages: List[Message] = []
        for message in member.messages:
            messages.append(Message(**message))  # type: ignore
        member.messages = messages
        return member

    async def set_member(self, member: Member) -> None:
        """Persist *member*, creating/updating its guild's member-id list."""
        log.debug(
            "Attempting to cache Member(id=%s) for Guild(id=%s)",
            member.id,
            member.guild_id,
        )
        # Ensure a guild exists
        try:
            guild = await self.get_guild(member.guild_id)
            guild.members = [m.id for m in guild.members.values()]
            guild.members.append(member.id)
            guild_as_json = json.dumps(asdict(guild, recurse=True))
            await self.redis.set(f"GUILD:{guild.id}", guild_as_json)
        except GuildNotFound:
            guild = Guild(id=member.guild_id, options=self.handler.options)
            guild.members = [member.id]
            guild_as_json = json.dumps(asdict(guild, recurse=True))
            await self.redis.set(f"GUILD:{guild.id}", guild_as_json)

        as_json = json.dumps(asdict(member, recurse=True))
        await self.redis.set(f"MEMBER:{member.guild_id}:{member.id}", as_json)

    async def delete_member(self, member_id: int, guild_id: int) -> None:
        """Delete a member's key and drop it from its guild's member list."""
        log.debug(
            "Attempting to delete Member(id=%s) in Guild(id=%s)", member_id, guild_id
        )
        try:
            guild: Guild = await self.get_guild(guild_id)
            guild.members.pop(member_id)
            await self.set_guild(guild)
        # Best effort: the guild may be gone, one of its members may be
        # missing from redis, or the member may not be in the guild's dict.
        # BUGFIX: the previous bare `except:` also swallowed
        # asyncio.CancelledError and genuine bugs; catch only what this
        # block can legitimately raise.
        except (GuildNotFound, MemberNotFound, KeyError):
            pass

        await self.redis.delete(f"MEMBER:{guild_id}:{member_id}")

    async def add_message(self, message: Message) -> None:
        """Append *message* to its author's cached message list."""
        log.debug(
            "Attempting to add a Message(id=%s) to Member(id=%s) in Guild(id=%s)",
            message.id,
            message.author_id,
            message.guild_id,
        )
        try:
            member: Member = await self.get_member(message.author_id, message.guild_id)
        except (MemberNotFound, GuildNotFound):
            member: Member = Member(message.author_id, guild_id=message.guild_id)

        member.messages.append(message)
        await self.set_member(member)

    async def reset_member_count(
        self, member_id: int, guild_id: int, reset_type: ResetType
    ) -> None:
        """Zero the member's kick or warn counter; no-op if not cached."""
        log.debug(
            "Attempting to reset counts on Member(id=%s) in Guild(id=%s) with type %s",
            member_id,
            guild_id,
            reset_type.name,
        )
        try:
            member: Member = await self.get_member(member_id, guild_id)
        except (MemberNotFound, GuildNotFound):
            return

        if reset_type == ResetType.KICK_COUNTER:
            member.kick_count = 0
        else:
            member.warn_count = 0
        await self.set_member(member)

    async def drop(self) -> None:
        """Flush the entire redis database backing this cache."""
        log.warning("Cache was just dropped")
        await self.redis.flushdb(asynchronous=True)

    async def get_all_guilds(self) -> AsyncIterable[Guild]:
        """Yield every cached guild."""
        log.debug("Yielding all cached guilds")
        keys: List[bytes] = await self.redis.keys("GUILD:*")
        for key in keys:
            key = key.decode("utf-8").split(":")[1]
            yield await self.get_guild(int(key))

    async def get_all_members(self, guild_id: int) -> AsyncIterable[Member]:
        """Yield every cached member of *guild_id*.

        Raises ``GuildNotFound`` if the guild itself is not cached.
        """
        log.debug("Yielding all cached members for Guild(id=%s)", guild_id)
        # NOOP
        await self.get_guild(guild_id)

        keys: List[bytes] = await self.redis.keys(f"MEMBER:{guild_id}:*")
        for key in keys:
            key = key.decode("utf-8").split(":")[2]
            yield await self.get_member(int(key), guild_id)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
try:
import mdp
use_mdp = True
except ImportError:
print 'mdp (modular data processing) module not installed. Cannot do PCA'
use_mdp = False
import numpy as np
from neuropype import node
from itertools import imap, repeat
from copy import deepcopy, copy
from neuropype import parameter
import os
from bisect import bisect_left
from neuropype.ressources._common import boxfilter, findextrema, cross_threshold
from neuropype.ressources._common import flatenList, filterValues
import neuropype.ressources.progressbar as pgb
from neuropype.datatypes import Time_list, Sweep
from neuropype.gui.lassoExempl import LassoManager
class DetectSpike(node.Node):
"""Detect events in a sweep
* filter is a list of 4-tuples, (sniptype, property, comp, value)
sniptype can be 'raw' or 'filtered',
property can be any of the 'props' param or one PCA componant,
comp can be 0 -- for < --, 1 -- for > --, 'in' or 'out',
if comp is 0 or 1, value is a float
if comp is 'in' or 'out', value must be a list of 2 floats,
defining the window to keep/exclude"""
def __init__(self, name, parent):
# Inputs
self.in_sweep = node.Input(['Sweep', 'SweepData'])
self.in_numSweeps = node.Input('int')
self.in_chanNames = node.Input('list')
self.in_origin = node.Input('list')
self.in_tag = node.Input('list')
self.in_sweepInfo = node.Input('SweepInfo')
# Outputs
self.out_time = node.Output('Time_list')
self.out_numSweeps = node.Output('int')
self.out_sweep = node.Output('Sweep')
self.out_chanNames = node.Output('list')
self.out_origin = node.Output('list')
self.out_tagTimeList = node.Output('list')
self.out_sweepInfo = node.Output('SweepInfo')
self.out_numSpikes = node.Output('int')
self.out_snip_tag = node.Output('list')
self.out_snip_sweepInfo = node.Output('SweepInfo')
self.out_snip = node.Output('Sweep')
self.out_snip_origin = node.Output('list')
self.out_snip_chanNames = node.Output('list')
super(DetectSpike, self).__init__(name, parent)
self._inputGroups['sweep'] = {'sweep': 'in_sweep',
'numSweeps': 'in_numSweeps',
'chanNames': 'in_chanNames',
'origin': 'in_origin',
'tag' : 'in_tag',
'sweepInfo': 'in_sweepInfo'}
self._outputGroups = {'time_list': {'time_list': 'out_time',
'numSweeps': 'out_numSweeps',
'tag': 'out_tagTimeList'},
'filteredSweep': {'sweep': 'out_sweep',
'numSweeps': 'out_numSweeps',
'chanNames': 'out_chanNames',
'origin': 'out_origin',
'tag': 'out_tagTimeList',
'sweepInfo': 'out_sweepInfo'},
'snippet': {'sweep': 'out_snip',
'numSweeps': 'out_numSpikes',
'chanNames': 'out_snip_chanNames',
'origin': 'out_snip_origin',
'tag': 'out_snip_tag',
'sweepInfo': 'out_snip_sweepInfo'}}
# Default parameters:
baseline = parameter.combobox('baseline', self, ['fixed', 'floating',
'mean', 'window', None], 'floating')
fixed_baseline = parameter.float_param('fixed_baseline', self, 0, decimals= 9,
singleStep= 1e-3)
createUniv = CreateUniv(self)
self.cU = createUniv
chan = parameter.combobox('chan', self, [], 'None', func = createUniv)
padding = parameter.combobox('padding', self, ['flatPad', 'zeroPad', 'keep'], 'flatPad')
win0 = parameter.float_param('win0', self, 5e-3, minVal= 0, decimals= 9,
singleStep= 1e-3)
win1 = parameter.float_param('win1', self, 1e-3, minVal= 0, decimals= 9,
singleStep= 1e-3)
win2 = parameter.float_param('win2', self, 1.5e-3, minVal= 0, decimals=
9, singleStep= 1e-3)
dt0 = parameter.float_param('dt0', self, 1.5e-3, minVal= 0, decimals=
9, singleStep= 1e-3)
dt1 = parameter.float_param('dt1', self, 1.5e-3, minVal= 0, decimals=
9, singleStep= 1e-3)
pointinterval = parameter.float_param('pointinterval', self, 1e-3, minVal= 0, decimals=
9, singleStep= 0.1e-3)
numWins = parameter.integer('numWins', self, 1, minVal= 1, maxVal= 3)
threshold = parameter.float_param('threshold', self, 0, decimals=
9, singleStep= 1e-3)
maximum = parameter.boolean('maximum', self, Default = True)
upwards = parameter.boolean('upwards', self, Default = True)
cross_threshold_param = parameter.boolean('cross_threshold', self, Default = False)
self._params={'chan': chan,
'maximum': maximum,
'upwards': upwards,
'threshold': threshold,
'cross_threshold': cross_threshold_param,
'pointinterval': pointinterval,
'baseline' : baseline,
'verbose' : 1,
'numWins': numWins,
'win0': win0,
'win1' : win1,
'win2' : win2,
'dt0' : dt0,
'dt1' : dt1,
'baseline_window': ['begin', 'end'],
'fixed_baseline': fixed_baseline,
'memory': 'store',
'snip_window' : [-2e-3,2e-3],
'props' :['sw_ind', 'max', 'min', 'median', 'mean',
'ptp', 'std', 'sum'],
'snip_memory' : ('store', 'all'),
'filter':[],
'padding': padding,
'graphviz':{'style': 'filled', 'fillcolor':
'lightyellow'}}
#connecting outputs:
self.out_time.output = self.time_list
self.out_numSweeps.output = self.numSweeps
self.out_chanNames.output = self.chanNames
self.out_sweep.output = self.filteredSweep
self.out_origin.output = self.origin
self.out_tagTimeList.output = self.tag
self.out_sweepInfo.output = self.sweepInfo
self.out_numSpikes.output = self.numSpikes
self.out_snip_tag.output = self.snip_tag
self.out_snip.output = self.snippet
self.out_snip_origin.origin = self.snippet_origin
self.out_snip_sweepInfo.origin = self.snippet_sweepInfo
self.out_snip_chanNames.output = self.snippet_chanNames
    def _ready_trace(self, data, time, debug = 0):
        '''Return 'data' with the baseline removed, ready for detection.

        The 'baseline' parameter selects the method:
        - 'window'  : mean of the samples between the 'baseline_window' bounds
        - 'fixed'   : the constant 'fixed_baseline' parameter
        - 'floating': one to three box filters ('numWins') combined to follow
          slow drifts (first/second order differences of filtered traces);
          the filter edges are then padded according to 'padding'
        - 'mean'    : mean of the whole trace
        Integer input is converted to float for the computation and converted
        back before returning.  With debug, also return the intermediate
        filters (f0, f1, f2) and lags (dt0, dt1).'''
        f0, f1, f2, dt0, dt1 = None, None, None, None, None
        dtype = None
        if data.dtype == 'int':
            dtype = data.dtype
            data = np.asarray(data, dtype ='float64')
        # sampling period; assumed constant over the sweep
        dt = float(time[1]-time[0])
        #substracting baseline:
        baseline = self.get_param('baseline')
        if baseline == 'window':
            # mean over a user-defined time window; 'begin'/'end' mean the
            # extremities of the sweep
            if self.get_param('baseline_window')[0] == 'begin':
                beg =0
            else:
                baseline_window0 = float(self.get_param('baseline_window')[0])
                beg = bisect_left(time, baseline_window0)
            if self.get_param('baseline_window')[1] == 'end':
                end = -1
            else:
                baseline_window1 = float(self.get_param('baseline_window')[1])
                end = bisect_left(time,baseline_window1)
            baseline = np.mean(data[beg:end])
            data-=baseline
        elif baseline == 'fixed':
            baseline = float(self.get_param('fixed_baseline'))
            data-=baseline
        elif baseline == 'floating':
            # running baseline built from box filters; widths/lags are
            # converted from seconds to samples
            numWins = self.get_param('numWins')
            cumsum = data.cumsum()
            win0 = int(self.get_param('win0')/dt)
            biggestWin = win0
            f0 = boxfilter(data, win0, cumsum)
            if numWins==1:
                #simply substract baseline
                data-=f0
            else:
                #kind of first order derivative
                dt0=int(float(self.get_param('dt0')/dt))
                if dt0 == 0: dt0 = 1
                win1 = int(self.get_param('win1')/dt)
                biggestWin = max(win1, biggestWin)
                f1 = boxfilter(data, win1, cumsum)
                if numWins==2:
                    data = np.zeros_like(data)
                    data[dt0/2:-dt0/2] = f1[dt0:] - f0[:-dt0]
                elif numWins==3:
                    #kind of 2nd order derivative
                    win2 = int(self.get_param('win2')/dt)
                    biggestWin = max(win2, biggestWin)
                    dt1=int(float(self.get_param('dt1')/dt))
                    if dt1 == 0: dt1=1
                    f2 = boxfilter(data, win2, cumsum)
                    data = np.zeros_like(data)
                    data[dt1/2:-dt1/2] = f2[dt1:]-2*f1[dt0:-dt1+dt0]-f0[:-dt1]
                else:
                    raise ValueError('wrong numWins: %s'%numWins)
            # the filtered edges (one window width on each side) are not
            # meaningful: pad them with zeros or with the first valid value
            padding = self.get_param('padding')
            if padding == 'zeroPad':
                data[:biggestWin] = np.zeros(biggestWin)
                data[-biggestWin:] = np.zeros(biggestWin)
            elif padding == 'flatPad':
                data[:biggestWin] = np.ones(biggestWin)*data[biggestWin]
                data[-biggestWin:] = np.ones(biggestWin)*data[biggestWin]
        elif baseline == 'mean':
            baseline=np.mean(data)
            data-=baseline
        if dtype is not None:
            data = np.asarray(data, dtype = dtype)
        if debug:
            return data, f0, f1, f2, dt0, dt1
        return data
def _detect(self, trace, time):
'''Do the detection on trace, delete values in pointinterval and
return the other'''
params = self.params
dt=time[1]-time[0]
pointinterval=int(float(params['pointinterval']/dt))
if not params['cross_threshold']:
temp = findextrema(trace, params['maximum'], params['threshold'],
pointinterval)
else:
temp = cross_threshold(trace, params['upwards'], params['maximum'],
params['threshold'], pointinterval)
out = np.array([time[i] for i in temp])
return out
def _spikeTimes(self, index_sweep):
'''Return the list of spike times detected in sweep 'index_sweep' '''
if self.get_param('memory') == 'write':
data, dataTot = None, None
path = self.get_cache('path')
if path is not None:
dataTot = np.load(path)
if 'Sw_'+str(index_sweep) in dataTot.files:
data = dataTot['Sw_'+str(index_sweep)]
elif self.get_param('memory') == 'store':
if not self._cache.has_key('sp_times'): self._cache['sp_times']={}
data = self._cache['sp_times'].get(index_sweep)
elif self.get_param('memory') is None:
data = None
else:
print 'param %s for memory not '%self.get_param('memory') + \
'recognised, won\'t memorise anything'
data = None
if data is None:
sweep = self._get_input_sweep(index_sweep)
time = self._get_time(index_sweep)
swdata = sweep._data[1] if sweep._data.shape[0]>1 else sweep._data[0]
trace = self._ready_trace(deepcopy(swdata), time)
data = self._detect(trace, time)
if self.get_param('memory') == 'store':
self._cache['sp_times'][index_sweep] = data
elif self.get_param('memory') == 'write':
if dataTot is not None:
temp = {}
for i in dataTot.files:
temp[i]= dataTot[i]
temp['Sw_'+str(index_sweep)] = data
else:
temp = {'Sw_'+str(index_sweep) : data}
path = self.parent.home + self.parent.name + '_' +self.name + \
'_spikeTimes.npz'
np.savez(path, **temp)
self.set_cache('path', self.parent.home + self.parent.name +
'_' + self.name + '_spikeTimes.npz', force = 1)
dataTot.close()
# if self.get_param('snip_memory') is not None:
# snippet = self._extract_snippet(data, sweep._data[1], time)
# self._saveSnip(index_sweep, snippet, 'raw')
# snippet = self._extract_snippet(data, trace, time)
# self._saveSnip(index_sweep, snippet, 'filtered')
return data
def all_times(self, list_sweep=None, groupbysweep=False, keepMasked = False):
out = []
if list_sweep is None:
list_sweep = xrange(self.numSweeps())
if not keepMasked:
mask = self._mask()
for i, sweep in enumerate(list_sweep):
data = self._spikeTimes(sweep)
if not keepMasked:
b0, b1 = self._sweepBorder(sweep)
data = data[mask[b0: b1]]
out.append(data)
if not groupbysweep:
out = flatenList(out)
return out
def ISI(self,list_sweep=None, groupbysweep=False, keepMasked = False):
out = []
if list_sweep is None:
list_sweep = xrange(self.numSweeps())
if not keepMasked:
mask = self._mask()
for i, sweep in enumerate(list_sweep):
data = self._spikeTimes(sweep)
if not keepMasked:
b0, b1 = self._sweepBorder(i)
data = data[mask[b0: b1]]
out.append(np.diff(data))
if not groupbysweep:
out = flatenList(out)
return out
    def time_list(self, index_sweep, keepMasked = False):
        '''Return the spikes of sweep 'index_sweep' as a Time_list object
        (units: seconds).  Masked spikes are removed unless keepMasked.'''
        sp_times = self._spikeTimes(index_sweep)
        if not keepMasked:
            # slice the global mask down to this sweep's spikes
            b0, b1 = self._sweepBorder(index_sweep)
            mask = self._mask()[b0:b1]
            sp_times = sp_times[mask]
        name = 'SpikesOfSweep'+str(index_sweep)
        origin = self.in_origin(index_sweep)+ [str(self.get_param('chan'))]
        out = Time_list.Time_list(name, sp_times, origin, SweepIndex = index_sweep,
                                  nodeOfSweep = self, title = 'NoTitle', units
                                  = 's')
        out.tag = self.tag(index_sweep)
        return out
def findOriginFromIndex(self, index, keepMasked = False):
"""return the index of the sweep and the spike in that sweep
corresponding to spike index"""
tempind = 0
func = self._spikeTimes if keepMasked else self.time_list
if not keepMasked:
for i in xrange(self.in_numSweeps()):
length = len(self.time_list(i))
if index >= (tempind + length):
#if it's not in this sweep
tempind += length #note: add only the # of valid spikes
else:
index_trace = i
index_time_list = index - tempind
# index_time_list is the index of the spike in the time_list
# with all its spike (even if partialTraces is Ignore)
return index_trace, index_time_list
raise ValueError('I haven\'t found snippet %s'%index)
else:
borders = self._borders()
sortedSecondIndex = [borders[i][1] for i in range(self.in_numSweeps())]
index_trace = np.searchsorted(sortedSecondIndex, index)
if index_trace == 0:
return index_trace, index
if index_trace >= len(sortedSecondIndex):
raise ValueError('I haven\'t found snippet %s'%index)
index_time_list = index - sortedSecondIndex[index_trace - 1]
    def _sweepBorder(self, index, borderbefore = None):
        """Return (start, stop) slice bounds of sweep 'index' spikes in the
        unmasked global numbering; results are cached in
        self._cache['borders'].

        'borderbefore', when given, is the border of sweep index-1 and lets
        the result be computed incrementally without re-counting."""
        if not self._cache.has_key('borders'): self._cache['borders'] = {}
        bord = self._cache['borders']
        out = bord.get(index)
        if out is not None:
            return out
        if borderbefore is None:
            # count the spikes of all previous sweeps (expensive path)
            out = (self.findSpikeFromSweep(index, True),
                   self.findSpikeFromSweep(index+1, True))
        else:
            out = (borderbefore[1], self._spikeTimes(index).size+borderbefore[1])
        bord[index] = out
        return out
def _borders(self):
borderbefore = None
for i in range(self.numSweeps()):
borderbefore = self._sweepBorder(i, borderbefore)
return self.get_cache('borders')
    def numSpikes(self, list_sweep = None, keepMasked = False, verbose =1):
        """Count the number of spikes in list_sweep.

        if list_sweep is None, count on all the sweeps;
        with keepMasked, count the spikes before the mask/filter is applied.
        verbose prints a progress bar."""
        if list_sweep is None:
            list_sweep = xrange(self.numSweeps())
        elif not list_sweep:
            return 0
        if not hasattr(list_sweep, '__iter__'):
            list_sweep = [int(list_sweep)]
        if keepMasked:
            iterator = imap(self._spikeTimes, list_sweep)
        else:
            iterator = imap(self.time_list, list_sweep, repeat(False))
        if verbose:
            print 'numSpikes in %s:'%self.name
            pbar = pgb.ProgressBar(maxval=len(list_sweep),
                                   term_width = 79).start()
        n=0
        # consume the first element outside the loop so the progress counter
        # matches the number of remaining iterations
        nspikes=len(iterator.next())
        for i in iterator:
            n+=1
            nspikes += len(i)
            if verbose: pbar.update(n)
        if verbose: pbar.finish()
        return nspikes
def findSpikeFromSweep(self, index, keepMasked = False):
return self.numSpikes(range(index), keepMasked, verbose = 0)
    def _extract_snippet(self, sp_times, trace, time):
        '''Cut one window of 'trace' around each spike time.

        The window is the 'snip_window' parameter (seconds, relative to the
        spike); windows sticking out of the trace are padded with the nearest
        real sample.  Returns a (n_spikes, window_length) array.'''
        if not sp_times.size:
            return np.array([])
        win0, win1 = self.get_param('snip_window')
        beg = time.searchsorted(sp_times + win0)
        end = time.searchsorted(sp_times + win1)
        midl = time.searchsorted(sp_times)
        length = int(np.ceil((win1-win0)/(time[1]-time[0])))
        # ceil and have the smallest window that include totally the interval
        out = np.zeros((len(beg), length), dtype = trace.dtype)
        for i ,(s, b,e) in enumerate(zip(midl, beg, end)):
            if e - b == length:
                out[i] = trace[b:e]
            else:
                # truncated window: centre the available samples on the spike
                # and pad the missing edges with the closest real value
                border0 = int(length/2)
                if s - b > border0 or border0+e -s > length:
                    raise ValueError('ca bug')
                out[i, border0 - (s - b): border0 + (e-s)] = trace[b:e]
                out[i, :border0 - (s - b)]=trace[b]
                out[i, border0 + (e-s):] = trace[max(0,e-1)]
                #minus 1 cause e can be len(trace)
        return out
def _saveSnip(self, index_sweep, snippet, sniptype):
snip_memory = self.get_param('snip_memory')
if snip_memory[0] != 'store':
return
if snip_memory[1] != 'all':
saved = self.get_cache('snippet_'+sniptype)
if saved is None: saved = {}
if saved.has_key(index_sweep):
return
saved_index = self.get_cache('snippet_index_'+sniptype)
if saved_index is None: saved_index = []
if isinstance(snip_memory[1], int):
while len(saved_index) >= snip_memory:
first = saved_index.pop(0)
saved.pop(first)
elif snip_memory[1] != 'all':
return
saved_index.append(index_sweep)
saved[index_sweep] = snippet
self.set_cache('snippet_'+sniptype, saved, force =1)
self.set_cache('snippet_index_'+sniptype, saved_index,
force =1)
else:
if not self._cache.has_key('snippet_'+sniptype):
out = np.zeros((self.numSpikes(keepMasked = True),
snippet.shape[1]),
dtype = snippet.dtype)
self._cache['snippet_'+sniptype] = out
if not self._cache.has_key('snippet_index_'+sniptype):
out = np.zeros(self.numSpikes(keepMasked=True), dtype = 'bool')
self._cache['snippet_index_'+sniptype] = out
b0, b1 = self._sweepBorder(index_sweep)
self._cache['snippet_'+sniptype][b0:b1,:] = snippet
self._cache['snippet_index_'+sniptype][b0:b1] = np.ones(b1-b0,
dtype = 'bool')
    def _extract_one_sweep(self, index_sweep, sniptype):
        '''Extract all the snippets ('raw' or 'filtered') of one sweep;
        return None when the sweep contains no spike.'''
        time = self._get_time(index_sweep)
        spike = self._spikeTimes(index_sweep)
        if not spike.size:
            return None
        sweep = self._get_input_sweep(index_sweep)
        # keep the data row (row 0 is the time base when present)
        sweep = sweep._data[1] if sweep._data.shape[0]>1 else sweep._data[0]
        if sniptype == 'filtered':
            sweep = self._ready_trace(sweep, time)
        elif sniptype != 'raw':
            raise ValueError('Unknown sniptype: %s'%sniptype)
        snip = self._extract_snippet(spike, sweep, time)
        return snip
    def _extract_all_snippet(self, sniptype):
        '''Extract and cache the snippets of every input sweep, showing a
        progress bar.'''
        print 'extracting %s snippets in %s'%(sniptype, self.name)
        pbar = pgb.ProgressBar(maxval=self.numSweeps(), term_width = 79).start()
        for index_sweep in range(self.in_numSweeps()):
            snip = self._extract_one_sweep(index_sweep, sniptype)
            if snip is None:
                # sweep without spikes
                continue
            self._saveSnip(index_sweep, snip, sniptype)
            pbar.update(index_sweep)
        pbar.finish()
def _getSnip(self, listindex, sniptype):
if not isinstance(listindex, list):
listindex = [int(listindex)]
snip_memory = self.get_param('snip_memory')
if snip_memory is not None and snip_memory[0] == 'store':
if snip_memory[1] == 'last_sweep':
arg_sort = np.argsort(listindex)
saved_ind = self.get_cache('last_sweep_snip_ind_'+sniptype)
saved = self.get_cache('last_sweep_snip_'+sniptype)
if saved is None or saved_ind is None:
saved = []
saved_ind = []
out = []
for arg in arg_sort:
try:
ind = saved_ind.index(listindex[arg_sort])
out.append(saved[ind])
except ValueError:
index_sweep, indSpinSw = self.findOriginFromIndex(listindex[arg_sort], keepMasked = 1)
time = self._get_time(index_sweep)
spike = self._spikeTimes(index_sweep)
sweep = self._get_input_sweep(index_sweep)
sweep = sweep._data[1] if sweep._data.shape[0]>1 else sweep._data[0]
if sniptype == 'filtered':
sweep = self._ready_trace(sweep, time)
snip = self._extract_snippet(spike, sweep, time)
snipinds = np.arange(*self._borders()[index_sweep])
self.set_cache('last_sweep_snip_ind_'+sniptype, snipinds)
self.set_cache('last_sweep_snip_'+sniptype, snip)
out.append[snip[indSpinSw]]
return out
# out = {}
# sweep_saved = self.get_cache('snippet_'+sniptype)
# if not sweep_saved is None:
# for i in listindex:
# swind, spind = self.findOriginFromIndex(i)
# not_saved = []
# for i in list_index:
# sweep
# saved_ind =[i for i in listindex if i in sweep_saved]
elif snip_memory[1] == "all":
inds = self.get_cache('snippet_index_'+sniptype)
if inds is None or any([not inds[i] for i in listindex]):
self._extract_all_snippet(sniptype)
return np.array(self._cache['snippet_'+sniptype][listindex,:])
else:
raise NotImplementedError()
elif snip_memory is not None:
raise NotImplementedError()
out = None
for index_snip in listindex:
index_sweep, indSpinSw = self.findOriginFromIndex(index_snip, keepMasked = 1)
time = self._get_time(index_sweep)
spike = self._spikeTimes(index_sweep)
if not spike.size:
continue
sweep = self._get_input_sweep(index_sweep)
sweep = sweep._data[1] if sweep._data.shape[0]>1 else sweep._data[0]
if sniptype == 'filtered':
sweep = self._ready_trace(sweep, time)
elif sniptype != 'raw':
raise ValueError('Unknown sniptype: %s'%sniptype)
snip = self._extract_snippet(spike, sweep, time)
if out is None:
out = snip[indSpinSw]
else:
out = np.vstack((out, snip[indSpinSw]))
return snip
# not saved
# indices = [self.findOriginFromIndex(i) for i in listindex]
# time = self._get_time(index_sweep)
# time_list =self._time_list(index_sweep)
# spike = time_list._data
# sweep = self._get_input_sweep(index_sweep, dtype =self.get_param(
# 'dtype'))._data[1]
# if sniptype == 'filtered':
# sweep = self._ready_trace(sweep, time)
# elif sniptype != 'raw':
# raise ValueError('Unknown sniptype: %s'%sniptype)
# snip = self._extract_snippet(spike, sweep, time)
# self._saveSnip(index_sweep, snip, sniptype)
def _value_around_pic(self, list_index, props, sniptype):
snip = self._getSnip(list_index, sniptype)
if not isinstance(props, list):
props = [props]
out = np.zeros((len(props),snip.shape[0]), dtype = 'float')
for i, v in enumerate(props):
func = getattr(np, v)
if hasattr(func, '__call__'):
val = func(snip, axis = 1)
else:
print 'Prop is not callable, it might not be what you wanted'
out[i] = val
return out
    def PCA(self, sniptype):
        '''Principal components of the snippet property array (requires mdp).

        Returns an (n_spikes, n_props - 1) array, cached per sniptype.'''
        if not use_mdp:
            print 'mdp is not installed'
            return
        if self._cache.has_key('PCA_'+sniptype):
            return self.get_cache('PCA_'+sniptype)
        all_props = self.get_param('props')
        PCAnode = mdp.nodes.PCANode(input_dim=len(all_props), output_dim=len(all_props)-1)
        arr = self._getFilterArray(sniptype)
        PCAnode.train(arr)
        out = PCAnode(arr)
        self.set_cache('PCA_'+sniptype, out)
        return out
    def _getFilterArray(self, sniptype, list_sweep = None):
        '''Return the (n_spikes, n_props) array of snippet properties for
        'sniptype' ('raw' or 'filtered'), all spikes unmasked.

        NOTE(review): the cache is keyed on sniptype only, so an earlier call
        with a partial list_sweep would be returned unchanged for any later
        list_sweep -- confirm callers always use the default (all sweeps).'''
        if not self._cache.has_key('properties'):
            self._cache['properties'] = {}
        if self._cache['properties'].has_key(sniptype):
            return self.get_cache('properties')[sniptype]
        props = copy(self.get_param('props'))
        wasNone = False
        if list_sweep is None:
            wasNone = True
            list_sweep = range(self.numSweeps())
        elif not isinstance(list_sweep, list):
            list_sweep = list(list_sweep)
        out = np.zeros((self.numSpikes(list_sweep, keepMasked = True),
                        len(props)), dtype = 'float')
        print 'getting filter array for %s in %s'%(sniptype, self.name)
        inds = range(len(props))
        try:
            # 'sw_ind' is not a numpy reduction: fill that column with the
            # sweep index of each spike and drop it from the reductions
            indSw = props.index('sw_ind')
            props.pop(indSw)
            inds.pop(indSw)
            N = 0
            for i in list_sweep:
                n = len(self._spikeTimes(i))
                out[N:N+n, indSw] = np.ones(n, dtype = 'float')*i
                N+=n
        except ValueError:
            print 'sw_ind not in prop'
            pass
        if wasNone:
            out[:,inds] = self._value_around_pic(range(out.shape[0]), props, sniptype).T
        else:
            # per-sweep extraction with a progress bar
            pbar = pgb.ProgressBar(maxval=len(list_sweep), term_width = 79).start()
            for i in list_sweep:
                b0, b1 = self._sweepBorder(i)
                if b0 != b1:
                    data = self._value_around_pic(range(b0,b1), props, sniptype)
                    out[b0:b1,inds] = data.T
                pbar.update(i)
        self._cache['properties'][sniptype] = out
        return out[:]
    def _mask(self):
        '''Boolean mask over all detected spikes (True = spike kept).

        Combines the 'filter' parameter (per-property comparisons) with any
        saved lasso selections; cached until filter/lasso change.'''
        if self._cache.has_key('mask'):
            return self._cache['mask']
        mask = np.ones(self.numSpikes(keepMasked = True), dtype = bool)
        Filter = self.get_param('filter')
        if Filter:
            for sniptype, prop, comp, value in Filter:
                val = np.array(self._getDataToPlot(keepMasked=True, prop=prop,
                                                   sniptype=sniptype))
                val = filterValues(val, comp, value)
                mask = np.logical_and(mask, val)
        if self._cache.has_key('lasso'):
            # every stored lasso selection further restricts the mask
            Lasso = self.get_cache('lasso')
            if Lasso:
                for ms in Lasso.values():
                    mask = np.logical_and(mask,ms)
        self._cache['mask'] = mask
        return mask
def numSweeps(self):
'''return the number of sweeps'''
return self.in_numSweeps()
def chanNames(self, index = 0):
'''return the name of the channel used for the detection'''
return [self.get_param('chan')]
def origin(self, index):
return self.in_origin(index)
    def filteredSweep(self, index_sweep, chan = None):
        '''Return a Sweep holding the baseline-subtracted trace on which the
        detection is run (time in row 0, filtered data in row 1).'''
        sweep = self._get_input_sweep(index_sweep)
        time = self._get_time(index_sweep)
        # keep the data row (row 0 is the time base when present)
        swdata = sweep._data[1] if sweep._data.shape[0]>1 else sweep._data[0]
        data = np.array(self._ready_trace(swdata, time), dtype = 'float')
        chinf = [getattr(sweep, cname) for cname in sweep.chanNames()]
        out = Sweep.Sweep(sweep.name+'_filtered', np.vstack((time, data)), chinf,self.tag(index_sweep))
        return out
def snippet_chanNames(self, index = 0):
return [self.get_param('chan')+i for i in ['_raw', '_filtered']]
def snippet_origin(self, index, keepMasked = False):
ind_sw, ind_sp = self.findOriginFromIndex(index, keepMasked = keepMasked)
return self.in_origin(ind_sw)+ ['Spike_'+str(ind_sp)]
    def snippet_sweepInfo(self, index, keepMasked = False):
        '''Sweep info describing snippet 'index': two channels (raw and
        filtered copies of the detection channel) and a sample-indexed time
        base (t0=0, dt=1).'''
        if not keepMasked:
            index = self._findNotMaskedFromMaskedIndex(index)
        sw_ind, sp_ind = self.findOriginFromIndex(index, keepMasked = True)
        sw_inf = self.sweepInfo(sw_ind)
        sw_inf.numChans = 2
        chInf = [copy(sw_inf.channelInfo[0]) for i in (0,1)]
        chInf[0].name = chInf[0].name + '_raw'
        chInf[1].name = chInf[1].name + '_filtered'
        sw_inf.channelInfo = chInf
        dt = sw_inf.dt
        win = self.get_param('snip_window')
        sw_inf.numPoints = int((win[1] - win[0])/dt)
        # snippets are indexed in samples, not seconds
        sw_inf.tend = sw_inf.numPoints
        sw_inf.t0 = 0
        sw_inf.dt = 1
        return sw_inf
def snippet(self, index, chan = None, keepMasked = False):
"""return a snippet
Arguments:
- `index`:
- `chan`:
"""
if not keepMasked:
index = self._findNotMaskedFromMaskedIndex(index)
sw_ind, sp_ind = self.findOriginFromIndex(index, keepMasked = True)
snipRaw = self._getSnip(index, 'raw')[0]
snipFiltered = self._getSnip(index, 'filtered')[0]
data = np.zeros((3, snipRaw.size), dtype = snipRaw.dtype)
data[0] = np.arange(data.shape[1])
data[1] = snipRaw
data[2] = snipFiltered
snipinf = self.snippet_sweepInfo(index, keepMasked)
return Sweep.Sweep('Snippet_'+str(index)+'in_'+self.name, data,
snipinf.channelInfo, tag = self.tag(sw_ind))
def snip_tag(self, index, keepMasked = False):
'''Return the tags of sweep or time_list'''
if not keepMasked:
index = self._findNotMaskedFromMaskedIndex(index)
sw_ind, sp_ind = self.findOriginFromIndex(index)
return self.in_tag(sw_ind)
def tag(self, index):
'''Return the tags of sweep or time_list'''
return self.in_tag(index)
def sweepInfo(self, index):
sw_inf = self.in_sweepInfo(index)
cname = self.get_param('chan')
ind = [i.name for i in sw_inf.channelInfo].index(cname)
sw_inf.channelInfo = [sw_inf.channelInfo[ind]]
sw_inf.numChans = 1
return sw_inf
def _findNotMaskedFromMaskedIndex(self, maskedIndex):
mask = self._mask()
index = np.arange(mask.size)
return index[mask][maskedIndex]
    def _get_input_sweep(self, sw_ind, *args, **kwargs):
        '''Fetch sweep 'sw_ind' from the input node, restricted by default to
        the detection channel; the last sweep fetched is cached.
        A copy is returned so callers may mutate it freely.'''
        last = self.get_cache('last')
        if last is None or last[0] != sw_ind:
            if not kwargs.has_key('chan') and not args:
                kwargs['chan'] = self.get_param('chan')
            sw = self.in_sweep(sw_ind, *args, **kwargs)
            self.set_cache('last', (sw_ind, sw), force = 1)
            return copy(sw)
        return copy(last[1])
    def _get_time(self, index_sweep):
        '''Time base of sweep 'index_sweep'; the last one is cached.

        When the stored time row is int16 (i.e. no real time line), a time
        base is rebuilt from the sweep info assuming a constant dt.'''
        lasttime = self.get_cache('lasttime')
        if lasttime is None or lasttime[0] != index_sweep:
            # NOTE(review): this unpacking is otherwise unused; it raises
            # KeyError when no 'in_sweep' input is connected -- confirm that
            # is the intent before removing it
            node, out = self.inputs['in_sweep']
            time = self.in_sweep(index_sweep, self.get_param('chan'))._data[0]
            if time.dtype == np.dtype('int16'):
                # no time line, need to create time, assume that dt
                # is constant on the sweep, does it matter?
                swinf = self.sweepInfo(index_sweep)
                time = np.arange(swinf.t0, swinf.tend, swinf.dt,dtype = 'float')
            self.set_cache('lasttime', (index_sweep, time), force =1)
            return copy(time)
        return copy(lasttime[1])
def save(self, what = None, path = None, force = 0):
if what is None:
what = ['SpikeTimes', 'Border', 'Mask', 'Prop', 'Lasso']
for i in what:
getattr(self, 'save'+i)(force = force, name = path)
    def saveSpikeTimes(self, name = None, force = 0, mode = 'bn', delimiter =
                       ',', keepMasked = True):
        '''Save spike times in file 'name' (can only save ALL the spike times at
        once)
        'name' is absolute or relative to parent.home
        if 'force', replace existing file
        'mode' can be 'bn', 'csv', 'txt' or 'vert_csv':
            'bn': binary, saved in .npz
            'csv' or 'txt': text file, value separeted by 'delimiter' (default
            ',') saved in lines
            'vert_csv': text file, value and separeted by 'delimiter' (default
            ',') saved in columns '''
        import os
        path = name
        if path is None:
            path = self.parent.name+'_'+self.name+'_spikeTimes'
        if path[0] != '/':
            path = self.parent.home + path
        data = self.all_times(keepMasked = True, groupbysweep = True)
        if mode == 'bn':
            # one 'Sw_<i>' array per sweep in an .npz archive
            path += '.npz'
            if not force:
                if os.path.isfile(path):
                    print 'File %s already exist, change name or force'%path
                    return
            kwargs = {}
            for i, value in enumerate(data):
                kwargs['Sw_'+str(i)] = value
            np.savez(path, **kwargs)
        elif mode == 'vert_csv':
            # one column per sweep: row k holds the k-th spike of each sweep
            path += '_vertical.csv'
            if not force:
                if os.path.isfile(path):
                    print 'File %s already exist, change name or force'%path
                    return
            nspike = self.numSpikes()
            out = file(path, 'w')
            totspike = 0
            index_spike = 0
            while totspike < nspike:
                for index_sweep in range(self.numSweeps()):
                    timelist = self.time_list(index_sweep)
                    if len(timelist) > index_spike:
                        out.write(str(timelist._data[index_spike]))
                        totspike+=1
                    out.write(str(delimiter))
                out.write('\n')
                index_spike += 1
                # NOTE(review): totspike is also bumped once per row here, so
                # the loop may stop before the last spikes are written when
                # sweeps have very unequal spike counts -- confirm intent
                totspike += 1
            out.close()
        elif mode == 'csv' or mode =='txt':
            # one line per sweep, values joined with the delimiter
            path += '.'+mode
            if not force:
                if os.path.isfile(path):
                    print 'File %s already exist, change name or force'%path
                    return
            out = file(path, 'w')
            for line in data:
                out.write(delimiter.join(np.array(line, dtype = 'str'))+'\n')
            out.close()
        else:
            print 'unknown mode %s'%mode
def saveBorder(self, name = None, force =0):
import os
if name is None:
name = self.parent.name+'_'+self.name+'_borders'
if name[0] != '/':
path = self.parent.home + name
data = self._borders()
outdata = np.zeros((self.numSweeps(), 2), dtype = 'int')
for i, v in data.iteritems():
outdata[i] = v
path += '.npy'
if not force:
if os.path.isfile(path):
print 'File %s already exist, change name or force'%path
return
np.save(path, outdata)
def saveMask(self, name = None, force = 0):
if name is None:
name = self.parent.name+'_'+self.name+'_mask'
if name[0] != '/':
path = self.parent.home + name
path += '.npy'
if not force:
if os.path.isfile(path):
print 'File %s already exist, change name or force'%path
return
data = self._mask()
np.save(path, data)
def saveLasso(self, name = None, force = 0):
if name is None:
name = self.parent.name+'_'+self.name+'_lasso'
if name[0] != '/':
path = self.parent.home + name
path += '.npz'
if not force:
if os.path.isfile(path):
print 'File %s already exist, change name or force'%path
return
data = self._lasso()
if data:
np.save(path, data)
def saveProp(self, sniptype = ['raw', 'filtered'], name = None, force = 0):
if not isinstance(sniptype, list):
sniptype = [str(sniptype)]
if name is None:
name = self.parent.name+'_'+self.name+'_prop'+'_'
if name[0] != '/':
path = self.parent.home + name
for sntp in sniptype:
p = path+sntp+ '.npy'
if not force:
if os.path.isfile(p):
print 'File %s already exist, change name or force'%p
continue
data = self._getFilterArray(sntp)
np.save(p, data)
def set_param(self, *args, **kwargs):
if args:
if len(args) == 2:
kwargs[args[0]] = args[1]
else:
raise ValueError('set_param accept 0 or 2 positionnal arguments')
for val in ['filter', 'props', 'lasso']:
if kwargs.has_key('filter'):
self.dirty('all', selfDirty = False)
self._params['filter'] = kwargs.pop('filter')
if self._cache.has_key('mask'):
self._cache.pop('mask')
if kwargs.has_key('lasso'):
self._params['lasso'] = kwargs.pop('lasso')
if self._cache.has_key('mask'):
self._cache.pop('mask')
for wn in ['win'+str(i) for i in [0,1,2]]:
if kwargs.has_key(wn):
if kwargs[wn] is None:
kwargs[wn]=1e-5
if not kwargs:
return
# topop = ['snippet_indexraw', 'snippet_indexfiltered', 'snippetraw',
# 'snippetfiltered']
# [self._cache.pop(i) for i in topop if self._cache.has_key(i)]
return super(DetectSpike, self).set_param(**kwargs)
def load(self, force = 0):
self.loadSpikes(force= force)
self.loadMask(force= force)
self.loadBorders(force= force)
try:
self.loadProp(force= force)
self.loadLasso(force= force)
except Exception:
print 'Could load only spike times and mask'
    def loadSpikes(self, path = None, force = 0):
        '''load spikes from a .npz file
        if memory is write, just load the path of the file
        if memory is store, store spike times from the file in cache'''
        if path is None:
            path = self.parent.home + self.parent.name + '_' + self.name + \
                   '_spikeTimes.npz'
        self.set_cache('sp_times', {}, force = force)
        cached = self._cache['sp_times']
        if self.get_param('memory') == 'store':
            File = np.load(path)
            for name in File.files:
                # archive keys are 'Sw_<sweep index>'
                cached[int(name[name.rfind('_')+1:])]= File[name]
        elif self.get_param('memory') == 'write':
            self.set_cache('path', path)
    def loadBorders(self, path = None, force = 0):
        '''Load the per-sweep spike borders from '<path>.npy' or '<path>.npz'
        into the 'borders' cache (only implemented for memory == 'store').
        Raise IOError when neither file exists.'''
        if path is None:
            path = self.parent.home + self.parent.name + '_' + self.name + \
                   '_borders'
        if os.path.isfile(path+'.npy'):
            path+='.npy'
        elif os.path.isfile(path+'.npz'):
            path+='.npz'
        else:
            raise IOError('no file with npy or npz extension on this path:\n%s'%
                          path)
        self.set_cache('borders', {}, force = force)
        cached = self._cache['borders']
        if self.get_param('memory') == 'store':
            File = np.load(path)
            if path.split('.')[-1] == 'npz':
                # archive keys are 'Sw_<sweep index>'
                for name in File.files:
                    cached[int(name[name.rfind('_')+1:])]= File[name]
            else:
                # .npy: one (start, stop) row per sweep
                for i, line in enumerate(File):
                    cached[i] = line
        else:
            raise NotImplementedError()
def loadProp(self, sniptype = ['raw', 'filtered'], path = None, force=0):
if path is None:
path = self.parent.home + self.parent.name + '_' + self.name + \
'_prop_'
if not isinstance(sniptype, list):
sniptype = [str(sniptype)]
for sntp in sniptype:
p = path + sntp+'.npy'
File = np.load(p)
if not self._cache.has_key('properties'):
self._cache['properties'] = {}
self._cache['properties'][sntp] = File
def loadMask(self, path = None, force =0):
if path is None:
path = self.parent.home + self.parent.name + '_' + self.name + \
'_mask'+'.npy'
File = np.load(path)
self._cache['mask'] = File
def loadLasso(self, path = None, force =0):
import os
if path is None:
path = self.parent.home + self.parent.name + '_' + self.name + \
'_lasso'+'.npz'
if os.path.isfile(path):
File = np.load(path)
self._cache['lasso'] = dict([(i, File[i]) for i in File.files])
    def all_val(self, sniptype, list_sweep = None, groupbysweep = False,
                keepMasked = False):
        # NOTE(review): unfinished -- this builds the sweep list and then
        # returns None; either implement the value collection or remove it
        if list_sweep is None:
            list_sweep = range(self.numSweeps())
def _getDataToPlot(self, prop, sniptype, keepMasked):
if prop.split('_')[0] == 'PCA':
if not use_mdp:
print 'mdp is not installed'
return
indPCA = prop.split('_')[1]
data= self.PCA(sniptype=sniptype)[:,indPCA]
else:
props = self.get_param('props')
indprop = props.index(prop)
data = self._getFilterArray(sniptype)[:,indprop]
if not keepMasked:
mask = self._mask()
data = data[mask]
return data
def prop_hist(self, fig, prop = 'props', sniptype = 'raw',
keepMasked = False, **kwargs):
fig.clear()
ax = fig.add_subplot(111)
if prop == 'props':
data = self._getFilterArray(sniptype)
if not keepMasked:
data = data[self._mask(),:]
labels = self.get_param('props')
elif prop == 'PCA':
if not use_mdp:
print 'mdp is not installed'
return
labels = ['PCA_'+str(i) for i in range(len(self.get_param('props')))]
data = self.PCA(sniptype)
if not keepMasked:
data = data[self._mask(),:]
else:
labels = prop
data = None
for i,p in enumerate(prop):
out = self._getDataToPlot(p, sniptype, keepMasked)
if data is None:
data = np.zeros((out.size, len(prop)))
data[:,i] = out
out = ax.hist(data, label = labels, **kwargs)
fig.canvas.draw()
return out
def select_snip(self, sniptype, prop, comp, value, keepMasked = True):
allsnip = self._getSnip(range(self.numSpikes(keepMasked = True)),
sniptype)
p = self._getDataToPlot(prop, sniptype, keepMasked)
toKeep = p >= value
if not comp:
toKeep = np.invert(toKeep)
return allsnip[toKeep,:]
    def plot_selectedsnip(self, fig, sniptype, prop=None, comp=None, value=
                          None, keepMasked = True, maxnum = 5000, **kwargs):
        '''Plot up to 'maxnum' snippets (optionally filtered through
        select_snip when prop/comp/value are given) plus their mean in red;
        return (snippets, mean).'''
        if any([i is None for i in [prop, comp, value]]):
            # no selection criterion: take every snippet
            snip = self._getSnip(range(self.numSpikes(keepMasked = True)),
                                 sniptype)
            if not keepMasked:
                snip = snip[self._mask(),:]
        else:
            snip = self.select_snip(sniptype,prop,comp, value, keepMasked)
        totnum = snip.shape[0]
        fig.clear()
        ax = fig.add_subplot(111)
        if snip.shape[0]> maxnum:
            snip = snip[:maxnum,:]
        ax.plot(snip.T, **kwargs)
        mean = snip.mean(axis = 0)
        ax.plot(mean, 'r')
        ax.set_title('%s snippets of %s\n(%s/%s plotted)'%(sniptype, self.name,
                                                           snip.shape[0],totnum
                                                           ))
        fig.canvas.draw()
        return snip, mean
    def prop_plot(self, figure, propx= 'min', propy = 'max', sniptype = 'raw',
                  clear = True, keepMasked = False, **kwargs):
        '''Scatter-plot property 'propy' against 'propx' for every spike and
        make the points pickable (handled by _picked).  Returns (X, Y).'''
        print 'ploting properties in %s'%self.name
        # stash plotting context for the _picked callback
        self._fig = figure
        self._sniptype = sniptype
        self._keepMasked = keepMasked
        if clear: self._fig.clear()
        ax = self._fig.add_subplot(111)
        ax.set_xlabel(propx)
        ax.set_ylabel(propy)
        X = self._getDataToPlot(propx, sniptype, keepMasked)
        Y = self._getDataToPlot(propy, sniptype, keepMasked)
        if not kwargs.has_key('marker'):
            kwargs['marker']='.'
        # picker=5: points respond to clicks within 5 pixels
        ax.plot(X, Y, 'k',ls = '', picker = 5, label = '_nolegend_',
                **kwargs)
        ax.set_title('Properties of spikes from %s \n%s spikes plotted'%(
            self.name, X.size))
        self._fig.canvas.mpl_connect('pick_event', self._picked)
        self._last_event = None
        self._fig.canvas.draw()
        self._temp = None
        return X, Y
    def _picked(self, event):
        '''Matplotlib pick callback for prop_plot: highlight the clicked
        point and draw its snippet in an inset axes.'''
        if self._last_event is not None:
            # ignore duplicate picks triggered by the same mouse click
            if self._last_event.mouseevent is event.mouseevent: return
            line = self._last_event.artist
            line.set_mfc('k')
            line.set_zorder(1)
        self._last_event = event
        line = self._last_event.artist
        line.set_zorder(0)
        i = event.ind[0]
        x = np.array([line.get_xdata()[i]])
        y = np.array([line.get_ydata()[i]])
        if self._temp is None:
            # first pick: create the semi-transparent yellow highlight marker
            self._temp, = self._fig.axes[0].plot(x, y, 'yo', ms = 10,
                                                 alpha = .5)
            self._temp.set_zorder(2)
            self._temp.set_label('Spike %s'%i)
        else:
            self._temp.set_xdata(x)
            self._temp.set_ydata(y)
            self._temp.set_label('Spike %s'%i)
        self._fig.axes[0].legend((line, self._temp),(line.get_label(),
                                                     self._temp.get_label()), loc = 2)
        self._fig.canvas.draw()
        # inset axes showing the snippet of the picked spike
        ax = self._fig.add_axes([0.6,0.6,0.25,0.25], facecolor = 'none')
        ax.clear()
        index_sweep, index_spike = self.findOriginFromIndex(i, keepMasked =
                                                            self._keepMasked)
        ax.set_title('Spike %s (%s in sweep %s)'%(i, index_spike, index_sweep))
        ax.set_xlabel('index')
        ax.set_ylabel(self._sniptype + ' snippet')
        if not self._keepMasked:
            # _getSnip expects indices in the unmasked numbering
            i = self._findNotMaskedFromMaskedIndex(i)
        snip = self._getSnip(i, self._sniptype)
        line = ax.plot(snip.T, 'k')
        self._fig.canvas.draw()
def multi_prop_plot(self, fig, prop, sniptype = 'raw', keepMasked = False,
                    **kwargs):
    """Plot all prop[i] vs prop[j] combination in one figure
    Use prop = PCA to plot all PCA components
    prop = props to plot all other properties"""
    # Expand the two shortcut keywords into explicit property-name lists.
    if prop == 'PCA':
        prop = ['PCA_'+str(i) for i in range(len(self.get_param("props"))-2)]
    elif prop == 'props':
        prop = self.get_param('props')
    size = len(prop)
    fig.clear()
    data = [self._getDataToPlot(keepMasked=keepMasked,sniptype=sniptype, prop = i) for i in prop]
    axes =[]
    # Lower-triangular grid: row `line` plots prop[line] against every
    # earlier property.
    for line in range(1,size):
        datay = data[line]
        axes.append([fig.add_subplot(size-1,size-1,(line-1)*(size-1)+1+i) for i in range(line)])
        [ax.plot(data[j],datay, **kwargs) for j,ax in enumerate(axes[-1])]
    # Label only the leftmost column and the bottom row.
    [ax[0].set_ylabel(prop[i+1]) for i, ax in enumerate(axes)]
    [ax.set_xlabel(prop[i]) for i, ax in enumerate(axes[-1])]
    fig.canvas.draw()
    return fig
def _lasso(self):
    # Lazily create and return the cache dict of saved lasso masks.
    if not self._cache.has_key('lasso'):  # Python 2 dict API
        self._cache['lasso'] = {}
    return self._cache['lasso']
def lasso_prop_plot(self, figure, propx= 'min', propy = 'max', sniptype = 'raw',
                    clear = True, keepMasked = False, **kwargs):
    """Scatter-plot two spike properties with an interactive lasso selector.

    The selection is consumed later by keep_in_lasso / exlude_lasso.
    Returns the plotted (X, Y) arrays.
    """
    print 'ploting properties in %s'%self.name
    if clear: figure.clear()
    ax = figure.add_subplot(111)
    ax.set_xlabel(propx)
    ax.set_ylabel(propy)
    X = self._getDataToPlot(propx, sniptype, keepMasked)
    Y = self._getDataToPlot(propy, sniptype, keepMasked)
    # Snapshot the mask in effect when the lasso was drawn, so the lasso
    # result can later be mapped back onto the full spike set.
    self._lassoMask = self._mask()
    self._lassoManager = LassoManager(ax, np.vstack((X,Y)).T, sizes = (5,), **kwargs)
    ax.set_xlim(X.min(),X.max())
    ax.set_ylim(Y.min(),Y.max())
    figure.canvas.draw()
    return X, Y
def keep_in_lasso(self, name = None):
    """Keep only spikes inside the current lasso selection.

    The resulting boolean mask is stored in the lasso cache under `name`
    (auto-numbered 'lasso_N' when omitted) and AND-ed into the active
    'mask' cache entry if one exists.  Returns the mask.
    """
    isinside = self._lassoManager.isinside
    mask = np.array(self._lassoMask)
    # Only positions that were visible when the lasso was drawn can change.
    mask[mask] = np.logical_and(mask[mask], isinside)
    Lasso = self._lasso()
    if name is None:
        n = 0
        while 'lasso_%s'%n in Lasso.keys():
            n+=1
        name = 'lasso_%s'%n
    self._cache['lasso'][name] = mask
    if self._cache.has_key('mask'):  # Python 2 dict API
        self.set_cache('mask', np.logical_and(self._mask(), mask), force = 1)
    return mask
def exlude_lasso(self, name =None):
    """Exclude spikes inside the current lasso selection (inverse of
    keep_in_lasso).  Method name keeps its historical typo: public API.

    Stores the mask in the lasso cache under `name` (auto-numbered when
    omitted) and AND-s it into the active 'mask' entry.  Returns the mask.
    """
    isinside = self._lassoManager.isinside
    mask = np.array(self._lassoMask)
    # Keep only spikes that were visible and are NOT inside the lasso.
    mask[mask] = np.logical_and(mask[mask], np.logical_not(isinside))
    Lasso = self._lasso()
    if name is None:
        n = 0
        while 'lasso_%s'%n in Lasso.keys():
            n+=1
        name = 'lasso_%s'%n
    self._cache['lasso'][name] = mask
    if self._cache.has_key('mask'):  # Python 2 dict API
        self.set_cache('mask', np.logical_and(self._mask(), mask), force = 1)
    return mask
class CreateUniv:
    """Callable that builds the channel universe for a node: the node's
    input channel names followed by the literal entry 'None'."""

    def __init__(self, node):
        self.node = node

    def __call__(self):
        channels = self.node.in_chanNames()
        channels.append('None')
        return channels
|
nilq/baby-python
|
python
|
import json
import codecs
import tldextract
# Count how many times each ad URL appears in the intersecting-urls dump.
urls = dict()
duplicates = list()
with codecs.open('/home/rkapoor/Documents/ISI/data/Network/intersecting-urls.jsonl', 'r', 'utf-8') as f:
    for line in f:
        doc = json.loads(line)
        url = doc['url']
        if url in urls:
            urls[url] += 1
        else:
            urls[url] = 1
        # if url == 'http://flint.backpage.com/FemaleEscorts/unforgettable-new-staff-new-attitude/17626747':
        # 	duplicates.append(doc['name'])
# Exploratory/debug output, kept for reference:
# for key, value in sorted(urls.items(), key=lambda x:x[1]):
# 	if value > 10:
# 		print("%s: %s" % (key, value))
# print("SIZE:",len(urls))
# # for key, value in urls.items():
# # 	if(value > 1):
# # 		print(key,":",value)
# print(duplicates)

# Ground-truth file that will be filtered against the URL set below.
DATA_FILE = "/home/rkapoor/Documents/ISI/data/DIG-Nov-Eval/gt-v02-all.jl"
def safe_copy_simple(json_from, json_to, field):
    """Copy `field` from one dict to another, skipping absent or None values."""
    value = json_from.get(field)
    if value is not None:
        json_to[field] = value
def safe_copy(json_from, json_to, field):
    """Copy the distinct extracted values of `field` into json_to.

    json_from[field] holds extraction records whose 'result' entry is either
    a list of {'value': ...} dicts or a single such dict.  Any failure is
    reported by printing the offending field rather than raising.
    """
    try:
        records = json_from.get(field)
        if records is None:
            return
        distinct_values = set()
        for record in records:
            results = record['result']
            if type(results) is list:
                for entry in results:
                    distinct_values.add(entry['value'])
            elif 'value' in results:
                distinct_values.add(results['value'])
        json_to[field] = list(distinct_values)
    except Exception:
        print(json_from[field])
def extract_data(json_document, outfile):
    """Project one ground-truth document onto url/city/name/phone and append
    it to `outfile` as a single JSON line."""
    extracted_document = {}
    extracted_document['high_precision'] = {}
    extracted_document['high_recall'] = {}
    safe_copy_simple(json_document, extracted_document, 'url')
    # Each tier is reduced to the distinct extracted values per field.
    if 'high_precision' in json_document:
        safe_copy(json_document['high_precision'], extracted_document['high_precision'], 'city')
        safe_copy(json_document['high_precision'], extracted_document['high_precision'], 'name')
        safe_copy(json_document['high_precision'], extracted_document['high_precision'], 'phone')
    if 'high_recall' in json_document:
        safe_copy(json_document['high_recall'], extracted_document['high_recall'], 'city')
        safe_copy(json_document['high_recall'], extracted_document['high_recall'], 'name')
        safe_copy(json_document['high_recall'], extracted_document['high_recall'], 'phone')
    outfile.write(json.dumps(extracted_document))
    outfile.write('\n')
# Filter the ground-truth file down to documents whose URL appeared in the
# intersecting-urls dump; write them to <domain>/intersecting.jl.
output_file_base = "intersecting.jl"
count = 0
domain = 'backpage.com'
outfile = codecs.open(domain+'/'+output_file_base, 'w', 'utf-8')
with codecs.open(DATA_FILE, 'r', 'utf-8') as infile:
    for line in infile:
        count += 1
        json_document = json.loads(line)
        if json_document['url'] in urls:
            extract_data(json_document, outfile)
        if(count % 100 == 0):
            print(count)  # progress indicator
outfile.close()
|
nilq/baby-python
|
python
|
import subprocess
from uuid import uuid1
import yaml
from jinja2 import Environment, PackageLoader
from sanetrain.workflow_builder import generate_training
def test_generate_training():
    """End-to-end check: render the training template from the YAML model
    config, write it to a uniquely named script, and execute it."""
    env = Environment(loader=PackageLoader('sanetrain', 'templates'))
    template = env.get_template('test_template.py')
    with open('tests/test_model.yaml') as f:
        config = yaml.load(f, Loader=yaml.SafeLoader)
    train_script = generate_training(template, config)
    # uuid1 keeps parallel test runs from clobbering each other's output.
    fname = 'tests/%s.py' % uuid1().hex
    with open(fname, 'w+') as fout:
        fout.write(train_script)
    # NOTE(review): the generated file is never removed and the exit status
    # is not checked -- consider subprocess.run([...], check=True).
    subprocess.run(["python", fname])
|
nilq/baby-python
|
python
|
#!/usr/bin/python
import time
# Factorials of the digits 0-9, precomputed for sumofact().
fact_arr = [1, 1, 2, 6, 24, 120, 720, 5040, 40320, 362880]

class memoize:
    """Cache a function's results keyed on its FIRST positional argument only.

    NOTE(review): keying on args[0] alone is only safe when the remaining
    arguments cannot change the result; lochelper's visited set `s` differs
    between top-level calls, so cached chain lengths get reused across
    different sets -- confirm this is intended.
    """
    def __init__(self, function):
        self.function = function
        self.memoized = {}

    def __call__(self, *args):
        try:
            return self.memoized[args[0]]
        except KeyError:
            self.memoized[args[0]] = self.function(*args)
            return self.memoized[args[0]]
def fact(n):
    """Return n! (Python 2: reduce and xrange are builtins here)."""
    if n in (0, 1):
        return 1
    return reduce(lambda x,y: x*y, xrange(2, n+1))
def sumofact(n):
    """Return the sum of the factorials of n's decimal digits."""
    total = 0
    while n > 0:
        total += fact_arr[n%10]
        n /= 10  # integer (floor) division under Python 2
    return total
    # Earlier, slower alternatives kept for reference:
    #total = 0
    #for d in str(n):
    #    total += fact(int(d))
    #return total
    #return reduce(lambda x,y: fact(int(x))+fact(int(y)), str(n))
@memoize
def lochelper(n, s):
    """Count chain steps from n until a value already in visited-set s repeats."""
    if n in s:
        return 0
    s.add(n)
    return 1 + lochelper(sumofact(n), s)
def lengthochain(n):
    """Length of the non-repeating digit-factorial chain starting at n."""
    return lochelper(n, set([]))
if __name__ == '__main__':
    # Project Euler 74: count starting numbers below one million whose
    # digit-factorial chain has exactly sixty non-repeating terms.
    #t = time.clock()
    count = 0
    for i in xrange(1000000):
        if lengthochain(i) == 60:
            count += 1
    print count
    #print time.clock()-t
|
nilq/baby-python
|
python
|
#! /usr/bin/env python
def condense(w):
    """Abbreviate a word numeronym-style: first letter, inner-letter count,
    last letter (e.g. 'internationalization' -> 'i18n')."""
    inner_count = len(w) - 2
    return "{0}{1}{2}".format(w[0], inner_count, w[-1:])
def expand(w):
    """Print every dictionary word matching a condensed form such as 'i18n'."""
    # Reconstruct the target word length from the embedded inner count.
    length = int(w[1:-1]) + 2
    for word in get_words(length):
        if word.startswith(w[0]) and word.endswith(w[-1:]):
            print word
def get_words(length, filename = '/usr/share/dict/words'):
    """Yield dictionary words whose stripped length equals `length`.

    Bug fix: the original compared len(word) BEFORE stripping, so the raw
    line's trailing newline made every genuinely matching word one character
    "too long" and it was skipped.  Also closes the file when iteration
    finishes instead of leaking the handle.
    """
    with open(filename) as handle:
        for line in handle:
            word = line.strip()
            if len(word) == length:
                yield word
if __name__ == "__main__":
    # Interactive loop (Python 2): condense each typed word, then try to
    # expand the input back against the system dictionary.
    print "Words With Numbers In Them Thing"
    while(True):
        w = raw_input("Word: ")
        print "Condensed: "
        print ' '.join(condense(p) for p in w.split())
        try:
            print "Expanded: "
            expand(w)
        except:  # NOTE(review): bare except also hides KeyboardInterrupt
            print "Could not expand " + w
|
nilq/baby-python
|
python
|
#
#
# Use: genKey(5)
# => "xmckl"
#
#
import math, random
def genKey(n):
    """Return a random key of `n` lower-case ASCII letters.

    Cleanup: `math.floor` around `random.randint` was redundant (randint
    already returns an int), and character-by-character `+=` concatenation
    is replaced by `random.choice` with a single join.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyz"
    return "".join(random.choice(alphabet) for _ in range(n))
|
nilq/baby-python
|
python
|
# Read two "code quantity unit-price" lines and print the combined total.
linha1 = input().split(" ")
linha2 = input().split(" ")
cod1, qtde1, valor1 = linha1
cod2, qtde2, valor2 = linha2
total = (int(qtde1) * float(valor1)) + (int(qtde2) * float(valor2))
print("VALOR A PAGAR: R$ %0.2f" %total)
|
nilq/baby-python
|
python
|
# https://github.com/dannysteenman/aws-toolbox
#
# License: MIT
#
# This script will set a CloudWatch Logs Retention Policy to x number of days for all log groups in the region that you exported in your cli.
import argparse
import boto3
cloudwatch = boto3.client("logs")
def get_cloudwatch_log_groups():
    """Return every CloudWatch log group in the region, following pagination.

    Bug fix: describe_log_groups returns its pagination cursor under the key
    'nextToken' (lower-case n) and accepts it back as the 'nextToken' kwarg.
    The original looked for 'NextToken', which never matches, so only the
    first page (up to 50 groups) was ever returned.
    """
    kwargs = {"limit": 50}
    cloudwatch_log_groups = []
    while True:  # Paginate
        response = cloudwatch.describe_log_groups(**kwargs)
        cloudwatch_log_groups += [log_group for log_group in response["logGroups"]]
        if "nextToken" in response:
            kwargs["nextToken"] = response["nextToken"]
        else:
            break
    return cloudwatch_log_groups
def cloudwatch_set_retention(args):
    """Apply the requested retention (in days) to every log group that does
    not already have it.

    Args:
        args: parsed argparse namespace with an integer `retention` attribute.
    """
    retention = args.retention  # idiomatic attribute access, was vars(args)["retention"]
    cloudwatch_log_groups = get_cloudwatch_log_groups()
    for group in cloudwatch_log_groups:
        print(group)
        # Update when no retention is set or it differs from the target.
        if "retentionInDays" not in group or group["retentionInDays"] != retention:
            print(f"Retention needs to be updated for: {group['logGroupName']}")
            cloudwatch.put_retention_policy(
                logGroupName=group["logGroupName"], retentionInDays=retention
            )
        else:
            print(
                f"CloudWatch Loggroup: {group['logGroupName']} already has the specified retention of {group['retentionInDays']} days."
            )
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Set a retention in days for all your CloudWatch Logs in a single region."
    )
    # choices mirrors the retention values accepted by the CloudWatch Logs API.
    parser.add_argument(
        "retention",
        metavar="RETENTION",
        type=int,
        choices=[
            1,
            3,
            5,
            7,
            14,
            30,
            60,
            90,
            120,
            150,
            180,
            365,
            400,
            545,
            731,
            1827,
            3653,
        ],
        help="Enter the retention in days for the CloudWatch Logs.",
    )
    args = parser.parse_args()
    cloudwatch_set_retention(args)
|
nilq/baby-python
|
python
|
from backend.util.crypto_hash import crypto_hash
# One hexadecimal digit -> its 4-bit binary string.
HEX_TO_BINARY_CONVERSION_TABLE = {
    '0': '0000',
    '1': '0001',
    '2': '0010',
    '3': '0011',
    '4': '0100',
    '5': '0101',
    '6': '0110',
    '7': '0111',
    '8': '1000',
    '9': '1001',
    'a': '1010',
    'b': '1011',
    'c': '1100',
    'd': '1101',
    'e': '1110',
    'f': '1111'
}


def hex_to_binary(hex_string):
    """Convert a lower-case hexadecimal string to its binary representation."""
    return ''.join(HEX_TO_BINARY_CONVERSION_TABLE[digit] for digit in hex_string)
def main():
    # Round-trip demo: int -> hex -> binary -> int, then convert a hash digest.
    number = 451
    hex_number = hex(number)[2:]  # strip the '0x' prefix
    print(f'hex_number: {hex_number}')
    binary_number = hex_to_binary(hex_number)
    print(f'binary_number: {binary_number}')
    original_number = int(binary_number,2)
    print(f'original_number: {original_number}')
    # crypto_hash is a project helper; assumes it returns a lower-case hex digest.
    hex_to_binary_crypto_hash = hex_to_binary(crypto_hash('test-data'))
    print(f'hex_to_binary_crypto_hash: {hex_to_binary_crypto_hash}')

if __name__ =='__main__':
    main()
|
nilq/baby-python
|
python
|
import numpy as np
from numpy.linalg import inv, cholesky
from scipy.stats import norm, rankdata
from synthpop.method import NormMethod, smooth
class NormRankMethod(NormMethod):
    """Normal-rank regression synthesis.

    Adapted from `norm`: the regression is carried out on the normal scores
    (Z scores) of the ranks of y; new Z scores are predicted and then mapped
    back onto the observed values of y through the empirical distribution.
    """

    def fit(self, X_df, y_df):
        X_df, y_df = self.prepare_dfs(X_df=X_df, y_df=y_df, normalise_num_cols=True, one_hot_cat_cols=True)
        y_real_min, y_real_max = np.min(y_df), np.max(y_df)
        self.n_rows, n_cols = X_df.shape
        X = X_df.to_numpy()
        y = y_df.to_numpy()
        # Normal scores of the ranks; dividing by (n+1) keeps them in (0, 1).
        # NOTE(review): astype(int) truncates the .5 average ranks produced by
        # ties -- confirm ties cannot occur or are handled upstream.
        z = norm.ppf(rankdata(y).astype(int) / (self.n_rows + 1))
        self.norm.fit(X, z)
        residuals = z - self.norm.predict(X)

        if self.proper:
            # looks like proper is not working quite yet as it produces negative values for a strictly possitive column
            # Draws values of beta and sigma for Bayesian linear regression synthesis of y given x according to Rubin p.167
            # https://link.springer.com/article/10.1007/BF02924688
            self.sigma = np.sqrt(np.sum(residuals**2) / np.random.chisquare(self.n_rows - n_cols))
            # NOTE: I don't like the use of inv()
            V = inv(np.matmul(X.T, X))
            self.norm.coef_ += np.matmul(cholesky((V + V.T) / 2), np.random.normal(scale=self.sigma, size=n_cols))
        else:
            self.sigma = np.sqrt(np.sum(residuals**2) / (self.n_rows - n_cols - 1))

        if self.smoothing:
            y = smooth(self.dtype, y, y_real_min, y_real_max)
        self.y_sorted = np.sort(y)

    def predict(self, X_test_df):
        X_test_df, _ = self.prepare_dfs(X_df=X_test_df, normalise_num_cols=True, one_hot_cat_cols=True, fit=False)
        n_test_rows = len(X_test_df)
        X_test = X_test_df.to_numpy()
        z_pred = self.norm.predict(X_test) + np.random.normal(scale=self.sigma, size=n_test_rows)
        # Bug fix: mapping Z scores back to rank space must invert the ppf
        # used in fit, i.e. use the normal CDF.  The original used norm.pdf,
        # whose values never exceed ~0.4, so only the lowest ranks were ever
        # selected.  Ranks are 1-based, hence the -1 when indexing y_sorted
        # (the original indexed with the clipped rank directly, which could
        # also run one past the end of the array).
        y_pred_ranks = (norm.cdf(z_pred) * (self.n_rows + 1)).astype(int)
        y_pred_ranks = np.clip(y_pred_ranks, 1, self.n_rows)
        y_pred = self.y_sorted[y_pred_ranks - 1]
        return y_pred
|
nilq/baby-python
|
python
|
from caty.core.spectypes import UNDEFINED
from caty.core.facility import Facility, AccessManager
class MongoHandlerBase(Facility):
    """Base class for MongoDB-backed Caty facilities; subclasses share the
    access-control manager declared here."""
    am = AccessManager()
|
nilq/baby-python
|
python
|
# Read a full name and report it upper/lower-cased plus letter counts.
nome=input('digite seu nome completo =')
nomeup=nome.upper()
nomelo=nome.lower()
nomese=nome.strip()
dividido=nome.split()
print('em maiusculas = {}'.format(nomeup.strip()))
print('em minusculas = {}'.format(nomelo.strip()))
# Letter count ignores the spaces between name parts.
print('o seu nome tem {} letras'.format(len(nomese)-nomese.count(' ')))
print('o seu primeiro nome tem {}'.format(len(dividido[0])))
|
nilq/baby-python
|
python
|
from flask import render_template, request
from sqlalchemy import desc
from app.proto import bp
from app.models import Share
@bp.route('/', methods=['GET', 'POST'])
@bp.route('/index', methods=['GET', 'POST'])
def index():
    """Home page: list a user's shares, newest first.

    NOTE(review): user_id comes straight from the query string with no
    authentication -- confirm this is prototype-only behavior.
    """
    user_id = request.args.get('user_id')
    shares = Share.query.filter_by(user_id=user_id).order_by(desc(Share.timestamp)).all()
    return render_template('index.html', user_id=user_id, shares=shares)
@bp.route('/register', methods=['GET', 'POST'])
def register():
    """Render the registration page (no form processing yet)."""
    return render_template('register.html')
@bp.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login page (no form processing yet)."""
    return render_template('login.html')
|
nilq/baby-python
|
python
|
import ProblemFileHandler as handler
import OJTemplate
# Generate a problem file.
# Method 1: problem_text_file + test_cases_file
text_file1 = '../resources/OJ/demo_problem1_text.txt'
test_cases_file1 = '../resources/OJ/demo_problem1_test_cases.txt'
output_file1 = '../resources/OJ/Problems/Problem1.plm'
handler.generate_problem(problem_text_file=text_file1,
                         test_cases_file=test_cases_file1,
                         output_file=output_file1,
                         overwrite=True)

# Method 2: problem_text_file + standard_answer_func + test_inputs
# Note: test_cases_file must be None (the default) in this case.
text_file2 = '../resources/OJ/demo_problem2_text.txt'
answer_func = OJTemplate.standard_answer
inputs = OJTemplate.test_inputs
output_file2 = '../resources/OJ/Problems/Problem2.plm'
handler.generate_problem(problem_text_file=text_file2,
                         standard_answer_func=answer_func,
                         test_inputs=inputs,
                         output_file=output_file2,
                         overwrite=True)

# Load a Problem (.plm) file: returns a dict with the keys 'text' and 'test_cases'.
problem_dict1 = handler.load_problem_file(output_file1)
problem_dict2 = handler.load_problem_file(output_file2)
print(problem_dict1)
print(problem_dict2)
|
nilq/baby-python
|
python
|
import ctypes
import ctypes.util  # find_library lives in the util submodule; `import ctypes` alone does not load it
import os
import random
import time

import discord
from discord.ext import commands

import server
import server
from cogs.funcog import Fun
from cogs.musiccog import Music
# Locate and load the opus codec required for Discord voice support.
find_opus = ctypes.util.find_library('opus')
discord.opus.load_opus(find_opus)
TOKEN = os.getenv("DISCORD_TOKEN")
# Silence useless bug reports messages
bot = commands.Bot(command_prefix='!')
bot.add_cog(Music(bot))
bot.add_cog(Fun(bot))

@bot.event
async def on_ready():
    # Fires once the gateway connection is established.
    print('Logged in as:\n{0.user.name}\n{0.user.id}'.format(bot))

server.server()  # keep-alive web server (see server module)
bot.run(TOKEN)
|
nilq/baby-python
|
python
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch.nn as nn
from maskrcnn_benchmark.utils.checkpoint import DetectronCheckpointer
from maskrcnn_benchmark.layers.non_local import init_nl_module
from .generalized_rcnn import GeneralizedRCNN
import torch
from maskrcnn_benchmark.layers import CoordConv2d
from torch.nn.parameter import Parameter
_DETECTION_META_ARCHITECTURES = {"GeneralizedRCNN": GeneralizedRCNN}

def build_detection_model(cfg):
    """Build the detection model and optionally retrofit CoordConv layers.

    Weights are loaded first; for each identifier in
    cfg.MODEL.BACKBONE.COORDS that is recognized, the named conv is replaced
    by a CoordConv2d whose two extra coordinate input channels start with
    zero weights, so initial behavior matches the pretrained conv exactly.
    """
    # Define and load the original model
    meta_arch = _DETECTION_META_ARCHITECTURES[cfg.MODEL.META_ARCHITECTURE]
    model = meta_arch(cfg)
    dummy_checkpointer = DetectronCheckpointer(cfg, model)
    dummy_checkpointer.load(cfg.MODEL.WEIGHT)
    if cfg.MODEL.BACKBONE.COORDS:
        # Maps an identifier onto (parent module, attribute name) of the conv to swap.
        module_dict = {
            "input": {"parent": model.backbone.body.stem, "name": "conv1"},
            "rpn_input": {"parent": model.rpn.head, "name": "conv"}}
        # "rpn_input": {"parent": model.rpn.head, "name": "conv"}}
        # }
        for identifier in cfg.MODEL.BACKBONE.COORDS:
            if identifier not in module_dict.keys():
                continue
            parent_module = module_dict[identifier]["parent"]
            name = module_dict[identifier]["name"]
            old_conv = getattr(parent_module, name)
            out_ch, in_ch, h, w = old_conv.weight.shape
            # Append two zero-initialized input channels for the (x, y) coords.
            new_weight = torch.cat([old_conv.weight,
                                    torch.zeros([out_ch, 2, h, w], dtype=torch.float32)], dim=1)
            kwargs = {"with_r": False}
            for key in ["in_channels", "out_channels", "kernel_size", "stride",
                        "padding", "dilation", "groups"]:
                kwargs[key] = getattr(old_conv, key)
            if old_conv.bias is None:
                kwargs["bias"] = False
            else:
                kwargs["bias"] = True
            # https://discuss.pytorch.org/t/how-can-i-modify-certain-layers-weight-and-bias/11638/3
            new_conv = CoordConv2d(**kwargs)
            new_conv.conv.state_dict()["weight"].copy_(new_weight)
            if old_conv.bias is not None:
                new_conv.conv.state_dict()["bias"].copy_(old_conv.bias.data)
            delattr(parent_module, name)
            setattr(parent_module, name, new_conv)
            print("Replace", old_conv, "to", new_conv)

    # insert non-local block just before the last block of res4 (layer3)
    # if cfg.MODEL.BACKBONE.NON_LOCAL != "":
    #     nl_block_type, _ = cfg.MODEL.BACKBONE.NON_LOCAL.split("_")
    #     layer3_list = list(model.backbone.body.layer3.children())
    #     in_ch = list(layer3_list[-1].children())[0].in_channels
    #     layer3_list.insert(
    #         len(layer3_list) - 1,
    #         init_nl_module(nl_block_type, in_ch, int(in_ch / 2)))
    #     model.backbone.body.layer3 = nn.Sequential(*layer3_list)
    return model
|
nilq/baby-python
|
python
|
# Copyright 2017 ForgeFlow S.L.
# Copyright 2018 Carlos Dauden - Tecnativa <carlos.dauden@tecnativa.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError
class AccountPaymentMode(models.Model):
    """Extends payment modes with invoice bank-account display options and
    forbids changing the company once journal entries/items reference it."""
    _inherit = "account.payment.mode"

    # How much of the bank account number invoices should reveal.
    show_bank_account = fields.Selection(
        selection=[
            ("full", "Full"),
            ("first", "First n chars"),
            ("last", "Last n chars"),
            ("no", "No"),
        ],
        string="Show bank account",
        default="full",
        help="Show in invoices partial or full bank account number",
    )
    show_bank_account_from_journal = fields.Boolean(string="Bank account from journals")
    # Number of characters shown when 'first'/'last' is selected above.
    show_bank_account_chars = fields.Integer(
        string="# of digits for customer bank account"
    )

    @api.constrains("company_id")
    def account_invoice_company_constrains(self):
        """Block company changes while any journal entry uses this mode."""
        for mode in self:
            if (
                self.env["account.move"]
                .sudo()
                .search(
                    [
                        ("payment_mode_id", "=", mode.id),
                        ("company_id", "!=", mode.company_id.id),
                    ],
                    limit=1,
                )
            ):
                raise ValidationError(
                    _(
                        "You cannot change the Company. There exists "
                        "at least one Journal Entry with this Payment Mode, "
                        "already assigned to another Company."
                    )
                )

    @api.constrains("company_id")
    def account_move_line_company_constrains(self):
        """Block company changes while any journal item uses this mode."""
        for mode in self:
            if (
                self.env["account.move.line"]
                .sudo()
                .search(
                    [
                        ("payment_mode_id", "=", mode.id),
                        ("company_id", "!=", mode.company_id.id),
                    ],
                    limit=1,
                )
            ):
                raise ValidationError(
                    _(
                        "You cannot change the Company. There exists "
                        "at least one Journal Item with this Payment Mode, "
                        "already assigned to another Company."
                    )
                )
|
nilq/baby-python
|
python
|
''' FUNCTIONS
Functions are blocks of code that perform a specific task.
'''
|
nilq/baby-python
|
python
|
from classifiers.base_stance_classifier import BaseStanceClassifier
from classifiers.random_stance_classifier import RandomStanceClassifier
from classifiers.greedy_stance_classifier import MSTStanceClassifier
from classifiers.maxcut_stance_classifier import MaxcutStanceClassifier
|
nilq/baby-python
|
python
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
#
import threading
import time
from opentelemetry.context import attach, detach, set_value
from opentelemetry.sdk.metrics import Meter
from opentelemetry.sdk.metrics.export import MetricsExportResult
from azure_monitor.sdk.auto_collection.live_metrics import utils
from azure_monitor.sdk.auto_collection.live_metrics.exporter import (
LiveMetricsExporter,
)
from azure_monitor.sdk.auto_collection.live_metrics.sender import (
LiveMetricsSender,
)
from azure_monitor.sdk.auto_collection.metrics_span_processor import (
AzureMetricsSpanProcessor,
)
# Interval used after the failure threshold is reached, in seconds
FALLBACK_INTERVAL = 60.0
# Ping interval for successful requests in seconds
PING_INTERVAL = 5.0
# Post interval for successful requests in seconds
POST_INTERVAL = 1.0
# Main process interval (Manager) in seconds
MAIN_INTERVAL = 2.0
class LiveMetricsManager(threading.Thread):
    """Live Metrics Manager

    It will start Live Metrics process when instantiated,
    responsible for switching between ping and post actions.
    """

    daemon = True

    def __init__(
        self,
        meter: Meter,
        instrumentation_key: str,
        span_processor: AzureMetricsSpanProcessor,
    ):
        super().__init__()
        self.thread_event = threading.Event()
        self.interval = MAIN_INTERVAL
        self._instrumentation_key = instrumentation_key
        self._is_user_subscribed = False
        self._meter = meter
        self._span_processor = span_processor
        self._exporter = LiveMetricsExporter(
            self._instrumentation_key, self._span_processor
        )
        self._post = None
        # Start in ping mode until the service reports a subscribed user.
        self._ping = LiveMetricsPing(self._instrumentation_key)
        self.start()

    def run(self):
        # Poll subscription state every MAIN_INTERVAL seconds until shutdown.
        self.check_if_user_is_subscribed()
        while not self.thread_event.wait(self.interval):
            self.check_if_user_is_subscribed()

    def check_if_user_is_subscribed(self):
        """Swap the active worker between ping and post based on subscription."""
        if self._ping:
            if self._ping.is_user_subscribed:
                # Switch to Post
                self._ping.shutdown()
                self._ping = None
                self._span_processor.is_collecting_documents = True
                self._post = LiveMetricsPost(
                    self._meter, self._exporter, self._instrumentation_key
                )
        if self._post:
            if not self._post.is_user_subscribed:
                # Switch to Ping
                self._span_processor.is_collecting_documents = False
                self._post.shutdown()
                self._post = None
                self._ping = LiveMetricsPing(self._instrumentation_key)

    def shutdown(self):
        # Stop whichever worker is active, then stop the manager thread itself.
        if self._ping:
            self._ping.shutdown()
        if self._post:
            self._post.shutdown()
        self.thread_event.set()
class LiveMetricsPing(threading.Thread):
    """Ping to Live Metrics service

    Ping to determine if user is subscribed and live metrics need to be send.
    """

    daemon = True

    def __init__(self, instrumentation_key):
        super().__init__()
        self.instrumentation_key = instrumentation_key
        self.thread_event = threading.Event()
        self.interval = PING_INTERVAL
        self.is_user_subscribed = False
        self.last_send_succeeded = False
        self.last_request_success_time = 0
        self.sender = LiveMetricsSender(self.instrumentation_key)
        self.start()

    def run(self):
        # Ping immediately, then on every interval tick until shutdown.
        self.ping()
        while not self.thread_event.wait(self.interval):
            self.ping()

    def ping(self):
        """Send one ping; instrumentation is suppressed so the ping itself
        is not traced.  Sets is_user_subscribed from the response header."""
        envelope = utils.create_metric_envelope(self.instrumentation_key)
        token = attach(set_value("suppress_instrumentation", True))
        response = self.sender.ping(envelope)
        detach(token)
        if response.ok:
            if not self.last_send_succeeded:
                self.interval = PING_INTERVAL
                self.last_send_succeeded = True
            self.last_request_success_time = time.time()
            if (
                response.headers.get(utils.LIVE_METRICS_SUBSCRIBED_HEADER)
                == "true"
            ):
                self.is_user_subscribed = True
        else:
            self.last_send_succeeded = False
            # Back off after a full minute without a successful request.
            if time.time() >= self.last_request_success_time + 60:
                self.interval = FALLBACK_INTERVAL

    def shutdown(self):
        self.thread_event.set()
class LiveMetricsPost(threading.Thread):
    """Post to Live Metrics service

    Post to send live metrics data when user is subscribed.
    """

    daemon = True

    def __init__(self, meter, exporter, instrumentation_key):
        super().__init__()
        self.instrumentation_key = instrumentation_key
        self.meter = meter
        self.thread_event = threading.Event()
        self.interval = POST_INTERVAL
        self.is_user_subscribed = True
        self.last_send_succeeded = False
        self.last_request_success_time = time.time()
        self.exporter = exporter
        self.start()

    def run(self):
        # Post immediately, then on every interval tick until shutdown.
        self.post()
        while not self.thread_event.wait(self.interval):
            self.post()

    def post(self):
        """Collect current metrics and export them; instrumentation is
        suppressed so the export itself is not traced."""
        self.meter.collect()
        token = attach(set_value("suppress_instrumentation", True))
        result = self.exporter.export(self.meter.batcher.checkpoint_set())
        detach(token)
        self.meter.batcher.finished_collection()
        if result == MetricsExportResult.SUCCESS:
            self.last_request_success_time = time.time()
            if not self.last_send_succeeded:
                self.interval = POST_INTERVAL
                self.last_send_succeeded = True
            # The exporter learns from the service whether the user is still
            # watching; the manager switches back to ping mode when not.
            if not self.exporter.subscribed:
                self.is_user_subscribed = False
        else:
            self.last_send_succeeded = False
            # Back off after twenty seconds without a successful request.
            if time.time() >= self.last_request_success_time + 20:
                self.interval = FALLBACK_INTERVAL

    def shutdown(self):
        self.thread_event.set()
|
nilq/baby-python
|
python
|
from art import logo
import os
clear = lambda: os. system('cls')
def new_bidder():
    """Prompt for one bid, record it, and track the running highest bid."""
    global greater_bid
    bidder = input("What's your name?: ")
    bid = int(input("What's your bid?: "))
    new_bidder_dict = {"Bidder": bidder, "Bid": bid}
    if bid > greater_bid["Bid"]:
        greater_bid = new_bidder_dict
    # Keys are simple 1-based entry numbers.
    bids_dictionary[len(bids_dictionary)+1] = new_bidder_dict
# Blind-auction loop: collect bids until nobody is left, then announce the winner.
print(logo)
bids_dictionary = {}
greater_bid = {"Bidder": "Start", "Bid": 0}
while True:
    new_bidder()
    other_bidder = input("Are there any other bidders? Type 'yes' or 'no': ")
    clear()  # wipe the screen so the next bidder cannot see previous bids
    if other_bidder == "no":
        break
print(f'The winner is {greater_bid["Bidder"]} with a bid of ${greater_bid["Bid"]}.')
|
nilq/baby-python
|
python
|
from geocoder import main
from geocoder import STARTTIME, NUM_DOCS
import re
import os
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from datetime import datetime #for testing time of script execution
RE_URLS = 'http[s]?:\/\/(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+'
RE_AT_MENTIONS = '(?:@[\w_]+)'
RE_HASHTAGS = '#'
RE_EXTRA_WHITE_SPACE = '\s+'
RE_INSIDE_PARENTHESIS = '\([^)]*\)'
RE_SPECIAL_CHARS = "\.|\,|\\|\r|\n|\s|\(|\)|\"|\[|\]|\{|\}|\;|\:|\.|\°|\-|\/|\&|\(|\)|\||\*"
#preserve question marks and exclamation marks for Vader
EMOJI_PATTERN = re.compile("["
u"\U0001F600-\U0001F64F" # emoticons
u"\U0001F300-\U0001F5FF" # symbols & pictographs
u"\U0001F680-\U0001F6FF" # transport & map symbols
u"\U0001F1E0-\U0001F1FF" # flags (iOS)
"]+", flags=re.UNICODE)
EMOJI_PATTERN2 = re.compile(u'('
u'\ud83c[\udf00-\udfff]|'
u'\ud83d[\udc00-\ude4f\ude80-\udeff]|'
u'[\u2600-\u26FF\u2700-\u27BF])+',
re.UNICODE)
class CleanText():
    """
    Clean the text of the tweet as well as the user description (for later analysis).
    Preserving things like emojis and exclamation marks for Vader Sentiment Analyzer,
    since it is able to interpret meaning / emotional value from these symbols.
    """

    def clean_tweet(self, text):
        # Tweets keep emojis/punctuation so Vader can use them.
        cleaned = str(text)
        for pattern in (RE_URLS, RE_AT_MENTIONS, RE_HASHTAGS, RE_SPECIAL_CHARS):
            cleaned = re.sub(pattern, " ", cleaned)
        return re.sub(RE_EXTRA_WHITE_SPACE, " ", cleaned.strip())

    def clean_user(self, text):
        # User descriptions are scrubbed harder: parentheticals and emojis go too.
        cleaned = str(text)
        for pattern in (RE_URLS, RE_AT_MENTIONS, RE_INSIDE_PARENTHESIS,
                        RE_HASHTAGS, EMOJI_PATTERN, EMOJI_PATTERN2,
                        RE_SPECIAL_CHARS):
            cleaned = re.sub(pattern, " ", cleaned)
        return re.sub(RE_EXTRA_WHITE_SPACE, " ", cleaned.strip())
class Analyzer():
    """Run Vader sentiment analysis over the tweets dataframe."""

    def __init__(self, df):
        self.df = df  # expects 'text' and 'user_description' columns

    def analyze_sentiment(self):
        """Clean the text columns, score each tweet's compound sentiment, and
        flag strongly polarized tweets.  Also dumps the dataframe to a CSV
        (testing aid) before returning it."""
        self.df['text'] = self.df['text'].apply(CleanText().clean_tweet)
        self.df['user_description'] = self.df['user_description'].apply(CleanText().clean_user)
        all_tweets = list(self.df['text'])
        analyzer = SentimentIntensityAnalyzer()
        """
        Can also include sentiment 'sub-scores' (i.e. negative, neutral, and positive),
        but for now only including composite sentiment. Others are commented out.
        """
        # neg_sent = []
        # neu_sent = []
        # pos_sent = []
        comp_sent = []
        for tw in all_tweets:
            vs = analyzer.polarity_scores(tw)
            # neg_sent.append(vs['neg'])
            # neu_sent.append(vs['neu'])
            # pos_sent.append(vs['pos'])
            comp_sent.append(vs['compound'])
        # self.df['neg. sentiment'] = neg_sent
        # self.df['neu. sentiment'] = neu_sent
        # self.df['pos. sentiment'] = pos_sent
        self.df['comp. sentiment'] = comp_sent
        # |compound| >= 0.8 marks strongly polarized tweets.
        self.df['strong positive'] = self.df['comp. sentiment'].map(lambda x: 1 if x >= 0.8 else 0)
        self.df['strong negative'] = self.df['comp. sentiment'].map(lambda x: 1 if x <= -0.8 else 0)
        """CONSIDER FILTERING OUT SENTIMENTS THAT FALL WITHIN 'MIDDLE RANGE'
        (e.g. anything between -0.5 -- 0.5 ) """
        """Will eventually return the resulting df, but for now printing it to csv for testing"""
        test_filename = 'sandbox/live_demo.csv'
        if os.path.exists(test_filename):
            print(f'\n\n{test_filename} already exists; removing it first\n')
            os.remove(test_filename)
        with open(test_filename, 'w') as f:
            self.df.to_csv(f, header=True)
            print("Successfully printed to csv!\n\n")
        return self.df
if __name__ == '__main__':
    print(f"\n\nNumber of documents currently in DB: {NUM_DOCS}\n")
    df = main()  # geocoder pipeline produces the tweets dataframe
    print(f'Passing dataframe to the sentiment analyzer...')
    sentiment_analyzer = Analyzer(df)
    sentiment_analyzer.analyze_sentiment()
    print(f"Time to completion: {datetime.now() - STARTTIME}")
|
nilq/baby-python
|
python
|
#Making List l
l = [11, 12, 13, 14]
#Using append function on list
l.append(50)
l.append(60)
print("list after adding 50 & 60:- ", l)
#Using remove function on list
l.remove(11)
l.remove(13)
print("list after removing 11 & 13:- ", l)
#Using the sort function with their parameters changed
#Implementing sorting in a list
l.sort(reverse=False)
print("list after sortinng in ascending order:- ",l)
l.sort(reverse=True)
print("list after sorting in descending order:- ",l)
#Implementing searching in a list
if 13 in l:
print("yes 13 is in the list")
else:
print("no 13 is not in the list")
print("no of elements list have:- ",len(l))
#Implementing traversing in a list
s = 0
oddsum = 0
evensum = 0
primesum = 0
for i in l:
s = s + i
if i % 2 == 0:
evensum = evensum + i
else:
oddsum = oddsum + i
count = 0
j = 1
while( j < len(l)):
if l[j] % j == 0:
count = count + 1
j = j+1
if count == 2:
primesum = primesum + l[i]
print("sum of elements in the list:- ",s)
print("sum of odd elements in the list:- ",oddsum)
print("sum of even elements in the list:- ",evensum)
print("sum of prime elements in the list:- ",primesum)
#Using clear function to delete all the data in list
#Implementing delete functionality in a list by using predefined functions
l.clear()
print("list after using clear function:- ",l)
del l
|
nilq/baby-python
|
python
|
# Club-night budget tracker: accumulate drink orders until the target budget
# is reached or the "Party!" command arrives.
budget_wanted = float(input())
total = 0
money_full = False

command = input()
while command != "Party!":
    drink_name = command
    number_of_drinks = int(input())
    # A drink's unit price equals the length of its name.
    price = int(len(drink_name))
    drinks_price = price * number_of_drinks
    # Odd order totals get a 25% discount.
    if drinks_price % 2 == 1:
        drinks_price -= drinks_price * 25 / 100
    else:
        drinks_price = drinks_price
    total += drinks_price
    if total >= budget_wanted:
        print(f"Target acquired.")
        money_full = True
        break
    command = input()
if command == "Party!" and total < budget_wanted:
    diff = budget_wanted - total
    print(f"We need {diff:.2f} leva more.")
if money_full or command == "Party!":
    print(f"Club income - {total:.2f} leva.")
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# coding:utf-8
#DESCRICAO
#Esse script foi desenvolvido para facilitar a forma como cadastramos,
#alteramos ou excluímos os principais ativos em dois ou mais servidores zabbix.
#A ideia é utilizar esse script para ambientes onde os eventos não estão sincronizados,
#permitindo uma ótima facilidade e agilidade nesses processos.
#A integracao e realizada via Zabbix API
# Author: Vinicius Trancoso Bitencourt - <http:github/viniciustbitencourt>
#
# FileName: altera_hosts.py
import sys
from ConfigParser import SafeConfigParser
from zabbix_api import ZabbixAPI
#Arquivo de configuracao com os parametros conf.ini
config = SafeConfigParser()
config.read('conf.ini')
#pega os valores do arquivo de configuracao
host01 = config.get('zabbix01', 'hostname')
usr01 = config.get('zabbix01', 'user')
pwd01 = config.get('zabbix01', 'passwd')
#pega os valores do arquivo de configuração
host02 = config.get('zabbix02', 'hostname')
usr02 = config.get('zabbix02', 'user')
pwd02 = config.get('zabbix02', 'passwd')
#API Zabbix com a URL de cada Servidor
zapi = ZabbixAPI(host01)
zapi2 = ZabbixAPI(host02)
#Faz login com a API Zabbix
zapi.login(usr01, pwd01)
zapi2.login(usr02, pwd02)
class AlteraHosts(object):
pass
##TELA DE EXIBICAO
print "***************************************************************************"
print "*********** SCRIPT - ALTERA HOSTS EM DOIS SERVIDORES ZABBIX ***************"
print "* FAVOR INSERIR TODOS OS DADOS CORRETAMENTE! *"
print "***************************************************************************"
print'1 - PARA ALTERAR O NOME DO EQUIPAMENTO'
print'2 - PARA ALTERAR O IP DO EQUIPAMENTO'
print'3 - PARA SAIR DESSA TELA !'
#Pega opcao selecionada
a = raw_input('Digite a Opção desejada: ')
if a == '1':
host = raw_input('Digite o NOME do HOST: ')
rename = raw_input('Digite o nome que deseja alterar: ')
#Funcao valida os dados digitados
def valida_dados(host, rename):
if host == "":
print 'Digite corretamente o NOME do HOST corretamente!'
sys.exit(0)
elif rename == "":
print 'Digite o NOME do HOST que deseja alterar corretamente!'
sys.exit(0)
valida_dados(host, rename)
#Zabbix API - Altera no Zabbix
for x in zapi.host.get({'filter': {'name': host}}):
host_id = x['hostid']
altera = zapi.host.update({'hostid': host_id, 'host': rename, 'status': 0})
#Zabbix API - Altera no Zabbix
for y in zapi2.host.get({'filter': {'name':host}}):
host_id2 = y['hostid']
altera2 = zapi2.host.update({'hostid': host_id2, 'host': rename, 'status': 0})
print ('Equipamento - ' + host + ' - alterado NOME para: ' + rename)
elif a == '2':
host = raw_input('Digite o NOME do equipamento: ')
rename = raw_input('Digite o IP que deseja alterar: ')
#Funcao valida os dados digitados
def valida_dados(host, rename):
if host == "":
print 'Digite corretamente o NOME do equipamento corretamente!'
sys.exit(0)
elif rename == "":
print 'Digite o IP que deseja alterar corretamente!'
sys.exit(0)
valida_dados(host, rename)
#Zabbix API - Altera IP Zabbix Primeiro Servidor
for x in zapi.host.get({'filter': {'name': host}}):
host_id = x['hostid']
for x in zapi.hostinterface.get({'hostids': host_id}):
host_interface = x['interfaceid']
alteraip = zapi.hostinterface.update({'interfaceid': host_interface, 'ip': rename})
#Zabbix API - Altera IP Zabbix Segundo Servidor
for y in zapi2.host.get({'filter': {'name': host}}):
host_id2 = y['hostid']
for y in zapi2.hostinterface.get({'hostids': host_id2}):
host_interface2 = y['interfaceid']
alteraip2 = zapi2.hostinterface.update({'interfaceid': host_interface2, 'ip': rename})
print ('Equipamento - ' + host +' - alterado IP para: '+ rename)
else:
print 'OPÇÃO INVALIDA - FIM!!'
sys.exit(0)
|
nilq/baby-python
|
python
|
# ToggleButton examples.
import os
from ocempgui.widgets import *
from ocempgui.widgets.Constants import *
def _create_vframe (text):
    """Return a left-aligned VFrame captioned with *text*, spacing 5px."""
    box = VFrame (Label (text))
    box.spacing = 5
    box.align = ALIGN_LEFT
    return box
def create_button_view ():
    """Build a 2x3 table demonstrating ToggleButton features: states,
    padding, mnemonics, multiline labels and empty buttons.

    NOTE(review): uses xrange, so this example targets Python 2.
    """
    states = ("STATE_NORMAL", "STATE_ENTERED", "STATE_ACTIVE",
              "STATE_INSENSITIVE")
    table = Table (2, 3)
    table.spacing = 5
    table.set_row_align (0, ALIGN_TOP)
    table.set_row_align (1, ALIGN_TOP)
    # Frame with the states.
    frm_states = _create_vframe ("States")
    for i, s in enumerate (states):
        btn = ToggleButton (s)
        if STATE_TYPES[i] == STATE_INSENSITIVE:
            # The insensitive state is expressed via the sensitive flag
            # rather than by assigning the state directly.
            btn.sensitive = False
        else:
            btn.state = STATE_TYPES[i]
        frm_states.add_child (btn)
    table.add_child (0, 0, frm_states)
    # Frame with different padding.
    frm_padding = _create_vframe ("Padding")
    for i in xrange (5):
        btn = ToggleButton ("Padding: %dpx" % (i * 2))
        btn.padding = i * 2
        frm_padding.add_child (btn)
    table.add_child (0, 1, frm_padding)
    # Mnemonics ('#' marks the accelerator character in the label).
    frm_mnemonic = _create_vframe ("Mnemonics")
    btn = ToggleButton ("#Simple Mnemonic")
    btn2 = ToggleButton ("#Activate using <ALT><Underlined Key>")
    frm_mnemonic.add_child (btn, btn2)
    table.add_child (0, 2, frm_mnemonic)
    # Multiline labeled buttons
    frm_multiline = _create_vframe ("Multiline labels")
    strings = ("Single lined ToggleButton", "Two lines on\na ToggleButton",
               "Two lines with a\n#mnemonic")
    for s in strings:
        button = ToggleButton (s)
        button.child.multiline = True
        frm_multiline.add_child (button)
    table.add_child (1, 0, frm_multiline)
    # Empty buttons with different minimum sizes (cell 1,1 is left empty)
    frm_empty = _create_vframe ("Empty Buttons")
    for i in xrange (5):
        button = ToggleButton ()
        button.minsize = (20 * i, 10 * i)
        frm_empty.add_child (button)
    table.add_child (1, 2, frm_empty)
    return table
if __name__ == "__main__":
    # Initialize the drawing window.
    # NOTE(review): the local name `re` shadows the stdlib `re` module if
    # that is ever imported in this file.
    re = Renderer ()
    re.create_screen (530, 400)
    re.title = "ToggleButton examples"
    re.color = (234, 228, 223)
    re.add_widget (create_button_view ())
    # Start the main rendering loop (blocks until the window is closed).
    re.start ()
|
nilq/baby-python
|
python
|
from sys import stdin
def print_karte(karte):
    """Print the grid, one row per line, cells concatenated without separators.

    Args:
        karte: 2D sequence of cells; each cell is stringified with str().
    """
    # Iterate rows directly instead of indexing via range(len(...)).
    for row in karte:
        print(''.join(str(cell) for cell in row))
# Read "x1,y1 -> x2,y2" line segments from stdin, rasterize them onto a grid
# and count cells covered by at least two segments (AoC 2021 day 5 style).
zeilen = []
max_x = 0
max_y = 0
for line in stdin:
    eingabe = line.strip()
    eingabe = eingabe.split(" ")
    # fields 0 and 2 are the endpoints; field 1 is the "->" separator
    eins = [int(x) for x in eingabe[0].split(",")]
    zwei = [int(x) for x in eingabe[2].split(",")]
    #if eins[0] == zwei[0] or eins[1] == zwei[1]:
    zeilen.append([eins[0], eins[1], zwei[0], zwei[1]])
    # track grid bounds while reading
    if eins[0] > max_x:
        max_x = eins[0]
    if eins[1] > max_y:
        max_y = eins[1]
    if zwei[0] > max_x:
        max_x = zwei[0]
    if zwei[1] > max_y:
        max_y = zwei[1]
max_x += 1
max_y += 1
# "." marks an untouched cell; touched cells hold an integer hit count
karte = [["."]*max_x for x in range(max_y)]
for zeile in zeilen:
    if zeile[0] == zeile[2]:
        # vertical segment: walk y from start to end (either direction)
        if zeile[1] < zeile[3]:
            incrementer = 1
            zahl = zeile[3]+1
        else:
            incrementer = -1
            zahl = zeile[3]-1
        for i in range(zeile[1],zahl,incrementer):
            if karte[i][zeile[0]] == ".":
                karte[i][zeile[0]] = 1
            else:
                karte[i][zeile[0]] += 1
    elif zeile[1] == zeile[3]:
        # horizontal segment: walk x from start to end (either direction)
        if zeile[0] < zeile[2]:
            incrementer = 1
            zahl = zeile[2]+1
        else:
            incrementer = -1
            zahl = zeile[2]-1
        for i in range(zeile[0],zahl,incrementer):
            if karte[zeile[1]][i] == ".":
                karte[zeile[1]][i] = 1
            else:
                karte[zeile[1]][i] += 1
    else:
        # diagonal segment (45 degrees): step one cell on each axis per turn;
        # the first cell is marked before the loop, then mark-and-advance
        # until the endpoint has been passed
        pos_x = zeile[0]
        pos_y = zeile[1]
        if karte[pos_y][pos_x] == ".":
            karte[pos_y][pos_x] = 1
        else:
            karte[pos_y][pos_x] += 1
        if zeile[0] < zeile[2]:
            pos_x += 1
        else:
            pos_x -= 1
        if zeile[1] < zeile[3]:
            pos_y += 1
        else:
            pos_y -= 1
        while True:
            if karte[pos_y][pos_x] == ".":
                karte[pos_y][pos_x] = 1
            else:
                karte[pos_y][pos_x] += 1
            if zeile[0] < zeile[2]:
                pos_x += 1
            else:
                pos_x -= 1
            if zeile[1] < zeile[3]:
                pos_y += 1
            else:
                pos_y -= 1
            # stop once x has stepped past the segment's end column
            if zeile[0] < zeile[2]:
                if pos_x > zeile[2]:
                    break
            else:
                if pos_x < zeile[2]:
                    break
# count cells hit by two or more segments
gefahren_punkte = 0
for i in range(max_y):
    for ii in range(max_x):
        if karte[i][ii] != ".":
            if karte[i][ii] > 1:
                gefahren_punkte += 1
print(gefahren_punkte)
|
nilq/baby-python
|
python
|
from django.apps import apps
from .models import State, Workflow
def create_builtin_workflows(sender, **kwargs):
    """
    Receiver function to create a simple and a complex workflow. It is
    connected to the signal django.db.models.signals.post_migrate during
    app loading.

    Idempotent: does nothing once any workflow exists in the database.
    All saves pass skip_autoupdate=True to avoid pushing autoupdates
    during migration.
    """
    if Workflow.objects.exists():
        # If there is at least one workflow, then do nothing.
        return

    # Workflow 1: minimal flow — submitted -> accepted / rejected / not decided.
    workflow_1 = Workflow(name="Simple Workflow")
    workflow_1.save(skip_autoupdate=True)
    state_1_1 = State(
        name="submitted",
        workflow=workflow_1,
        allow_create_poll=True,
        allow_support=True,
        allow_submitter_edit=True,
    )
    state_1_1.save(skip_autoupdate=True)
    state_1_2 = State(
        name="accepted",
        workflow=workflow_1,
        recommendation_label="Acceptance",
        css_class="success",
        merge_amendment_into_final=1,
    )
    state_1_2.save(skip_autoupdate=True)
    state_1_3 = State(
        name="rejected",
        workflow=workflow_1,
        recommendation_label="Rejection",
        css_class="danger",
        merge_amendment_into_final=-1,
    )
    state_1_3.save(skip_autoupdate=True)
    state_1_4 = State(
        name="not decided",
        workflow=workflow_1,
        recommendation_label="No decision",
        css_class="default",
        merge_amendment_into_final=-1,
    )
    state_1_4.save(skip_autoupdate=True)
    state_1_1.next_states.add(state_1_2, state_1_3, state_1_4)
    workflow_1.first_state = state_1_1
    workflow_1.save(skip_autoupdate=True)

    # Workflow 2: full flow — published -> permitted -> one of several
    # final or intermediate decisions.
    workflow_2 = Workflow(name="Complex Workflow")
    workflow_2.save(skip_autoupdate=True)
    state_2_1 = State(
        name="published",
        workflow=workflow_2,
        allow_support=True,
        allow_submitter_edit=True,
        dont_set_identifier=True,
    )
    state_2_1.save(skip_autoupdate=True)
    state_2_2 = State(
        name="permitted",
        workflow=workflow_2,
        recommendation_label="Permission",
        allow_create_poll=True,
        allow_submitter_edit=True,
    )
    state_2_2.save(skip_autoupdate=True)
    state_2_3 = State(
        name="accepted",
        workflow=workflow_2,
        recommendation_label="Acceptance",
        css_class="success",
        merge_amendment_into_final=1,
    )
    state_2_3.save(skip_autoupdate=True)
    state_2_4 = State(
        name="rejected",
        workflow=workflow_2,
        recommendation_label="Rejection",
        css_class="danger",
        merge_amendment_into_final=-1,
    )
    state_2_4.save(skip_autoupdate=True)
    state_2_5 = State(
        name="withdrawed",
        workflow=workflow_2,
        css_class="default",
        merge_amendment_into_final=-1,
    )
    state_2_5.save(skip_autoupdate=True)
    state_2_6 = State(
        name="adjourned",
        workflow=workflow_2,
        recommendation_label="Adjournment",
        css_class="default",
        merge_amendment_into_final=-1,
    )
    state_2_6.save(skip_autoupdate=True)
    state_2_7 = State(
        name="not concerned",
        workflow=workflow_2,
        recommendation_label="No concernment",
        css_class="default",
        merge_amendment_into_final=-1,
    )
    state_2_7.save(skip_autoupdate=True)
    state_2_8 = State(
        name="refered to committee",
        workflow=workflow_2,
        recommendation_label="Referral to committee",
        css_class="default",
        merge_amendment_into_final=-1,
    )
    state_2_8.save(skip_autoupdate=True)
    state_2_9 = State(
        name="needs review",
        workflow=workflow_2,
        css_class="default",
        merge_amendment_into_final=-1,
    )
    state_2_9.save(skip_autoupdate=True)
    state_2_10 = State(
        name="rejected (not authorized)",
        workflow=workflow_2,
        recommendation_label="Rejection (not authorized)",
        css_class="default",
        merge_amendment_into_final=-1,
    )
    state_2_10.save(skip_autoupdate=True)
    state_2_1.next_states.add(state_2_2, state_2_5, state_2_10)
    state_2_2.next_states.add(
        state_2_3, state_2_4, state_2_5, state_2_6, state_2_7, state_2_8, state_2_9
    )
    workflow_2.first_state = state_2_1
    workflow_2.save(skip_autoupdate=True)
def get_permission_change_data(sender, permissions, **kwargs):
    """
    Yields all necessary collections if 'motions.can_see' permission changes.
    """
    motions_app = apps.get_app_config(app_label="motions")
    for permission in permissions:
        # There could be only one 'motions.can_see' and then we want to
        # return data.
        is_can_see = (
            permission.content_type.app_label == motions_app.label
            and permission.codename == "can_see"
        )
        if is_can_see:
            yield from motions_app.get_startup_elements()
|
nilq/baby-python
|
python
|
import os
from codecs import open
from setuptools import setup
import suit_rq
# Absolute path of the directory containing this setup.py
here = os.path.abspath(os.path.dirname(__file__))
# The long description is read from DESCRIPTION.rst next to this file
with open(os.path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
    long_description = f.read()
# Change to this file's directory so relative paths in setup() resolve
# regardless of where setup.py was invoked from
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
    name='django-suit-rq',
    version=suit_rq.__version__,
    author='Ryan Senkbeil',
    author_email='ryan.senkbeil@gsdesign.com',
    description='Support the django-rq admin when using django-suit',
    long_description=long_description,
    url='https://github.com/gsmke/django-suit-rq',
    license='BSD',
    packages=['suit_rq'],
    zip_safe=False,
    include_package_data=True,
    platforms='any',
    install_requires=[
        'django-suit >=0.2.15, <0.3.0',
        'django-rq >=0.8.0, <=1.2.0',
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Internet :: WWW/HTTP',
    ]
)
|
nilq/baby-python
|
python
|
import os
import wget
## Verify if directory exists. Create if it doesnt exist.
def check_dir(file_path):
    """Ensure the parent directory of *file_path* exists, creating it if needed.

    Args:
        file_path: Path to a file; only its directory component is created.
    """
    directory = os.path.dirname(file_path)
    # Guard against '' (a path with no directory part), which makedirs rejects;
    # exist_ok avoids the check-then-create race of an exists() pre-check.
    if directory:
        os.makedirs(directory, exist_ok=True)
## Download source Files from urls
def download_files(urls, out_path='downloads/', silent=False):
    """Download every URL in *urls* into *out_path*.

    The output directory is (re)checked before each download.
    NOTE(review): the `silent` flag is currently unused — confirm intent.
    """
    for link in urls:
        check_dir(out_path)
        print('Downloading', link)
        wget.download(link, out=out_path)
        print()
if __name__ == "__main__":
    # Demo: download one sample audio file into testing/
    urls=['https://gamepedia.cursecdn.com/darkestdungeon_gamepedia/c/ce/Vo_narr_tut_firstdungeon.ogg']
    download_files(urls,'testing/')
    # check_dir('testing/')
|
nilq/baby-python
|
python
|
"""Morse code handling"""
from configparser import ConfigParser
import os
from pathlib import Path
import sys
import warnings
import numpy as np
import sklearn.cluster
import sklearn.exceptions
from .io import read_wave
from .processing import smoothed_power, squared_signal
class MorseCode:
    """Morse code

    Attributes:
        data (np.ndarray): 1D binary array, representing morse code in time
    """

    # Lazily-populated cache for the morse.ini character mapping
    # (filled by the morse_to_char accessor below).
    _morse_to_char: dict = None

    def __init__(self, data: np.ndarray, sample_rate: int = None):
        """Initialize code with binary data

        Args:
            data (np.ndarray): 1D binary array, representing morse code in time
            sample_rate (np.ndarray): Audio sampling rate. Default: None.
        """
        self.data = data
        self.sample_rate = sample_rate

    @classmethod
    def from_wavfile(cls, file: os.PathLike) -> "MorseCode":
        """Construct from wave file

        - Read in wave file
        - Calculate signal envelope (smoothing window of 0.01 seconds)
        - Apply squaring (threshold: 50% of max smoothed data value)

        NOTE(review): the sample rate read here is not forwarded to the
        instance (cls(square_data) drops it), so decode()'s 20 wpm fallback
        can never trigger for instances built this way — confirm whether
        cls(square_data, sample_rate) was intended.

        Args:
            file (os.PathLike): path to input WAV file

        Returns:
            MorseCode: class instance, with 1D binary input data
        """
        sample_rate, wave = read_wave(file)
        window_size = int(0.01 * sample_rate)
        envelope = smoothed_power(wave, window_size)
        square_data = squared_signal(envelope)
        return cls(square_data)

    def decode(self) -> str:
        """Decode data

        Returns:
            str: Morse code content, in plain language

        Raises:
            UserWarning: dash/dot separation cannot be made unambiguosly
        """
        on_samples, off_samples = self._on_off_samples()
        dash_dot_chars = self._dash_dot_characters(on_samples)
        char_break_idx, word_space_idx = self._break_spaces(off_samples)
        morse_words = self._morse_words(dash_dot_chars, char_break_idx, word_space_idx)
        return self._translate(morse_words)

    # NOTE(review): chaining @classmethod with @property was deprecated in
    # Python 3.11 and removed in 3.13 — this accessor needs a rework for
    # newer interpreters.
    @classmethod
    @property
    def morse_to_char(cls) -> dict[str, str]:
        """Morse to character dictionary

        Read mappings from morse.ini and store them to class variable. Later,
        return directly from this class variable.

        Returns:
            dict[str, str]: Mapping of morse character string to letter
        """
        if cls._morse_to_char is not None:
            return cls._morse_to_char
        config = ConfigParser()
        config.read(Path(__file__).parent / "morse.ini")
        chars = config["characters"]
        cls._morse_to_char = {chars[key]: key.upper() for key in chars}
        return cls._morse_to_char

    def _on_off_samples(self) -> tuple[np.ndarray, np.ndarray]:
        """Calculate signal ON/OFF durations

        Locate rising and falling edges in square wave at self.data. Calculate
        number of samples in each ON / OFF period.

        Returns:
            tuple[np.ndarray, np.ndarray]: on_samples, off_samples. Note that
                in addition to character and word spaces, off_samples also
                includes inter-character spaces.
        """
        if len(self.data) == 0:
            return np.array([], dtype="int"), np.array([], dtype="int")
        square_diff = np.diff(self.data)
        rising_idx = np.nonzero(square_diff == 1)[0]
        falling_idx = np.nonzero(square_diff == -1)[0]
        # Case: data starts with ON - it started one sample before index 0
        if falling_idx[0] < rising_idx[0]:
            rising_idx = np.insert(rising_idx, 0, -1)
        # Case: data ends with ON
        if rising_idx[-1] > falling_idx[-1]:
            falling_idx = np.insert(falling_idx, len(falling_idx), len(self.data) - 1)
        on_samples = falling_idx - rising_idx
        off_samples = rising_idx[1:] - falling_idx[: len(falling_idx) - 1]
        return on_samples, off_samples

    def _dash_dot_characters(self, on_samples: np.ndarray) -> np.ndarray:
        """Convert array of ON sample lengths to array of dashes and dots

        NOTE: It is expected, that the signal contains exactly two distinct
        lengths - those for a dash and for a dot. If the keying speed varies,
        or either character does not exist, then this method will fail.
        As a circumvention, 20 WPM is used as a guess

        Args:
            on_samples (np.ndarray): number of samples in each ON period in
                the signal. This comes from `MorseCode._on_off_samples`.

        Raises:
            UserWarning: if there are no distinct clusters (only dashes
                or dots in the input), and self.sample_rate is not set; thus
                no guess can be made on dash/dot.

        Returns:
            np.ndarray: array of dashes and dots, of object (string) type
        """
        if len(on_samples) == 0:
            return np.array([], dtype="str")
        n_clusters = min(2, len(on_samples))
        column_vec = on_samples.reshape(-1, 1)
        # Suppress ConvergenceWarning on too low distinct clusters; fix it later
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            clustering = sklearn.cluster.KMeans(
                n_clusters=n_clusters, random_state=0
            ).fit(column_vec)
        distinct_clusters = len(set(clustering.labels_))
        # It is not clear whether dash or dot -- use (20 wpm dot length) * 1.5 as limit
        if distinct_clusters == 1:
            if self.sample_rate is None:
                raise UserWarning("Cannot determine whether dash or dot")
            sys.stderr.write("WARNING: too little data, guessing based on 20 wpm")
            sample_length = clustering.cluster_centers_[0]
            # 20 wpm dot is ~60 ms, i.e. sample_rate * 60 / 1000 samples
            is_dot = sample_length / (self.sample_rate * 60 / 1000) < 1.5
            dot_label = 0 if is_dot else 1
            dash_label = 1 if is_dot else 0
        else:
            # Shorter cluster center = dot, longer = dash
            cluster_sort_idx = np.argsort(
                clustering.cluster_centers_.flatten()
            ).tolist()
            dot_label = cluster_sort_idx.index(0)
            dash_label = cluster_sort_idx.index(1)
        dash_dot_map = {dot_label: ".", dash_label: "-"}
        dash_dot_characters = np.vectorize(dash_dot_map.get)(clustering.labels_)
        return dash_dot_characters

    @staticmethod
    def _break_spaces(off_samples: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
        """Convert array of OFF sample lengths to indices for char/word breaks

        NOTE: It is expected, that the signal contains exactly three distinct
        space lengths: inter-character space, character space and word space.
        If the keying speed varies, or word spaces do not exist, then this
        method will fail.

        Args:
            off_samples (np.ndarray): number of samples in each OFF period in
                the signal. This comes from `MorseCode._on_off_samples`.

        Returns:
            tuple[np.ndarray, np.ndarray]: indices for breaking dash/dot
                character array from `MorseCode._dash_dot_characters`. First
                array contains positions, where character breaks should be.
                Second array contains positions, where word spaces should be in
                the list of already resolved morse characters.
        """
        if len(off_samples) == 0:
            return np.array([], dtype="int"), np.array([], dtype="int")
        n_clusters = min(3, len(off_samples))
        column_vec = off_samples.reshape(-1, 1)
        # Suppress ConvergenceWarning on too low distinct clusters; fix it later
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            clustering = sklearn.cluster.KMeans(
                n_clusters=n_clusters, random_state=0
            ).fit(column_vec)
        distinct_clusters = len(set(clustering.labels_))
        cluster_sort_idx = np.argsort(clustering.cluster_centers_.flatten()).tolist()
        # This index breaks dashes/dots into characters
        intra_space_label = cluster_sort_idx.index(0)
        char_break_idx = np.nonzero(clustering.labels_ != intra_space_label)[0] + 1
        char_or_word_space_arr = clustering.labels_[
            clustering.labels_ != intra_space_label
        ]
        # This index breaks character list into word lists
        if distinct_clusters == 3:
            word_space_label = cluster_sort_idx.index(2)
            word_space_idx = (
                np.nonzero(char_or_word_space_arr == word_space_label)[0] + 1
            )
        else:
            # No word-length spaces found: treat everything as one word
            word_space_idx = np.array([], dtype="int")
        return char_break_idx, word_space_idx

    @staticmethod
    def _morse_words(
        raw_dash_dot: np.ndarray,
        char_break_idx: np.ndarray,
        word_space_idx: np.ndarray,
    ) -> list[list[str]]:
        """Convert character and space arrays to list of morse words

        Args:
            raw_dash_dot (np.ndarray): Numpy array of strings, contains
                '.' and '-' characters, as processed from self.data
            char_break_idx (np.ndarray): Index of locations in raw_dash_dot,
                where a character space or word space would exist. The array
                raw_dash_dot is first broken into characters with this index.
            word_space_idx (np.ndarray): Index for breaking character array
                into words. Contains locations of word spaces between natural
                language characters.

        Returns:
            list[list[str]]: Words in morse code. A single word is a list of
                dash-dot character combinations.
        """
        # Pair each break index with its successor to slice out characters
        char_start_idx = [0] + (char_break_idx).tolist()
        char_end_idx = (char_break_idx).tolist() + [len(raw_dash_dot)]
        morse_characters = [
            "".join(raw_dash_dot[i:j].tolist())
            for i, j in zip(char_start_idx, char_end_idx)
        ]
        # Same pairing trick to slice characters into words
        word_start_idx = [0] + (word_space_idx).tolist()
        word_end_idx = (word_space_idx).tolist() + [len(morse_characters)]
        return [morse_characters[i:j] for i, j in zip(word_start_idx, word_end_idx)]

    def _translate(self, morse_words: list[list[str]]) -> str:
        """Translate list of morse-coded words to string

        Args:
            morse_words (list[list[str]]): List of words, having list of characters.
                The characters are in morse-coded dash/dot form, e.g. '.--' for 'w'

        Returns:
            str: Message contained in input
        """
        char_dict = self.morse_to_char
        # Unknown morse sequences map to "" (silently dropped)
        char_lists = [[char_dict.get(j, "") for j in i] for i in morse_words]
        return " ".join(["".join(word) for word in char_lists])
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sklearn.preprocessing import StandardScaler
# Two samples with two features each.
X = [[0, 15],
     [1, -10]]
# scale data according to computed scaling values
scaler = StandardScaler().fit(X)
print(scaler.transform(X))
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
#Author: Stefan Toman
if __name__ == '__main__':
    # Classic first program: emit the greeting to stdout.
    greeting = "Hello, World!"
    print(greeting)
|
nilq/baby-python
|
python
|
from l_00_inventory import inventory
import json
# Serialize the imported inventory dict to a JSON file, then read it back.
with open("m02_files/l_00_inventory.json", "w") as json_out:
    json_out.write(json.dumps(inventory))
with open("m02_files/l_00_inventory.json", "r") as json_in:
    json_inventory = json_in.read()
print("l_00_inventory.json file:", json_inventory)
print("\njson pretty version:")
# round-trip through loads/dumps to re-indent the document for display
print(json.dumps(json.loads(json_inventory), indent=4))
|
nilq/baby-python
|
python
|
import datetime
import time as samay
try:
from pac import voice_io
except ModuleNotFoundError:
import voice_io
def date():
    """Announce today's date in DD/MM/YYYY format via voice_io."""
    today = datetime.datetime.now().strftime("%d/%m/%Y")
    voice_io.show(f"Today's date is {today} (DD/MM/YYYY)")
def time():
    """Announce the current local time in 12-hour HH:MM:SS AM/PM format."""
    now = samay.strftime("%I:%M:%S %p", samay.localtime())
    voice_io.show(f"The current time is {now}")
def year():
    """Announce the current four-digit year."""
    current_year = datetime.datetime.now().strftime("%Y")
    voice_io.show(f"The current year is {current_year}")
def month():
    """Announce the current month by its full name."""
    current_month = datetime.datetime.now().strftime("%B")
    voice_io.show(f"The current month is {current_month}")
def day():
    """Announce the current weekday by its full name."""
    weekday = datetime.datetime.now().strftime("%A")
    voice_io.show(f"Today it is a {weekday}")
|
nilq/baby-python
|
python
|
from settings import settings
from office365.graph.graph_client import GraphClient
def get_token_for_user(auth_ctx):
    """
    Acquire token via user credentials
    :type auth_ctx: adal.AuthenticationContext
    """
    user_creds = settings['user_credentials']
    client_id = settings['client_credentials']['client_id']
    return auth_ctx.acquire_token_with_username_password(
        'https://graph.microsoft.com',
        user_creds['username'],
        user_creds['password'],
        client_id)
def enum_folders_and_files(root_folder):
    """Recursively print the type and name of every item below *root_folder*.

    Relies on the module-level ``client`` for loading and executing queries.
    """
    drive_items = root_folder.children
    client.load(drive_items)
    client.execute_query()
    for drive_item in drive_items:
        # Conditional expression instead of the fragile `and/or` chain,
        # which breaks whenever the "true" branch value is falsy.
        item_type = "file" if drive_item.folder.is_server_object_null else "folder"
        print("Type: {0} Name: {1}".format(item_type, drive_item.name))
        # Recurse only into non-empty folders
        if not drive_item.folder.is_server_object_null and drive_item.folder.childCount > 0:
            enum_folders_and_files(drive_item)
# Module-level Graph client shared by enum_folders_and_files above.
client = GraphClient(settings['tenant'], get_token_for_user)
root = client.me.drive.root  # OneDrive root folder of the signed-in user
enum_folders_and_files(root)
|
nilq/baby-python
|
python
|
import os
import pytest
import responses
from ewhs.client import EwhsClient
@pytest.fixture(scope="function")
def client():
    """Fresh, unauthenticated EwhsClient for every test."""
    # NOTE(review): the fourth argument appears to select a customer/api code
    # ("ewhs") — confirm against EwhsClient.__init__.
    client = EwhsClient("test", "testpassword", "9fc05c82-0552-4ca5-b588-c64d77f117a9", "ewhs")
    return client
@pytest.fixture(scope="session")
def authenticated_client():
    """EwhsClient pre-seeded with tokens so no login round-trip happens."""
    client = EwhsClient("test", "testpassword", "9fc05c82-0552-4ca5-b588-c64d77f117a9")
    # Pre-baked JWT access token (test data only; rejoined onto one line —
    # the literal had been wrapped across lines by text extraction).
    client.access_token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJpYXQiOjE2Mzg1NDM1NzUsImV4cCI6MTYzODU0NzE3NSwicm9sZXMiOlsiUk9MRV9TQ0FOTkVSIiwiUk9MRV9EQVNIQk9BUkRfUkVBRCIsIlJPTEVfREFTSEJPQVJEX1NUQVRVUyIsIlJPTEVfU1RPQ0tfUkVBRCIsIlJPTEVfU1RPQ0tfSU1QT1JUIiwiUk9MRV9TVE9DS19FWFBPUlQiLCJST0xFX1NUT0NLSElTVE9SWV9SRUFEIiwiUk9MRV9TVE9DS0hJU1RPUllfRVhQT1JUIiwiUk9MRV9NT0RJRklDQVRJT05fUkVBRCIsIlJPTEVfTU9ESUZJQ0FUSU9OX0NSRUFURSIsIlJPTEVfTU9ESUZJQ0FUSU9OX1VQREFURSIsIlJPTEVfTU9ESUZJQ0FUSU9OX0FQUFJPVkUiLCJST0xFX01PRElGSUNBVElPTl9FWFBPUlQiLCJST0xFX1RSQU5TRkVSX1JFQUQiLCJST0xFX1RSQU5TRkVSX0NSRUFURSIsIlJPTEVfVFJBTlNGRVJfVVBEQVRFIiwiUk9MRV9UUkFOU0ZFUl9ERUxFVEUiLCJST0xFX1RSQU5TRkVSX0NBTkNFTCIsIlJPTEVfVFJBTlNGRVJfVU5IT0xEIiwiUk9MRV9UUkFOU0ZFUl9VTkFTU0lHTiIsIlJPTEVfVFJBTlNGRVJfUkVWSUVXIiwiUk9MRV9UUkFOU0ZFUl9FWFBPUlQiLCJST0xFX0FSVElDTEVfUkVBRCIsIlJPTEVfQVJUSUNMRV9DUkVBVEUiLCJST0xFX0FSVElDTEVfVVBEQVRFX0JBUkNPREVTIiwiUk9MRV9BUlRJQ0xFX1VQREFURSIsIlJPTEVfQVJUSUNMRV9ERUxFVEUiLCJST0xFX0FSVElDTEVfSU1QT1JUIiwiUk9MRV9BUlRJQ0xFX1VQREFURV9ET0NVTUVOVFMiLCJST0xFX0FSVElDTEVfRVhQT1JUIiwiUk9MRV9WQVJJQU5UX1FVQVJBTlRJTkUiLCJST0xFX1NVUFBMSUVSX1JFQUQiLCJST0xFX1NVUFBMSUVSX0NSRUFURSIsIlJPTEVfU1VQUExJRVJfVVBEQVRFIiwiUk9MRV9TVVBQTElFUl9ERUxFVEUiLCJST0xFX0lOQk9VTkRfUkVBRCIsIlJPTEVfSU5CT1VORF9DUkVBVEUiLCJST0xFX0lOQk9VTkRfVVBEQVRFIiwiUk9MRV9JTkJPVU5EX0NBTkNFTCIsIlJPTEVfSU5CT1VORF9QUk9DRVNTIiwiUk9MRV9JTkJPVU5EX0NPTVBMRVRFIiwiUk9MRV9JTkJPVU5EX0VYUE9SVCIsIlJPTEVfT1JERVJfUkVBRCIsIlJPTEVfT1JERVJfQ1JFQVRFIiwiUk9MRV9PUkRFUl9VUERBVEUiLCJST0xFX09SREVSX1VQREFURV9QUk9DRVNTSU5HIiwiUk9MRV9PUkRFUl9VUERBVEVfUEFBWkwiLCJST0xFX09SREVSX1BBUlRJQUwiLCJST0xFX09SREVSX1VOSE9MRCIsIlJPTEVfT1JERVJfQ0FOQ0VMIiwiUk9MRV9PUkRFUl9DQU5DRUxfUFJPQ0VTU0lORyIsIlJPTEVfT1JERVJfUFJPQkxFTSIsIlJPTEVfT1JERVJfRVhQT1JUIiwiUk9MRV9PUkRFUl9QUklPUklUSVpFIiwiUk9MRV9PUkRFUl9JTVBPUlQiLCJST0xFX1BJQ0tMSVNUX1JFQUQiLCJST0xFX1BJQ0tMSVNUX0VYUE9SVCIsIlJPTEVfUElDS0xJU1RfVU5BU1NJR04iLCJST0xFX1BJQ0tMSVNUX1BSSU9SSVRJWkUiLCJST0xFX1NISVBNRU5UX1JFQUQiLCJST0xFX1NISVBNRU5UX1BSSU5UIiwiUk9MRV9TSElQTUVOVF9ET1dOTE9BRCIsIlJPTEVfU0hJUE1FTlRfRVhQT1JUIiwiUk9MRV9NQUlMU0hJUE1FTlRfUkVBRCIsIlJPTEVfTUFJTFNISVBNRU5UX1VQREFURSIsIlJPTEVfTUFJTFNISVBNRU5UX1BST0NFU1MiLCJST0xFX1RSQUNLSU5HREFUQV9SRUFEIiwiUk9MRV9UUkFDS0lOR0RBVEFfVVBEQVRFIiwiUk9MRV9UUkFDS0lOR0RBVEFfREVMRVRFIiwiUk9MRV9SRVRVUk5MQUJFTF9SRUFEIiwiUk9MRV9SRVRVUk5MQUJFTF9DUkVBVEUiLCJST0xFX1JFVFVSTkxBQkVMX1VQREFURSIsIlJPTEVfUkVUVVJOTEFCRUxfQ0FOQ0VMIiwiUk9MRV9QUklOVEVSX1JFQUQiLCJST0xFX1BSSU5URVJfVVBEQVRFIiwiUk9MRV9QUklOVEVSX0NSRUFURSIsIlJPTEVfUFJJTlRFUl9ERUxFVEUiLCJST0xFX1BBQ0tJTkdUQUJMRV9SRUFEIiwiUk9MRV9QQUNLSU5HVEFCTEVfQ1JFQVRFIiwiUk9MRV9QQUNLSU5HVEFCTEVfREVMRVRFIiwiUk9MRV9QQUNLSU5HVEFCTEVfVVBEQVRFIiwiUk9MRV9QQUNLSU5HVEFCTEVfVVBEQVRFX0FERFJFU1MiLCJST0xFX1BBQ0tJTkdUQUJMRV9VUERBVEVfU0hJUFBJTkdPUFRJT04iLCJST0xFX0ZJTExJTkdfUkVBRCIsIlJPTEVfRklMTElOR19FWFBPUlQiLCJST0xFX0ZJTExJTkdfSU1QT1JUIiwiUk9MRV9DT0xMT19SRUFEIiwiUk9MRV9DT0xMT19FWFBPUlQiLCJST0xFX0NPTExPX0lNUE9SVCIsIlJPTEVfQ1VTVE9NRVJfUkVBRCIsIlJPTEVfQ1VTVE9NRVJfQ1JFQVRFIiwiUk9MRV9DVVNUT01FUl9VUERBVEUiLCJST0xFX0NVU1RPTUVSX0lNUE9SVCIsIlJPTEVfQ1VTVE9NRVJfRVhQT1JUIiwiUk9MRV9DVVNUT01FUl9ERUxFVEUiLCJST0xFX0NVU1RPTUVSVVNFUl9SRUFEIiwiUk9MRV9DVVNUT01FUlVTRVJfQ1JFQVRFIiwiUk9MRV9DVVNUT01FUlVTRVJfVVBEQVRFIiwiUk9MRV9DVVNUT01FUlVTRVJfREVMRVRFIiwiUk9MRV9DVVNUT01FUlVTRVJfRVhQT1JUIiwiUk9MRV9DVVNUT01FUlVTRVJfSU1QT1JUIiwiUk9MRV9DVVNUT01FUkdST1VQX1JFQUQiLCJST0xFX0NVU1RPTUVSR1JPVVBfQ1JFQVRFIiwiUk9MRV9DVVNUT01FUkdST1VQX1VQREFURSIsIlJPTEVfQ1VTVE9NRVJHUk9VUF9ERUxFVEUiLCJST0xFX0FQSV9SRUFEIiwiUk9MRV9BUElfQ1JFQVRFIiwiUk9MRV9BUElfVVBEQVRFIiwiUk9MRV9BUElfREVMRVRFIiwiUk9MRV9SRVNUUklDVEVESVBfUkVBRCIsIlJPTEVfUkVTVFJJQ1RFRElQX1VQREFURSIsIlJPTEVfUkVTVFJJQ1RFRElQX0RFTEVURSIsIlJPTEVfRU1QTE9ZRUVfUkVBRCIsIlJPTEVfRU1QTE9ZRUVfQ1JFQVRFIiwiUk9MRV9FTVBMT1lFRV9VUERBVEUiLCJST0xFX0VNUExPWUVFX0RFTEVURSIsIlJPTEVfRU1QTE9ZRUVfRVhQT1JUIiwiUk9MRV9FTVBMT1lFRV9JTVBPUlQiLCJST0xFX0VNUExPWUVFR1JPVVBfUkVBRCIsIlJPTEVfRU1QTE9ZRUVHUk9VUF9DUkVBVEUiLCJST0xFX0VNUExPWUVFR1JPVVBfVVBEQVRFIiwiUk9MRV9FTVBMT1lFRUdST1VQX0RFTEVURSIsIlJPTEVfTE9DQVRJT05fUkVBRCIsIlJPTEVfTE9DQVRJT05fQ1JFQVRFIiwiUk9MRV9MT0NBVElPTl9VUERBVEUiLCJST0xFX0xPQ0FUSU9OX0RFTEVURSIsIlJPTEVfTE9DQVRJT05fSU1QT1JUIiwiUk9MRV9MT0NBVElPTl9FWFBPUlQiLCJST0xFX0xPQ0FUSU9OX1FVQVJBTlRJTkUiLCJST0xFX0xPQ0FUSU9OR1JPVVBfUkVBRCIsIlJPTEVfTE9DQVRJT05HUk9VUF9DUkVBVEUiLCJST0xFX0xPQ0FUSU9OR1JPVVBfVVBEQVRFIiwiUk9MRV9MT0NBVElPTkdST1VQX0RFTEVURSIsIlJPTEVfV0FSRUhPVVNFU19SRUFEIiwiUk9MRV9aT05FX1JFQUQiLCJST0xFX1pPTkVfQ1JFQVRFIiwiUk9MRV9aT05FX1VQREFURSIsIlJPTEVfWk9ORV9ERUxFVEUiLCJST0xFX1pPTkVfRVhQT1JUIiwiUk9MRV9aT05FX0lNUE9SVCIsIlJPTEVfUFJJTlRfQkFSQ09ERSIsIlJPTEVfU0hJUFBJTkdNQVRSSVhfUkVBRCIsIlJPTEVfU0hJUFBJTkdNQVRSSVhfVVBEQVRFIiwiUk9MRV9CVVNJTkVTU1JVTEVNQVRSSVhfUkVBRCIsIlJPTEVfQlVTSU5FU1NSVUxFTUFUUklYX1VQREFURSIsIlJPTEVfU0hJUFBJTkdNRVRIT0RfUkVBRCIsIlJPTEVfU0hJUFBJTkdNRVRIT0RfQ1JFQVRFIiwiUk9MRV9TSElQUElOR01FVEhPRF9VUERBVEUiLCJST0xFX0VYUE9SVF9SRUFEX0ZJTkFOQ0lBTCIsIlJPTEVfRVhQT1JUX1JFQURfQklMTElORyIsIlJPTEVfSVNTVUVfUkVBRCIsIlJPTEVfSVNTVUVfQVNTSUdOIiwiUk9MRV9JU1NVRV9DUkVBVEVfQ09NTUVOVCIsIlJPTEVfSVNTVUVfUkVBRF9DT01NRU5UIiwiUk9MRV9TSElQUElOR1RFTVBMQVRFX1JFQUQiLCJST0xFX1NISVBQSU5HVEVNUExBVEVfVVBEQVRFIiwiUk9MRV9DT05UUkFDVF9SRUFEIiwiUk9MRV9DT05UUkFDVF9DUkVBVEUiLCJST0xFX0NPTlRSQUNUX1VQREFURSIsIlJPTEVfQ1VTVE9NRVJQUklDRV9SRUFEIiwiUk9MRV9DVVNUT01FUlBSSUNFX1VQREFURSIsIlJPTEVfU0VSSUFMTlVNQkVSX1JFQUQiLCJST0xFX1NFUklBTE5VTUJFUl9FWFBPUlQiLCJST0xFX1NISVBQSU5HU09GVFdBUkVfUkVBRCIsIlJPTEVfU0hJUFBJTkdTT0ZUV0FSRV9FWFBPUlQiLCJST0xFX01JRERMRVdBUkVfUkVBRCIsIlJPTEVfSU5TVFJVQ1RJT05TX09WRVJWSUVXX1JFQUQiLCJST0xFX0lOU1RSVUNUSU9OU19ET1dOTE9BRF9BUEtfUkVBRCIsIlJPTEVfSU5TVFJVQ1RJT05TX1NFTEVDVF9XTVNfQVBQX1JFQUQiLCJST0xFX1dFQiIsIlJPTEVfVVNFUiJdLCJ1c2VybmFtZSI6ImZlcnJ5IiwidXNlcl9pZCI6IjlmZDNlZmYwLWZiMjgtMTFlNS05YzMyLWJjNWZmNGY3YWVmNCIsInVzZXJfdHlwZSI6ImVtcGxveWVlIiwiY3VzdG9tZXJfaWRzIjpbIjUzYjVhNTQzLTEyOWEtNDAzYy05YTZlLTNkOWM1MjVmZmE1YiIsImYxOTA5MDBhLWViZmQtNDI2Mi05ZGQ2LTA1ZGRkNjE3MjFiMCIsIjhjZTEwNWYwLWZjMWQtNDIzYS1iMDY2LWQ4NGM1ZWE1N2NhYSIsIjQ4ZTc3YTdhLWUwMzMtNDcxOS05MTkxLTc4YTlhMWI0NjQ5MiJdLCJyZXF1ZXN0X2lwIjoiMTkyLjE2OC4xMjguMSJ9.crcZ-2i9u1u5i3RBhV6tCMo-hrdeuQ91yDDVGT9k6iAFbF48k65RQbVPVkrIwZx9wN6hCvl6mMOOGkiLxFtweSi4nt_hGZeCsuieypQHZxf3MCdwo0zKtb0M8NmBB--D7_AvHWqcz6IEgoXMUtYLOkab4BPVdZlHmegbf7qRtNZlaKRVXPqgn3ReiPVvX_TGdK74VEXZzWPStoTxJwVkFvCFV9RFfYb_b9BgfTaSDJAYGFmSE-QxbW1K4TQBgUjuUAQSRh-y5diw4nuY9VJgcJ2LAD6ZX19do1zFCsc8zq2KUoTppPV9xO8WpOdxlXKGLu3rwfvLV9clhrc9ogmEAYF7UDcJkwgL5nHEfmsAD602T6_NtMjwP1dhTL9OeRz6oJwNRUb3hSe6uG7hvhlE7X-O8GwCafyWX8vgGT0D1NPh5ehwFsh8oc57M-W5PczDwZwQJ99jdHcAFRcsEKMJpKrs1G2LYAqDMS38i6IbZghPqN88Cnc6cpPfWVI6rs1BPZ4DxRBkQkXLWdamAVck6mCpW1QOA-YnNbmLn16d88PeMhzt7TN_jJfi0VAf2BK1DEbdy2sdSoqm3kCWqSzG11hTDLjvbpvJ0rCby7kz4c47qyxzxhyYOCBD4Rns9bNRW2xbE4BSJ0eKMeacaaWNQX0LeUaQy2Q6qPCVPO-hxAo"
    client.refresh_token = "91aff30e4f3bb35b923892e525bd848ab88cf68d9669b5ccf07ae0262934b43a67cf7df89ef6213ddbb47c400c1b2c32e4d9178790caa1420e28a94b892addb3"
    # Far-future expiry timestamp keeps the token "valid" for the whole session.
    client.expires_at = 2638547175
    return client
class ImprovedRequestsMock(responses.RequestsMock):
    """Wrapper adding a few shorthands to responses.RequestsMock.

    Each shorthand registers a mock response whose body is loaded from a
    JSON fixture file under tests/responses/<filename>.json.
    """

    def get(self, url, filename, status=200):
        """Setup a mock response for a GET request."""
        self._register(responses.GET, url, filename, status)

    def post(self, url, filename, status=200):
        """Setup a mock response for a POST request."""
        self._register(responses.POST, url, filename, status)

    def delete(self, url, filename, status=204):
        """Setup a mock response for a DELETE request."""
        self._register(responses.DELETE, url, filename, status)

    def patch(self, url, filename, status=200):
        """Setup a mock response for a PATCH request."""
        self._register(responses.PATCH, url, filename, status)

    def _register(self, method, url, filename, status):
        """Shared registration for the HTTP-verb shorthands above."""
        body = self._get_body(filename)
        self.add(method, url, body=body, status=status, content_type="application/json")

    def _get_body(self, filename):
        """Read the response fixture file and return it."""
        # BUG FIX: the fixture name was a hard-coded constant, silently
        # ignoring the `filename` parameter; interpolate it instead.
        file = os.path.join(os.path.dirname(__file__), "responses", f"{filename}.json")
        with open(file, encoding="utf-8") as f:
            return f.read()
@pytest.fixture
def response():
    """Setup the responses fixture."""
    # Yield inside the context manager so HTTP mocking stays active for the
    # duration of the test using this fixture.
    with ImprovedRequestsMock() as mock:
        yield mock
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
chanjo.cli
~~~~~~~~~~~
Command line interface (console entry points). Based on Click_.
.. _Click: http://click.pocoo.org/
"""
from __future__ import absolute_import, unicode_literals
from pkg_resources import iter_entry_points
import click
from . import __version__
from ._compat import text_type
from .config import Config, config_file_name, markup
from .store import Store
@click.group()
@click.option(
    '-c', '--config',
    default=config_file_name,
    # NOTE(review): mode 'w' opens (and truncates) the config file on every
    # invocation — presumably Config reads the path separately; confirm.
    type=click.File('w', encoding='utf-8'),
    help='path to config file')
@click.option('--db', type=text_type, help='path/URI of the SQL database')
@click.option(
    '-d', '--dialect',
    type=click.Choice(['sqlite', 'mysql']),
    help='type of SQL database')
@click.version_option(__version__)
@click.pass_context
def cli(context, config, db, dialect):
    """Clinical sequencing coverage analysis tool.

    Root Click group: builds the Config object, opens the SQL store, and
    seeds default option values for all subcommands.
    """
    # avoid setting global defaults in Click options, do it below when
    # updating the config object
    context.obj = Config(config, markup=markup)
    # global defaults
    # CLI flags win over config-file values, which win over hard defaults.
    db_path = db or context.obj.get('db', 'coverage.sqlite3')
    db_dialect = dialect or context.obj.get('dialect', 'sqlite')
    # Custom attribute on the Click context; subcommands read context.db.
    context.db = Store(db_path, dialect=db_dialect)
    # update the context with new defaults from the config file
    context.default_map = context.obj
# add subcommands dynamically to the CLI
# Plugins register under the 'chanjo.subcommands' entry-point group.
for entry_point in iter_entry_points('chanjo.subcommands'):
    cli.add_command(entry_point.load())
|
nilq/baby-python
|
python
|
# coding: utf-8
#
# Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for scripts/initial_release_prep.py."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import subprocess
import constants
from core.tests import test_utils
import python_utils
from scripts import common
from scripts.release_scripts import initial_release_prep
class InitialReleasePrepTests(test_utils.GenericTestBase):
    """Test the methods for initial release preparation."""
    def test_get_mail_message_template(self):
        """The mail template must match this exact literal text."""
        expected_mail_message_template = (
            'Hi Sean,\n\n'
            'You will need to run these jobs on the backup server:\n\n'
            '[List of jobs formatted as: {{Job Name}} (instructions: '
            '{{Instruction doc url}}) (Author: {{Author Name}})]\n'
            'The specific instructions for jobs are linked with them. '
            'The general instructions are as follows:\n\n'
            '1. Login as admin\n'
            '2. Navigate to the admin panel and then the jobs tab\n'
            '3. Run the above jobs\n'
            '4. In case of failure/success, please send the output logs for '
            'the job to me and the job authors: {{Author names}}\n\n'
            'Thanks!\n')
        self.assertEqual(
            initial_release_prep.get_mail_message_template(),
            expected_mail_message_template)
    def test_exception_is_raised_if_release_journal_is_not_created(self):
        """main() must abort when the user denies having created the doc."""
        def mock_open_tab(unused_url):
            pass
        def mock_ask_user_to_confirm(unused_msg):
            pass
        def mock_input():
            # Simulate the user answering 'n' to the journal-created prompt.
            return 'n'
        def mock_verify_current_branch_name(unused_branch_name):
            pass
        open_tab_swap = self.swap(
            common, 'open_new_tab_in_browser_if_possible', mock_open_tab)
        ask_user_swap = self.swap(
            common, 'ask_user_to_confirm', mock_ask_user_to_confirm)
        input_swap = self.swap(python_utils, 'INPUT', mock_input)
        branch_check_swap = self.swap(
            common, 'verify_current_branch_name',
            mock_verify_current_branch_name)
        with open_tab_swap, ask_user_swap, input_swap, branch_check_swap:
            with self.assertRaisesRegexp(
                Exception,
                'Please ensure a new doc is created for the '
                'release before starting with the release process.'):
                initial_release_prep.main()
    def test_get_extra_jobs_due_to_schema_changes(self):
        """A schema-version bump in the feconf diff maps to its one-off job."""
        def mock_run_cmd(unused_cmd_tokens):
            # Fake `git diff` output containing a misconceptions schema change.
            return (
                '"diff --git a/feconf.py b/feconf.py\n'
                '--- a/feconf.py\n+++ b/feconf.py\n'
                '@@ -36,6 +36,10 @@ POST_COMMIT_STATUS_PRIVATE = \'private\'\n'
                ' # Whether to unconditionally log info messages.\n'
                ' DEBUG = False\n \n'
                '+# The path for generating release_summary.md '
                'file for the current release.\n'
                '-CURRENT_MISCONCEPTIONS_SCHEMA_VERSION = 2\n'
                '+CURRENT_MISCONCEPTIONS_SCHEMA_VERSION = 1\n')
        run_cmd_swap = self.swap(common, 'run_cmd', mock_run_cmd)
        with run_cmd_swap:
            self.assertEqual(
                initial_release_prep.get_extra_jobs_due_to_schema_changes(
                    'upstream', '1.2.3'), ['SkillMigrationOneOffJob'])
    def test_did_supported_audio_languages_change_with_change_in_languages(
            self):
        """Returns True when the release branch adds an audio language."""
        all_cmd_tokens = []
        mock_constants = {
            'SUPPORTED_AUDIO_LANGUAGES': [{
                'id': 'en',
                'description': 'English',
                'relatedLanguages': ['en']}]}
        def mock_run_cmd(cmd_tokens):
            # Mutate the constants as a side effect of the (mocked) checkout,
            # mimicking the release branch shipping an extra language.
            mock_constants['SUPPORTED_AUDIO_LANGUAGES'].append({
                'id': 'ak',
                'description': 'Akan',
                'relatedLanguages': ['ak']
            })
            all_cmd_tokens.append(cmd_tokens)
        run_cmd_swap = self.swap(common, 'run_cmd', mock_run_cmd)
        constants_swap = self.swap(constants, 'constants', mock_constants)
        with run_cmd_swap, constants_swap:
            self.assertTrue(
                initial_release_prep.did_supported_audio_languages_change(
                    'upstream', '1.2.3'))
        # The function must check out, reset, and restore assets/constants.ts.
        self.assertEqual(
            all_cmd_tokens, [
                [
                    'git', 'checkout', 'upstream/release-1.2.3',
                    '--', 'assets/constants.ts'],
                ['git', 'reset', 'assets/constants.ts'],
                ['git', 'checkout', '--', 'assets/constants.ts']])
    def test_did_supported_audio_languages_change_without_change_in_languages(
            self):
        """Returns False when the audio language list is unchanged."""
        all_cmd_tokens = []
        mock_constants = {
            'SUPPORTED_AUDIO_LANGUAGES': [{
                'id': 'en',
                'description': 'English',
                'relatedLanguages': ['en']}]}
        def mock_run_cmd(cmd_tokens):
            all_cmd_tokens.append(cmd_tokens)
        run_cmd_swap = self.swap(common, 'run_cmd', mock_run_cmd)
        constants_swap = self.swap(constants, 'constants', mock_constants)
        with run_cmd_swap, constants_swap:
            self.assertFalse(
                initial_release_prep.did_supported_audio_languages_change(
                    'upstream', '1.2.3'))
        self.assertEqual(
            all_cmd_tokens, [
                [
                    'git', 'checkout', 'upstream/release-1.2.3',
                    '--', 'assets/constants.ts'],
                ['git', 'reset', 'assets/constants.ts'],
                ['git', 'checkout', '--', 'assets/constants.ts']])
    def test_cut_release_branch_with_correct_version(self):
        """A valid X.Y.Z version triggers the branch-cutting subprocess."""
        check_function_calls = {
            'open_new_tab_in_browser_if_possible_is_called': False,
            'check_call_is_called': False
        }
        expected_check_function_calls = {
            'open_new_tab_in_browser_if_possible_is_called': True,
            'check_call_is_called': True
        }
        def mock_open_tab(unused_url):
            check_function_calls[
                'open_new_tab_in_browser_if_possible_is_called'] = True
        def mock_check_call(unused_cmd_tokens):
            check_function_calls['check_call_is_called'] = True
        def mock_input():
            return '1.2.3'
        open_tab_swap = self.swap(
            common, 'open_new_tab_in_browser_if_possible',
            mock_open_tab)
        check_call_swap = self.swap(
            subprocess, 'check_call', mock_check_call)
        input_swap = self.swap(
            python_utils, 'INPUT', mock_input)
        with open_tab_swap, check_call_swap, input_swap:
            initial_release_prep.cut_release_branch()
        self.assertEqual(check_function_calls, expected_check_function_calls)
    def test_cut_release_branch_with_incorrect_version(self):
        """An invalid version string fails the assertion before any git call."""
        check_function_calls = {
            'open_new_tab_in_browser_if_possible_is_called': False,
            'check_call_is_called': False
        }
        expected_check_function_calls = {
            'open_new_tab_in_browser_if_possible_is_called': True,
            'check_call_is_called': False
        }
        def mock_open_tab(unused_url):
            check_function_calls[
                'open_new_tab_in_browser_if_possible_is_called'] = True
        def mock_check_call(unused_cmd_tokens):
            check_function_calls['check_call_is_called'] = True
        def mock_input():
            return 'invalid'
        open_tab_swap = self.swap(
            common, 'open_new_tab_in_browser_if_possible',
            mock_open_tab)
        check_call_swap = self.swap(
            subprocess, 'check_call', mock_check_call)
        input_swap = self.swap(
            python_utils, 'INPUT', mock_input)
        with open_tab_swap, check_call_swap, input_swap:
            with self.assertRaises(AssertionError):
                initial_release_prep.cut_release_branch()
        self.assertEqual(check_function_calls, expected_check_function_calls)
    def test_function_calls(self):
        """A full main() run invokes every collaborator exactly as expected."""
        check_function_calls = {
            'open_new_tab_in_browser_if_possible_is_called': False,
            'ask_user_to_confirm_is_called': False,
            'get_mail_message_template_is_called': False,
            'get_extra_jobs_due_to_schema_changes_is_called': False,
            'did_supported_audio_languages_change_is_called': False,
            'get_remote_alias_is_called': False,
            'verify_current_branch_name_is_called': False,
            'cut_release_branch_is_called': False
        }
        expected_check_function_calls = {
            'open_new_tab_in_browser_if_possible_is_called': True,
            'ask_user_to_confirm_is_called': True,
            'get_mail_message_template_is_called': True,
            'get_extra_jobs_due_to_schema_changes_is_called': True,
            'did_supported_audio_languages_change_is_called': True,
            'get_remote_alias_is_called': True,
            'verify_current_branch_name_is_called': True,
            'cut_release_branch_is_called': True
        }
        def mock_open_tab(unused_url):
            check_function_calls[
                'open_new_tab_in_browser_if_possible_is_called'] = True
        def mock_ask_user_to_confirm(unused_msg):
            check_function_calls['ask_user_to_confirm_is_called'] = True
        print_arr = []
        def mock_input():
            # Answer '1.2.3' only to the previous-version prompt; 'y' otherwise.
            if print_arr[-1] == 'Enter version of previous release.':
                return '1.2.3'
            return 'y'
        def mock_print(msg):
            print_arr.append(msg)
        def mock_get_mail_message_template():
            check_function_calls['get_mail_message_template_is_called'] = True
            return 'Mail message for testing.'
        def mock_get_extra_jobs_due_to_schema_changes(
                unused_remote_alias, unused_previous_release_version):
            check_function_calls[
                'get_extra_jobs_due_to_schema_changes_is_called'] = True
            return []
        def mock_did_supported_audio_languages_change(
                unused_remote_alias, unused_previous_release_version):
            check_function_calls[
                'did_supported_audio_languages_change_is_called'] = True
            return True
        def mock_get_remote_alias(unused_remote_url):
            check_function_calls['get_remote_alias_is_called'] = True
        def mock_verify_current_branch_name(unused_branch_name):
            check_function_calls['verify_current_branch_name_is_called'] = True
        def mock_cut_release_branch():
            check_function_calls['cut_release_branch_is_called'] = True
        open_tab_swap = self.swap(
            common, 'open_new_tab_in_browser_if_possible', mock_open_tab)
        ask_user_swap = self.swap(
            common, 'ask_user_to_confirm', mock_ask_user_to_confirm)
        input_swap = self.swap(python_utils, 'INPUT', mock_input)
        print_swap = self.swap(python_utils, 'PRINT', mock_print)
        mail_msg_swap = self.swap(
            initial_release_prep, 'get_mail_message_template',
            mock_get_mail_message_template)
        get_extra_jobs_swap = self.swap(
            initial_release_prep, 'get_extra_jobs_due_to_schema_changes',
            mock_get_extra_jobs_due_to_schema_changes)
        check_changes_swap = self.swap(
            initial_release_prep, 'did_supported_audio_languages_change',
            mock_did_supported_audio_languages_change)
        get_alias_swap = self.swap(
            common, 'get_remote_alias', mock_get_remote_alias)
        branch_check_swap = self.swap(
            common, 'verify_current_branch_name',
            mock_verify_current_branch_name)
        cut_branch_swap = self.swap(
            initial_release_prep, 'cut_release_branch',
            mock_cut_release_branch)
        with open_tab_swap, ask_user_swap, input_swap, print_swap:
            with mail_msg_swap, get_alias_swap, check_changes_swap:
                with get_extra_jobs_swap, branch_check_swap, cut_branch_swap:
                    initial_release_prep.main()
        self.assertEqual(check_function_calls, expected_check_function_calls)
|
nilq/baby-python
|
python
|
from random import SystemRandom
import pytest
from cacheout import LFUCache
parametrize = pytest.mark.parametrize
random = SystemRandom()
@pytest.fixture
def cache():
    """Provide a fresh LFU cache bounded at five entries."""
    return LFUCache(maxsize=5)
def assert_keys_evicted_in_order(cache, keys):
    """Assert that `cache` evicts keys exactly in the order given by `keys`."""
    expected = list(keys)  # work on a copy; never mutate the caller's list
    for filler in range(cache.maxsize, cache.maxsize * 2):
        cache.set(filler, filler)
        assert cache.full()
        # Each insertion into a full cache must evict the next expected key...
        assert expected.pop(0) not in cache
        # ...while leaving every later key untouched.
        for survivor in expected:
            assert survivor in cache
def test_lfu_eviction(cache):
    """Test that LFUCache evicts least frequently used set entries first."""
    key_counts = [("a", 4), ("b", 3), ("c", 5), ("d", 1), ("e", 2)]
    for key, count in key_counts:
        cache.set(key, key)
        for _ in range(count):
            cache.get(key)
    # Expected eviction order: least- to most-frequently accessed.
    by_frequency = sorted(key_counts, key=lambda item: item[1])
    eviction_order = [key for key, _ in by_frequency]
    hottest = max(count for _, count in by_frequency)
    for filler in range(len(key_counts)):
        # Insert a new key and make it hotter than any original entry.
        cache.set(filler, filler)
        for _ in range(hottest + 1):
            cache.get(filler)
        assert cache.full()
        assert eviction_order[filler] not in cache
        for survivor in eviction_order[filler + 1:]:
            assert survivor in cache
def test_lfu_get(cache):
    """Test that LFUCache.get() returns cached value."""
    # NOTE(review): the `cache` fixture starts empty, so this loop body never
    # runs and the test passes vacuously — consider seeding entries first.
    for key, value in cache.items():
        assert cache.get(key) == value
def test_lfu_clear(cache):
    """Test that LFUCache.clear() resets access counts."""
    cache.maxsize = 2
    cache.set(1, 1)
    cache.set(2, 2)
    # Make key 1 "hot" so key 2 is the LFU entry and is evicted next.
    for _ in range(5):
        cache.get(1)
    cache.set(3, 3)
    assert 2 not in cache
    cache.clear()
    assert len(cache) == 0
    # After clear(), key 1's old access count must be gone: one access to
    # key 2 now makes key 1 the least frequently used entry.
    cache.set(1, 1)
    cache.set(2, 2)
    cache.get(2)
    cache.set(3, 3)
    assert 1 not in cache
|
nilq/baby-python
|
python
|
# Interactive roster: collects name/sex/age records, then reports the group
# size, average age, the registered women, and everyone above the average age.
pessoaslist = []
pessoastotal = 0
mulheres = []
acimamedia = []
while True:
    # Build a fresh dict per iteration so entries never alias each other
    # (the original reused one shared dict with copy()/clear()).
    pessoa = {
        'Nome': input("Nome: "),
        # Normalize so 'f'/'m' answers match the uppercase checks below.
        'Sexo': input("Sexo: [M/F] ").strip().upper(),
        'Idade': int(input("Idade: ")),
    }
    resp = input("Continuar?: [S/N]").strip().upper()
    pessoaslist.append(pessoa)
    pessoastotal += 1
    # Bug fix: the original only accepted an uppercase "N" to stop.
    if resp == "N":
        break
# Compute the average once, after all entries are collected (the original
# recomputed it on every loop iteration).
soma = sum(p['Idade'] for p in pessoaslist)
media = soma / pessoastotal
mulheres = [p['Nome'] for p in pessoaslist if p['Sexo'] == "F"]
print(f'- O grupo tem {pessoastotal} pessoas. \n- A média de idade é de {media:.2f} anos. \n- As mulheres cadastradas foram {mulheres}')
acimamedia = [p for p in pessoaslist if p['Idade'] > media]
print(f"- Lista das pessoas que estão acima da média: \n{acimamedia}")
|
nilq/baby-python
|
python
|
from ..utils import Object
class MessageSchedulingStateSendAtDate(Object):
    """
    The message will be sent at the specified date

    Attributes:
        ID (:obj:`str`): ``MessageSchedulingStateSendAtDate``

    Args:
        send_date (:obj:`int`):
            Date the message will be sentThe date must be within 367 days in the future

    Returns:
        MessageSchedulingState

    Raises:
        :class:`telegram.Error`
    """
    ID = "messageSchedulingStateSendAtDate"

    def __init__(self, send_date, **kwargs):
        # Unix timestamp at which the message is scheduled to go out.
        self.send_date = send_date  # int

    @staticmethod
    def read(q: dict, *args) -> "MessageSchedulingStateSendAtDate":
        # Build an instance directly from the deserialized TDLib payload.
        return MessageSchedulingStateSendAtDate(q.get('send_date'))
|
nilq/baby-python
|
python
|
'''Desarrollar un programa que cargue los datos de un triángulo.
Implementar una clase con los métodos para inicializar los atributos, imprimir el valor del
lado con un tamaño mayor y el tipo de triángulo que es (equilátero, isósceles o escaleno).'''
import os
class Triangulo():
    """Triangle described by its three (integer-coerced) side lengths."""

    def __init__(self, lado1, lado2, lado3):
        self.lado1 = int(lado1)
        self.lado2 = int(lado2)
        self.lado3 = int(lado3)

    def es_triangulo(self):
        """Return True when the sides satisfy the triangle inequality."""
        a, b, c = self.lado1, self.lado2, self.lado3
        return a + b > c and a + c > b and b + c > a

    def que_soy(self):
        """Classify the triangle by how many distinct side lengths it has."""
        distintos = len({self.lado1, self.lado2, self.lado3})
        if distintos == 1:
            return "Equilatero"
        if distintos == 3:
            return "Escaleno"
        return "Isosceles"

    def mayor_lado(self):
        """Return the longest side as a string, or a notice when all are equal."""
        if self.que_soy() == "Equilatero":
            return "\tTodos los lados son iguales"
        return str(max(self.lado1, self.lado2, self.lado3))
if __name__ == "__main__":
    # Interactive loop: read three comma-separated sides, validate them,
    # and report the triangle's type and longest side.
    os.system("cls")
    mensaje = ""
    lados = dict()
    print("\t*****************************************")
    print("\tVamos a cargar 3 lados de un Triangulo\n")
    print("\t*****************************************")
    while True:
        print(mensaje)
        lista_lados = input("\n\tIngrese los tres lados separados por comas:").split(",")
        if len(lista_lados) == 3:
            lado1, lado2, lado3 = lista_lados
        else:
            mensaje = '''\tERROR! Se ingresaron valores incorrectos!
            ingrese nuevamente!'''
            continue
        if lado1 and lado2 and lado3:
            lado1, lado2, lado3 = [lado.strip() for lado in [lado1,lado2,lado3]]
            # Concatenating the three strings lets one isdigit() call validate
            # all of them (rejects signs, decimals, and embedded spaces).
            if (lado1 + lado2 + lado3).isdigit():
                # Map side value -> label to report WHICH side is longest.
                # NOTE(review): equal sides collide on the same key, so the
                # last label wins for isosceles triangles — confirm intended.
                lados[lado1] = "lado1"
                lados[lado2] = "lado2"
                lados[lado3] = "lado3"
                un_triangulo = Triangulo(lado1, lado2, lado3)
                if un_triangulo.es_triangulo():
                    if un_triangulo.que_soy() != "Equilatero":
                        print(f"\tEl lado mas grande es: {lados[un_triangulo.mayor_lado()]}\n")
                    else:
                        print(un_triangulo.mayor_lado())
                    print(f"\tSoy {type(un_triangulo).__name__} {un_triangulo.que_soy()}")
                else:
                    mensaje = '''\tERROR! los valores ingresados no corresponden a un triangulo
            La suma de dos lados debe ser mayor al tercer lado!,
            Vuelva a cargar!'''
                    continue
            else:
                mensaje = '''\tERROR Debe ingresar solo numeros!
            Vuelva a cargar!'''
                continue
        else:
            mensaje ='''\tERROR! Debe ingresar los tres valores separados por comas!
            Vuelva a cargar! Ej: valor1, valor2, valor3'''
            continue
        opcion = input("\tContinua Cargando? (Enter = s o N = Sale)")
        if opcion.lower() == "n":
            break
    print("\n\t*********************")
    print("\tGracias! Hasta Pronto")
    print("\t*********************")
|
nilq/baby-python
|
python
|
# Inventory script: pages through Active Directory computer objects over LDAPS
# and loads selected attributes into a pandas DataFrame.
import ldap
import pandas
# Bug fix: `import datetime.datetime` is invalid Python and raises
# ModuleNotFoundError at import time; import the package instead.
import datetime
from ldap_paged_search import LdapPagedSearch

host = 'ldaps://example.com:636'
username = 'domain\\username'
password = 'password'
baseDN = 'DC=example,DC=com'
# Renamed from `filter` to avoid shadowing the builtin.
ldap_filter = "(&(objectCategory=computer))"
attributes = ['*']

# Offset (in 100ns ticks) between the Windows FILETIME epoch (1601-01-01)
# and the Unix epoch (1970-01-01).
FILETIME_EPOCH_OFFSET = 116444736000000000

searcher = LdapPagedSearch(host, username, password, maxPages=0, pageSize=1000)
results = searcher.search(baseDN, ldap_filter, attributes=attributes)

def _first(attrs, key, default=False):
    """Return the first value of an LDAP attribute, or `default` when absent."""
    try:
        return attrs[key][0]
    except (KeyError, IndexError):
        return default

computers = []
for computer in results:
    dn, attrs = computer[0], computer[1]
    operatingSystem = _first(attrs, 'operatingSystem')
    operatingSystemServicePack = _first(attrs, 'operatingSystemServicePack')
    hostname = attrs['cn'][0]
    fqdn = _first(attrs, 'dNSHostName')
    whenCreated = attrs['whenCreated'][0]
    try:
        # Convert Windows FILETIME (100ns ticks since 1601) to a UTC datetime.
        lastLogonTimestamp = datetime.datetime.utcfromtimestamp(
            (int(attrs['lastLogonTimestamp'][0]) - FILETIME_EPOCH_OFFSET) / 10000000)
    except (KeyError, IndexError, ValueError):
        lastLogonTimestamp = False
    description = _first(attrs, 'description')
    GUID = attrs['objectGUID'][0]
    computers.append((dn, hostname, fqdn, operatingSystem, operatingSystemServicePack, whenCreated, lastLogonTimestamp, description, GUID))

comp = pandas.DataFrame(computers)
comp.columns = ['dn', 'hostname', 'fqdn', 'operatingSystem', 'operatingSystemServicePack', 'whenCreated', 'lastLogonTimestamp', 'description', 'GUID']
windows = comp[comp['operatingSystem'] != "Mac OS X"]
|
nilq/baby-python
|
python
|
"""Custom pandas accessors."""
import numpy as np
import plotly.graph_objects as go
from vectorbt import defaults
from vectorbt.root_accessors import register_dataframe_accessor
from vectorbt.utils import checks
from vectorbt.utils.widgets import CustomFigureWidget
from vectorbt.generic.accessors import Generic_DFAccessor
@register_dataframe_accessor('ohlcv')
class OHLCV_DFAccessor(Generic_DFAccessor):  # pragma: no cover
    """Accessor on top of OHLCV data. For DataFrames only.

    Accessible through `pd.DataFrame.vbt.ohlcv`."""

    def __init__(self, obj, column_names=None, freq=None):
        if not checks.is_pandas(obj):  # parent accessor
            obj = obj._obj
        # Mapping of logical OHLCV names to actual column names; falls back
        # to `defaults.ohlcv['column_names']` in plot() when None.
        self._column_names = column_names
        Generic_DFAccessor.__init__(self, obj, freq=freq)

    def plot(self,
             display_volume=True,
             candlestick_kwargs=None,
             bar_kwargs=None,
             fig=None,
             **layout_kwargs):
        """Plot OHLCV data.

        Args:
            display_volume (bool): If `True`, displays volume as bar chart.
            candlestick_kwargs (dict): Keyword arguments passed to `plotly.graph_objects.Candlestick`.
            bar_kwargs (dict): Keyword arguments passed to `plotly.graph_objects.Bar`.
            fig (plotly.graph_objects.Figure): Figure to add traces to.
            **layout_kwargs: Keyword arguments for layout.
        Example:
            ```py
            import vectorbt as vbt
            import yfinance as yf

            yf.Ticker("BTC-USD").history(period="max").vbt.ohlcv.plot()
            ```
        """
        # Bug fix: mutable `{}` defaults replaced with the None-sentinel idiom.
        if candlestick_kwargs is None:
            candlestick_kwargs = {}
        if bar_kwargs is None:
            bar_kwargs = {}
        column_names = defaults.ohlcv['column_names'] if self._column_names is None else self._column_names
        # `open_` avoids shadowing the builtin `open`.
        open_ = self._obj[column_names['open']]
        high = self._obj[column_names['high']]
        low = self._obj[column_names['low']]
        close = self._obj[column_names['close']]

        # Set up figure
        if fig is None:
            fig = CustomFigureWidget()
        candlestick = go.Candlestick(
            x=self.index,
            open=open_,
            high=high,
            low=low,
            close=close,
            name='OHLC',
            yaxis="y2",
            xaxis="x"
        )
        candlestick.update(**candlestick_kwargs)
        fig.add_trace(candlestick)
        if display_volume:
            volume = self._obj[column_names['volume']]

            # Bug fix: `np.object` was removed in NumPy >= 1.24; the builtin
            # `object` is the supported spelling.
            marker_colors = np.empty(volume.shape, dtype=object)
            # Color volume bars by the candle's direction.
            marker_colors[(close.values - open_.values) > 0] = 'green'
            marker_colors[(close.values - open_.values) == 0] = 'lightgrey'
            marker_colors[(close.values - open_.values) < 0] = 'red'
            bar = go.Bar(
                x=self.index,
                y=volume,
                marker_color=marker_colors,
                marker_line_width=0,
                name='Volume',
                yaxis="y",
                xaxis="x"
            )
            bar.update(**bar_kwargs)
            fig.add_trace(bar)

            # Split the vertical space: price on top, volume below.
            fig.update_layout(
                yaxis2=dict(
                    domain=[0.33, 1]
                ),
                yaxis=dict(
                    domain=[0, 0.33]
                )
            )
        fig.update_layout(
            showlegend=True,
            xaxis_rangeslider_visible=False,
            xaxis_showgrid=True,
            yaxis_showgrid=True
        )
        fig.update_layout(**layout_kwargs)
        return fig
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: npu_utilization.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import telemetry_top_pb2 as telemetry__top__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='npu_utilization.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x15npu_utilization.proto\x1a\x13telemetry_top.proto\"C\n\x1bNetworkProcessorUtilization\x12$\n\x0enpu_util_stats\x18\x01 \x03(\x0b\x32\x0c.Utilization\"q\n\x0bUtilization\x12\x12\n\nidentifier\x18\x01 \x02(\t\x12\x13\n\x0butilization\x18\x02 \x01(\r\x12\x1c\n\x07packets\x18\x03 \x03(\x0b\x32\x0b.PacketLoad\x12\x1b\n\x06memory\x18\x04 \x03(\x0b\x32\x0b.MemoryLoad\"\xba\x01\n\nMemoryLoad\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x61verage_util\x18\x02 \x01(\r\x12\x14\n\x0chighest_util\x18\x03 \x01(\r\x12\x13\n\x0blowest_util\x18\x04 \x01(\r\x12\x1e\n\x16\x61verage_cache_hit_rate\x18\x05 \x01(\r\x12\x1e\n\x16highest_cache_hit_rate\x18\x06 \x01(\r\x12\x1d\n\x15lowest_cache_hit_rate\x18\x07 \x01(\r\"\xa2\x01\n\nPacketLoad\x12\x12\n\nidentifier\x18\x01 \x02(\t\x12\x0c\n\x04rate\x18\x02 \x01(\x04\x12\'\n\x1f\x61verage_instructions_per_packet\x18\x03 \x01(\r\x12&\n\x1e\x61verage_wait_cycles_per_packet\x18\x04 \x01(\r\x12!\n\x19\x61verage_cycles_per_packet\x18\x05 \x01(\r:W\n\x18jnpr_npu_utilization_ext\x12\x17.JuniperNetworksSensors\x18\x0c \x01(\x0b\x32\x1c.NetworkProcessorUtilization')
,
dependencies=[telemetry__top__pb2.DESCRIPTOR,])
JNPR_NPU_UTILIZATION_EXT_FIELD_NUMBER = 12
jnpr_npu_utilization_ext = _descriptor.FieldDescriptor(
name='jnpr_npu_utilization_ext', full_name='jnpr_npu_utilization_ext', index=0,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
_NETWORKPROCESSORUTILIZATION = _descriptor.Descriptor(
name='NetworkProcessorUtilization',
full_name='NetworkProcessorUtilization',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='npu_util_stats', full_name='NetworkProcessorUtilization.npu_util_stats', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=46,
serialized_end=113,
)
_UTILIZATION = _descriptor.Descriptor(
name='Utilization',
full_name='Utilization',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='identifier', full_name='Utilization.identifier', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='utilization', full_name='Utilization.utilization', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='packets', full_name='Utilization.packets', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory', full_name='Utilization.memory', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=115,
serialized_end=228,
)
_MEMORYLOAD = _descriptor.Descriptor(
name='MemoryLoad',
full_name='MemoryLoad',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='MemoryLoad.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_util', full_name='MemoryLoad.average_util', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='highest_util', full_name='MemoryLoad.highest_util', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lowest_util', full_name='MemoryLoad.lowest_util', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_cache_hit_rate', full_name='MemoryLoad.average_cache_hit_rate', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='highest_cache_hit_rate', full_name='MemoryLoad.highest_cache_hit_rate', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lowest_cache_hit_rate', full_name='MemoryLoad.lowest_cache_hit_rate', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=231,
serialized_end=417,
)
_PACKETLOAD = _descriptor.Descriptor(
name='PacketLoad',
full_name='PacketLoad',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='identifier', full_name='PacketLoad.identifier', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rate', full_name='PacketLoad.rate', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_instructions_per_packet', full_name='PacketLoad.average_instructions_per_packet', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_wait_cycles_per_packet', full_name='PacketLoad.average_wait_cycles_per_packet', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_cycles_per_packet', full_name='PacketLoad.average_cycles_per_packet', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=420,
serialized_end=582,
)
# Generated wiring: resolve message-typed fields to their descriptors.
_NETWORKPROCESSORUTILIZATION.fields_by_name['npu_util_stats'].message_type = _UTILIZATION
_UTILIZATION.fields_by_name['packets'].message_type = _PACKETLOAD
_UTILIZATION.fields_by_name['memory'].message_type = _MEMORYLOAD
# Register all top-level messages and the extension on the file descriptor.
DESCRIPTOR.message_types_by_name['NetworkProcessorUtilization'] = _NETWORKPROCESSORUTILIZATION
DESCRIPTOR.message_types_by_name['Utilization'] = _UTILIZATION
DESCRIPTOR.message_types_by_name['MemoryLoad'] = _MEMORYLOAD
DESCRIPTOR.message_types_by_name['PacketLoad'] = _PACKETLOAD
DESCRIPTOR.extensions_by_name['jnpr_npu_utilization_ext'] = jnpr_npu_utilization_ext
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete Python message classes built from the descriptors above.
NetworkProcessorUtilization = _reflection.GeneratedProtocolMessageType('NetworkProcessorUtilization', (_message.Message,), {
  'DESCRIPTOR' : _NETWORKPROCESSORUTILIZATION,
  '__module__' : 'npu_utilization_pb2'
  # @@protoc_insertion_point(class_scope:NetworkProcessorUtilization)
  })
_sym_db.RegisterMessage(NetworkProcessorUtilization)
Utilization = _reflection.GeneratedProtocolMessageType('Utilization', (_message.Message,), {
  'DESCRIPTOR' : _UTILIZATION,
  '__module__' : 'npu_utilization_pb2'
  # @@protoc_insertion_point(class_scope:Utilization)
  })
_sym_db.RegisterMessage(Utilization)
MemoryLoad = _reflection.GeneratedProtocolMessageType('MemoryLoad', (_message.Message,), {
  'DESCRIPTOR' : _MEMORYLOAD,
  '__module__' : 'npu_utilization_pb2'
  # @@protoc_insertion_point(class_scope:MemoryLoad)
  })
_sym_db.RegisterMessage(MemoryLoad)
PacketLoad = _reflection.GeneratedProtocolMessageType('PacketLoad', (_message.Message,), {
  'DESCRIPTOR' : _PACKETLOAD,
  '__module__' : 'npu_utilization_pb2'
  # @@protoc_insertion_point(class_scope:PacketLoad)
  })
_sym_db.RegisterMessage(PacketLoad)
# Attach the Juniper sensor extension to its containing message type.
jnpr_npu_utilization_ext.message_type = _NETWORKPROCESSORUTILIZATION
telemetry__top__pb2.JuniperNetworksSensors.RegisterExtension(jnpr_npu_utilization_ext)
# @@protoc_insertion_point(module_scope)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
"""Command-line entry point exposing DSVCommand through Python Fire."""
import fire
from lib.dsv.commands.dsv_command import DSVCommand
if __name__ == '__main__':
    # Fire introspects DSVCommand and maps its public methods to subcommands.
    fire.Fire(DSVCommand)
|
nilq/baby-python
|
python
|
import predpy
from predpy.predpy import *
#from predpy.predpy import predpy
from predpy.predpy import cleandata
from predpy.predpy import galgraphs
|
nilq/baby-python
|
python
|
#
# Copyright 2020 Logical Clocks AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import humps
import json
from hsfs import util
class FeatureGroupCommit:
    """Metadata describing one commit on a feature group (row counts and when
    the commit happened), as returned by the feature-store REST API.

    The extra constructor parameters (``committime``, ``type``, ``items``,
    ``count``, ``href``) are accepted so the raw REST envelope can be splatted
    straight into ``cls(**payload)``; they are intentionally not stored.
    """

    def __init__(
        self,
        commitid=None,
        commit_date_string=None,
        rows_inserted=None,
        rows_updated=None,
        rows_deleted=None,
        committime=None,
        type=None,
        items=None,
        count=None,
        href=None,
    ):
        self._commitid = commitid
        self._commit_date_string = commit_date_string
        self._rows_inserted = rows_inserted
        self._rows_updated = rows_updated
        self._rows_deleted = rows_deleted

    @classmethod
    def from_response_json(cls, json_dict):
        """Build a commit (or a list of commits) from a REST response dict.

        Collection responses carry ``count``/``items``; single-object
        responses do not.
        """
        json_decamelized = humps.decamelize(json_dict)
        # Robustness fix: "count" is only present on collection responses;
        # the original indexing raised KeyError for single-object payloads.
        if json_decamelized.get("count", 0) >= 1:
            return [cls(**commit_dto) for commit_dto in json_decamelized["items"]]
        return cls(**json_decamelized)

    def update_from_response_json(self, json_dict):
        """Refresh this instance in place from a REST response dict."""
        json_decamelized = humps.decamelize(json_dict)
        # Robustness fix: drop envelope keys only when present (the original
        # unconditional pop raised KeyError when any of them was missing).
        for envelope_key in ("type", "href", "committime"):
            json_decamelized.pop(envelope_key, None)
        self.__init__(**json_decamelized)
        return self

    def json(self):
        """Serialize this commit with the feature-store JSON encoder."""
        return json.dumps(self, cls=util.FeatureStoreEncoder)

    def to_dict(self):
        """Return the camelCased dict representation expected by the API."""
        return {
            "commitID": self._commitid,
            "commitDateString": self._commit_date_string,
            "rowsInserted": self._rows_inserted,
            "rowsUpdated": self._rows_updated,
            "rowsDeleted": self._rows_deleted,
        }

    @property
    def commitid(self):
        return self._commitid

    @property
    def commit_date_string(self):
        return self._commit_date_string

    @property
    def rows_inserted(self):
        return self._rows_inserted

    @property
    def rows_updated(self):
        return self._rows_updated

    @property
    def rows_deleted(self):
        return self._rows_deleted

    @commitid.setter
    def commitid(self, commitid):
        self._commitid = commitid

    @commit_date_string.setter
    def commit_date_string(self, commit_date_string):
        self._commit_date_string = commit_date_string

    @rows_inserted.setter
    def rows_inserted(self, rows_inserted):
        self._rows_inserted = rows_inserted

    @rows_updated.setter
    def rows_updated(self, rows_updated):
        self._rows_updated = rows_updated

    @rows_deleted.setter
    def rows_deleted(self, rows_deleted):
        self._rows_deleted = rows_deleted
|
nilq/baby-python
|
python
|
from rest_framework import serializers
from rest_framework.reverse import reverse
from onadata.apps.fsforms.models import FieldSightXF
from onadata.apps.logger.models import XForm
from onadata.libs.utils.decorators import check_obj
class XFormListSerializer(serializers.ModelSerializer):
    """Minimal read serializer for XForm: exposes only ``id`` and ``title``."""
    class Meta:
        model = XForm
        fields = ('id', 'title')
|
nilq/baby-python
|
python
|
import numpy as np
from torch import nn, tensor
import torch
from torch.autograd import Variable
class time_loss(nn.Module):
    """Loss that accumulates a distance term per class center.

    NOTE(review): as written, every term is ``dist(center1, center1)`` — the
    distance of a class center to *itself* — which is 0 for l1/l2 and
    ``1 - cos(x, x) == 0`` for cosine, so the returned loss is constantly
    zero. Presumably one operand should be a *different* center (or a stored
    reference center); confirm the intended pairing before use.
    NOTE(review): ``margin`` is stored but never used; ``feat_size`` is
    computed and unused.
    """
    def __init__(self, margin=0.1, dist_type = 'l2'):
        super(time_loss, self).__init__()
        self.margin = margin
        self.dist_type = dist_type
        # Distance callable selected once at construction time.
        if dist_type == 'l2':
            self.dist = nn.MSELoss(reduction='sum')
        if dist_type == 'cos':
            self.dist = nn.CosineSimilarity(dim=0)
        if dist_type == 'l1':
            self.dist = nn.L1Loss()
    def forward(self, feat, label1):
        # feat: 2-D tensor of features; label1 is only used for its number of
        # unique labels — feat is split into that many equal chunks along dim 0
        # (assumes rows are grouped by label and evenly divisible — TODO confirm).
        feat_size = feat.size()[1]
        feat_num = feat.size()[0]
        label_num = len(label1.unique())
        feat = feat.chunk(label_num, 0)
        #loss = Variable(.cuda())
        for i in range(label_num):
            # Mean feature vector ("center") of chunk i.
            center1 = torch.mean(feat[i], dim=0)
            if self.dist_type == 'l2' or self.dist_type == 'l1':
                if i == 0:
                    dist = max(0, abs(self.dist(center1, center1)))
                else:
                    dist += max(0, abs(self.dist(center1, center1)))
            elif self.dist_type == 'cos':
                if i == 0:
                    dist = max(0, 1-self.dist(center1, center1))
                else:
                    dist += max(0, 1-self.dist(center1, center1))
        return dist
|
nilq/baby-python
|
python
|
from types import FunctionType
import pygame
from pygame.locals import *
from pygame.font import Font
from pygameMenuPro.event import Event
# Reusable solid colors for menu text and backgrounds.
COLOR_BLACK = Color(0, 0, 0)
COLOR_WHITE = Color(255, 255, 255)
class InputManager:
    """Accumulates keyboard, mouse-position, mouse-button and wheel state
    between calls to reset(); shared by all Options via ``Option.input``.
    """
    def __init__(self):
        # Key codes of KEYDOWN events seen since the last reset.
        self.last_checked_input = []
        # Mouse positions sampled on each check_input() call (newest last).
        self.last_mouse_position:list[tuple[int,int]] = []
        # Snapshot of (left, middle, right) button state from the last call.
        self.mouse_clicked = (False,False,False)
        # (x, y) wheel delta from the most recent MOUSEWHEEL event.
        self.mouse_wheel = (0,0)
    def check_input(self) -> int:
        """Pump the pygame event queue once and record input state.

        Returns the key code of the first KEYDOWN event, or 0 if none.
        NOTE: returning on the first KEYDOWN leaves the remaining events of
        this frame unprocessed until the next call; QUIT terminates the
        process immediately.
        """
        for event in pygame.event.get():
            if(event.type == pygame.QUIT):
                pygame.quit()
                exit(0)
            elif(event.type == KEYDOWN):
                self.last_checked_input.append(event.key)
                return event.key
            elif(event.type == MOUSEWHEEL):
                self.mouse_wheel = (event.x, event.y)
        # No key was pressed: still sample mouse position and button state.
        self.last_mouse_position.append(pygame.mouse.get_pos())
        self.mouse_clicked = pygame.mouse.get_pressed()
        return 0
    def reset(self):
        # Clear everything except mouse_clicked (kept until next check_input).
        self.reset_last_checked()
        self.reset_last_mouse_position()
        self.reset_mouse_wheel()
    def reset_last_checked(self):
        self.last_checked_input.clear()
    def reset_last_mouse_position(self):
        self.last_mouse_position.clear()
    def reset_mouse_wheel(self):
        self.mouse_wheel = (0,0)
class FontManager:
    """Registry of named pygame fonts plus multi-line text rendering; shared
    by all Options via ``Option.font``.
    """

    def __init__(self, fonts=None):
        """Create the manager.

        :param fonts: optional mapping of font name -> pygame Font.
            BUG FIX: the original default was a shared mutable ``{}`` — every
            FontManager created without an explicit dict shared one registry.
        """
        pygame.font.init()
        self._fonts = {} if fonts is None else fonts

    def add_font(self, font_str: str, font: Font):
        """Register *font* under the name *font_str* (overwrites)."""
        self._fonts[font_str] = font

    def get_font(self, font_str: str):
        """Return the font registered as *font_str*, or None if unknown."""
        return self._fonts.get(font_str, None)

    def set_default_option(self, font: Font):
        """
        set the default option font
        """
        self.add_font('default_option_font', font)

    def set_default_highlight(self, font: Font):
        """Set the font used for highlighted (active) options."""
        self.add_font('default_highlight_font', font)

    def set_default_title(self, font: Font):
        """Set the font used for menu titles."""
        self.add_font('default_title_font', font)

    def draw_text(self, text: str, font_str: str, color: Color = Color(255, 255, 255)):
        """Render possibly multi-line *text* onto a new transparent surface,
        each line horizontally centered with 1.25x line spacing.
        """
        font = self._fonts[font_str]
        lines = text.splitlines()
        # Width is dictated by the longest line.
        maxline = max(lines, key=len)
        surface = pygame.Surface((font.size(maxline)[0], font.get_height() * 1.25 * len(lines)), pygame.SRCALPHA, 32)
        for i, line in enumerate(lines):
            line_surf = font.render(line, True, color)
            text_rect = line_surf.get_rect()
            text_rect.centerx = surface.get_rect().centerx
            text_rect.top = i * font.get_height() * 1.25
            surface.blit(line_surf, text_rect.topleft)
        surface.convert_alpha()
        return surface
class Option:
    """A single selectable menu entry: renders its text, and fires
    on_active / on_select / on_deactive events through its Event bus.
    """
    # static attribute to check the input (shared by every Option/Menu)
    input = InputManager()
    # static attribute manage the user fonts
    font = FontManager()
    clock = pygame.time.Clock()

    def __init__(self, text: str, font_str: str = 'default_option_font', color: Color = COLOR_WHITE, event=None):
        """:param event: optional Event bus; a fresh one is created if None."""
        self.add = AddExtention(self)
        self._event = event
        if self._event is None:  # idiom fix: identity test, not `== None`
            self._event = Event()
        self.text = text
        self._pos = None
        self._font_str = font_str
        self._activation_keys: list[int] = [K_RETURN]
        self.color = color
        self.rect = None

    def is_selected(self):
        """
        returns true iff on of the activation keys is in Option.input.last_checked_input
        """
        return len(list(set(Option.input.last_checked_input) & set(self._activation_keys))) > 0

    def on_select(self):
        """
        will be called when is_selected is true
        """
        self._event.post_event('on_select', self)

    def on_active(self):
        """
        will be called when this option is the current active option in the menu
        """
        self._event.post_event('on_active', self)
        if self.is_selected():
            self.on_select()

    def on_deactive(self):
        """
        will be called before the next option is being activated
        """
        self._event.post_event('on_deactive', self)

    def draw(self, surface: pygame.Surface, pos):
        """Render this option and blit it horizontally centered on self._pos.

        NOTE(review): the *pos* argument is accepted but ignored — the blit
        position always comes from ``self._pos`` (callers in this file pass
        ``option._pos`` anyway, so behavior is identical). Confirm before
        relying on *pos* externally.
        """
        surf = self.render()
        self.rect = surface.blit(surf, (self._pos[0] - surf.get_width()//2, self._pos[1]))

    def render(self):
        """Return a surface with this option's text in its current font/color."""
        return Option.font.draw_text(self.text, self._font_str, color=self.color)
class AddExtention():
    """Fluent builder attached to every Option as ``option.add``: decorates
    the option (highlight, input suffix), converts it to a (mouse) menu, and
    registers event listeners. Every method returns an Option so calls chain.
    """
    def __init__(self, option: Option):
        self._option = option
    def option(self):
        # The Option this extension decorates (may be replaced by menu()).
        return self._option
    def highlight(self, font_str='default_highlight_font'):
        """
        Add a Highlight decorator
        """
        # Remember the normal font so deactivation can restore it.
        self._regular_font_str = self._option._font_str
        def highlight_me(option: Option):
            option._font_str = font_str
        def dont_highlight_me(option: Option):
            option._font_str = self._regular_font_str
        self._option.add.active_listener(highlight_me)\
            .add.deactive_listener(dont_highlight_me)
        return self._option
    def input(self, input):
        """
        add input decorator
        """
        # Appends the current value of `input_output` to the option text and
        # keeps the text in sync while the option is active.
        head = self._option.text
        setattr(self._option, 'input_output', input)
        self._option.left = K_LEFT
        self._option.right = K_RIGHT
        self._option.input_output = input
        self._option.text = head + ' ' + str(self._option.input_output)
        def update_text_with_input(option: Option):
            option.text = head + ' ' + str(self._option.input_output)
        self._option.add.active_listener(update_text_with_input)
        return self._option
    def menu(self, surface: pygame.Surface, title_pos: tuple[int, int], title_font_str: str = 'default_title_font', options: list[Option] = [], background_color=COLOR_BLACK, cursor: pygame.Surface = None):
        """
        convert this option to a menu.
        The menu title will be same as the option text
        """
        self._option = Menu(self.option(), surface, title_pos,
                            title_font_str, options, background_color, cursor)
        return self._option
    def mouse_menu(self, surface: pygame.Surface, title_pos: tuple[int, int], title_font_str: str = 'default_title_font', options: list[Option] = [], background_color=COLOR_BLACK, cursor: pygame.Surface = None):
        """
        convert this option to a mouse menu
        The options of this menu will be activated by mouse hover,
        and selected by mouse click.
        """
        self._option = MouseMenu(self.option(), surface, title_pos, title_font_str, options, background_color, cursor)
        return self._option
    def select_listener(self, func: FunctionType):
        """
        will be called inside on_select()
        """
        self.option()._event.subscribe('on_select', func)
        return self.option()
    def active_listener(self, func: FunctionType):
        """
        will be called inside on_active()
        """
        self.option()._event.subscribe('on_active', func)
        return self.option()
    def deactive_listener(self, func: FunctionType):
        """
        will be called inside on_deactive()
        """
        self.option()._event.subscribe('on_deactive', func)
        return self.option()
    def activation_key(self, key: int):
        """
        add another activation key to this option
        """
        self.option()._activation_keys.append(key)
        return self.option()
class Menu(Option):
    """An Option that, when selected, takes over the surface and runs its own
    keyboard-driven menu loop (title + list of options + optional cursor)
    until the quit key is pressed.
    """

    def __init__(self, option: Option, surface: pygame.Surface, title_pos: tuple[int, int], title_font_str: str = 'default_title_font', options: list[Option] = None, background_color=COLOR_BLACK, cursor: pygame.Surface = None):
        super().__init__(option.text, option._font_str, option.color, option._event)
        # private:
        self._option = option
        self._surface = surface
        self._title_pos = title_pos
        # BUG FIX: the original default was a shared mutable `[]`; every Menu
        # created without an explicit list shared one options list.
        self._options = [] if options is None else options
        self._background_color = background_color
        # public:
        self.title_font_str = title_font_str
        self.run_display = False
        self.state = 0
        self.up = K_UP
        self.down = K_DOWN
        self.quit = K_ESCAPE
        self.cursor = cursor
        self.cursor_offset = 0

        def activate_display_menu(_):
            # Drop the key press that opened this menu so it is not re-handled
            # inside the new loop.
            Option.input.reset_last_checked()
            self.display_menu()
        self.add.select_listener(activate_display_menu)

    def display_menu(self):
        """
        Run this display. It can be called from another menu and "hide" this menu.
        Practicaly, this will stop the current menu loop and start this menu loop.
        """
        self.run_display = True
        while self.run_display:
            self._surface.fill(self._background_color)
            # draw title:
            title_surf = Option.font.draw_text(self.text, self.title_font_str)
            self._surface.blit(title_surf, (self._title_pos[0] - title_surf.get_width()//2, self._title_pos[1]))
            # checking input:
            k = self.input.check_input()
            self.update_state(k)
            if len(self._options) > 0:
                # activate selected option:
                if self.state >= 0:
                    self._options[self.state].on_active()
                # draw options, stacked under the title:
                last_height = Option.font.get_font(
                    self.title_font_str).get_height() + self._title_pos[1]
                for option in self.get_options():
                    option._pos = (self._title_pos[0], last_height)
                    option.draw(self._surface, option._pos)
                    text_height = option.rect.height
                    last_height = option._pos[1] + text_height
                # draw cursor next to the selected option:
                if self.cursor is not None:
                    # NOTE(review): when state == -1 (MouseMenu with nothing
                    # hovered) this indexes the *last* option — confirm intent.
                    selected_option = self._options[self.state]
                    self._surface.blit(self.cursor, (selected_option.rect.left + self.cursor_offset, selected_option.rect.top))
            # reset input list:
            Option.input.reset()
            # refresh:
            pygame.display.update()
            Option.clock.tick(60)

    def update_state(self, k: int):
        """
        This method is being called once in every menu's main loop iteration.
        You shouldn't modify this unless you know what you do
        """
        if k > 0:
            if k == self.quit:
                self.run_display = False
            if len(self._options) > 0:
                if k == self.up:
                    self._options[self.state].on_deactive()
                    self.state -= 1
                elif k == self.down:
                    self._options[self.state].on_deactive()
                    self.state += 1
                # Wrap around the option list.
                self.state %= len(self._options)

    def add_option(self, option: Option, index: int = -1):
        """
        Add an option to this menu. it can be Menu as well...
        """
        if index == -1:
            self._options.append(option)
        else:
            self._options.insert(index, option)

    def set_options(self, options: list[Option]):
        """
        Set the options list to this menu. The list can contain other menus.
        The state of this menu will be reset to 0
        """
        self.state = 0
        self._options = options
        return self

    def get_options(self):
        """
        Returns the option list of this menu
        """
        return self._options

    def __getattr__(self, name: str):
        # Only called when normal lookup fails: delegate unknown attributes to
        # the wrapped Option.
        return self._option.__getattribute__(name)
class MouseMenu(Menu):
    """A Menu whose options are activated by mouse hover and selected by a
    left click; state == -1 means no option is hovered.
    """

    def __init__(self, option: Option, surface: pygame.Surface, title_pos: tuple[int, int], title_font_str: str = 'default_title_font', options: list[Option] = None, background_color=COLOR_BLACK, cursor: pygame.Surface = None):
        # BUG FIX: the original default was a shared mutable `[]`. Normalize
        # to a fresh list here so the base class always receives a real list.
        super().__init__(option, surface, title_pos, title_font_str, options=[] if options is None else options, background_color=background_color, cursor=cursor)
        self.state = -1

    def update_state(self, k: int):
        """Set state to the index of the option under the mouse, or -1."""
        some_option_active = False
        for i, option in enumerate(self._options):
            rect = option.rect
            if rect is not None:
                if len(Option.input.last_mouse_position) > 0 and rect.collidepoint(Option.input.last_mouse_position[-1]):
                    # Hover moved to a different option: deactivate the old one.
                    if self.state != i and self.state >= 0:
                        self._options[self.state].on_deactive()
                    some_option_active = True
                    self.state = i
        if not some_option_active:
            if self.state >= 0:
                self._options[self.state].on_deactive()
            self.state = -1

    def set_options(self, options: list[Option]):
        """Install *options* and make each selectable by left click."""
        super().set_options(options)

        def select_with_mouse(option: Option):
            if Option.input.mouse_clicked[0]:
                option.on_select()
        for option in self._options:
            option.add.active_listener(select_with_mouse)
        return self
|
nilq/baby-python
|
python
|
# Define player object
# Define player object
class Player:
    """A participant, tagged with the house they represent."""

    def __init__(self, name, house):
        self.name = name
        self.house = house
        self.hasNumber = False
        # BUG FIX: defined up front so reading it before setPhonenumber()
        # no longer raises AttributeError.
        self.phoneNumber = None

    # Method for setting users phonenumbers
    def setPhonenumber(self, phoneNumber):
        """Record the player's phone number and flag that they have one."""
        self.phoneNumber = phoneNumber
        self.hasNumber = True
# --- House rosters -----------------------------------------------------------
# 12B
LF12B = ["Phill", "Dave", "Jake", "Pat", "Gabe", "Evan"]

# 1308
WARD1308 = ["Jason", "Zack", "Jack", "JC"]

# 13C
LF13C = ["Lynnanne", "Tori", "Alyssa", "Ollie"]

# 12A
LF12A = ["Ashley", "Carly", "Lauren", "Alexa", "Gabby", "Lexi", "Steph"]

# Rosters and their display names, index-aligned.
houses = [LF12B, LF13C, LF12A, WARD1308]
houseNames = ['Lower Fulton 12B', 'Lower Fulton 13C', 'Lower Fulton 12A', 'Ward 1308']

# One Player per roster entry, tagged with the matching house display name
# (replaces the manual `index` counter and range(len(...)) loop).
people = [
    Player(member, house_name)
    for members, house_name in zip(houses, houseNames)
    for member in members
]

for person in people:
    print(person.name + ' Representing ' + person.house)
|
nilq/baby-python
|
python
|
# ===============================================================================
# Copyright 2020-2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# Benchmark script: times cuML LogisticRegression fit/predict on a generated
# dataset and reports accuracy. Statement order matters (parse args -> load
# data -> fit -> predict -> report); it is driven by the shared `bench` helper.
import argparse

import bench
from cuml import LogisticRegression

parser = argparse.ArgumentParser(description='cuML logistic '
                                             'regression benchmark')
parser.add_argument('--no-fit-intercept', dest='fit_intercept',
                    action='store_false', default=True,
                    help="Don't fit intercept")
parser.add_argument('--solver', default='qn', choices=('qn', 'owl'),
                    help='Solver to use.')
parser.add_argument('--linesearch-max-iter', type=int, default=50,
                    help='Maximum iterations per solver outer iteration')
parser.add_argument('--maxiter', type=int, default=100,
                    help='Maximum iterations for the iterative solver')
parser.add_argument('-C', dest='C', type=float, default=1.0,
                    help='Regularization parameter')
parser.add_argument('--tol', type=float, default=1e-10,
                    help='Tolerance for solver. Default is 1e-10.')
params = bench.parse_args(parser)

# Load generated data
X_train, X_test, y_train, y_test = bench.load_data(params)
params.n_classes = y_train[y_train.columns[0]].nunique()

# Create our classifier object
clf = LogisticRegression(penalty='l2', C=params.C,
                         linesearch_max_iter=params.linesearch_max_iter,
                         fit_intercept=params.fit_intercept, verbose=params.verbose,
                         tol=params.tol,
                         max_iter=params.maxiter, solver=params.solver)

# Time fit and predict
fit_time, _ = bench.measure_function_time(clf.fit, X_train, y_train, params=params)
# Training accuracy (predict on the training set is intentional here).
y_pred = clf.predict(X_train)
train_acc = 100 * bench.accuracy_score(y_pred, y_train)

predict_time, y_pred = bench.measure_function_time(
    clf.predict, X_test, params=params)
test_acc = 100 * bench.accuracy_score(y_pred, y_test)

bench.print_output(library='cuml', algorithm='log_reg',
                   stages=['training', 'prediction'], params=params,
                   functions=['LogReg.fit', 'LogReg.predict'],
                   times=[fit_time, predict_time], metric_type='accuracy[%]',
                   metrics=[train_acc, test_acc], data=[X_train, X_test],
                   alg_instance=clf)
|
nilq/baby-python
|
python
|
from data.station_number_data import StationNumberData
from states_machine.state_context import State
from states_machine.states.on_which_sum import OnWhichSum
class StationNumber(State):
    """State that tries to consume a "<number> <station-word>" phrase from the
    text machine; on success stores the parsed number and always transitions
    to OnWhichSum.
    """

    def __init__(self):
        pass

    def income_handle(self) -> None:
        # TODO:
        #
        print(f"income_handle: StationNumber")

    def outcome_handle(self) -> None:
        """Parse the station number inside a transaction, then hand the
        context over to OnWhichSum.

        NOTE(review): result is a *string* when the word is numeric and an
        *int* (1-based index) when it is a number word — confirm consumers
        accept both.
        """
        result = None
        print(f"outcome_handle: StationNumber")
        try:
            self.text_machine.start_transaction()
            word = self.text_machine.move()
            if word in (StationNumberData.number_words + StationNumberData.number_nums):
                if self.text_machine.move() in StationNumberData.main_words:
                    # TODO: Add to state machine info!!!
                    print("*"*10 + " StationNumber " + "*"*10)
                    print(self.text_machine.get_current())
                    if word.isnumeric():
                        result = word
                    else:
                        # Map the number word to its 1-based position.
                        for i, number_word in enumerate(StationNumberData.number_words):
                            if word == number_word:
                                result = i + 1
                                break
                    print("*" * 10 + " StationNumber " + "*" * 10)
                    self.text_machine.commit()
                else:
                    self.text_machine.rollback()
            else:
                self.text_machine.rollback()
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt.
            print(f" StationNumber: exception ")
            self.text_machine.rollback()
        finally:
            # NOTE(review): this may roll back a transaction that was already
            # committed/rolled back above — confirm text_machine tolerates it.
            if not self.text_machine.has_finished():
                self.text_machine.rollback()
            print(f" StationNumber wants to change the state of the context OnWhichSum.")
            if result is not None:
                self.context.add_result(result, 0)
            self.context.transition_to(OnWhichSum())
|
nilq/baby-python
|
python
|
class Transformer:
    """Abstract base for message transforms.

    Subclasses override map(); transform() lazily yields the mapped form of
    a single message as a one-element generator.
    """

    def transform(self, message):
        """Yield ``self.map(message)`` (evaluated when the generator runs)."""
        yield self.map(message)

    def map(self, message):
        """Convert one message; subclasses must implement this."""
        raise NotImplementedError('Transformer is an abstract class.')
|
nilq/baby-python
|
python
|
import socket
import struct
from struct import *
import sys
def _mac_str(raw_mac):
    # Render 6 raw bytes as an upper-case, colon-separated MAC string.
    return ':'.join('{:02x}'.format(b) for b in raw_mac).upper()


def ethernet_head(raw_data):
    """Parse the 14-byte Ethernet header of *raw_data*.

    Returns (dest_mac, src_mac, proto, payload) where the MACs are formatted
    strings and proto is the EtherType after socket.htons().

    BUG FIX: the original called get_mac_addr(), which is not defined
    anywhere in this file (NameError at runtime); MAC formatting is now done
    by the local _mac_str() helper.
    """
    dest, src, prototype = struct.unpack('! 6s 6s H', raw_data[:14])
    dest_mac = _mac_str(dest)
    src_mac = _mac_str(src)
    proto = socket.htons(prototype)
    data = raw_data[14:]
    return dest_mac, src_mac, proto, data
def get_ip(addr):
    """Format a 4-byte address (bytes or int iterable) as dotted-quad text."""
    return '.'.join(str(octet) for octet in addr)
def main():
    """Capture frames on a raw AF_PACKET socket (Linux-only) and print a
    summary of each Ethernet frame and, for IPv4, its header fields.
    Never returns.
    """
    s = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.ntohs(3))
    while True:
        raw_data, addr = s.recvfrom(65535)
        eth = ethernet_head(raw_data)
        print('\nEthernet Frame:')
        print('Destination: {}, Source: {}, Protocol: {}'.format(eth[0], eth[1], eth[2]))
        # Protocol 8 corresponds to EtherType 0x0800 (IPv4) after htons on
        # little-endian hosts.
        if eth[2] == 8:
            # BUG FIX: the original did `ipv4 = ipv4(eth[4])`, which shadows
            # the name, indexes a non-existent tuple element (payload is
            # eth[3]) and calls the wrong function (the parser is ipv4_head).
            # Its printed indices were also off by one relative to
            # ipv4_head's (version, header_length, ttl, proto, src, target,
            # data) return tuple.
            ip = ipv4_head(eth[3])
            print('\t - ' + 'IPv4 Packet:')
            print('\t\t - ' + 'Version: {}, Header Length: {}, TTL:{},'.format(ip[0], ip[1], ip[2]))
            print('\t\t - ' + 'Protocol: {}, Source: {}, Target:{}'.format(ip[3], ip[4], ip[5]))
def tcp_head(raw_data):
    """Parse the fixed part of a TCP header.

    Returns (src_port, dest_port, sequence, acknowledgment,
    urg, ack, psh, rst, syn, fin, payload) where each flag is 0 or 1 and the
    payload starts at the data offset declared in the header.
    """
    src_port, dest_port, sequence, acknowledgment, offset_reserved_flags = \
        struct.unpack('! H H L L H', raw_data[:14])
    # High 4 bits hold the data offset in 32-bit words.
    offset = (offset_reserved_flags >> 12) * 4
    # Low 6 bits are URG, ACK, PSH, RST, SYN, FIN (high to low).
    flag_urg, flag_ack, flag_psh, flag_rst, flag_syn, flag_fin = (
        (offset_reserved_flags >> bit) & 1 for bit in (5, 4, 3, 2, 1, 0)
    )
    return (src_port, dest_port, sequence, acknowledgment,
            flag_urg, flag_ack, flag_psh, flag_rst, flag_syn, flag_fin,
            raw_data[offset:])
def ipv4_head(raw_data):
    """Parse an IPv4 header.

    Returns (version, header_length, ttl, proto, src, target, payload) where
    the addresses are dotted-quad strings and header_length is in bytes.
    """
    first_byte = raw_data[0]
    version = first_byte >> 4
    header_length = (first_byte & 0x0F) * 4  # IHL is in 32-bit words
    ttl, proto, src_raw, dst_raw = struct.unpack('! 8x B B 2x 4s 4s', raw_data[:20])
    return (version, header_length, ttl, proto,
            get_ip(src_raw), get_ip(dst_raw), raw_data[header_length:])
# --- Windows promiscuous-mode capture (runs at import time) ------------------
# NOTE(review): this section uses SIO_RCVALL, which exists only on Windows,
# while main() above uses AF_PACKET, which exists only on Linux — the two
# halves of this file cannot both run on one platform; confirm intent.
HOST = socket.gethostbyname(socket.gethostname())
s = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_IP)
s.bind((HOST, 0))
# Include IP headers in received packets.
s.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
# Switch the interface into promiscuous mode (Windows-only ioctl).
s.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)
while True:
    print(s.recvfrom(65565))
#
# s.ioctl(socket.SIO_RCVALL, socket.RCVALL_OFF)
|
nilq/baby-python
|
python
|
import unittest
import random
import numpy as np
import pyquil
from cirq import GridQubit, LineQubit, X, Y, Z, PauliSum, PauliString
from openfermion import (
QubitOperator,
IsingOperator,
FermionOperator,
get_interaction_operator,
get_fermion_operator,
jordan_wigner,
qubit_operator_sparse,
)
from zquantum.core.measurement import ExpectationValues
from zquantum.core.utils import RNDSEED, create_object
from zquantum.core.interfaces.mock_objects import MockAnsatz
from zquantum.core.testing import create_random_qubitop, create_random_isingop
from zquantum.core.circuit import build_uniform_param_grid
from ._utils import (
generate_random_qubitop,
get_qubitop_from_coeffs_and_labels,
evaluate_qubit_operator,
get_qubitop_from_matrix,
reverse_qubit_order,
expectation,
change_operator_type,
evaluate_operator_for_parameter_grid,
get_fermion_number_operator,
get_diagonal_component,
get_polynomial_tensor,
qubitop_to_paulisum,
)
class TestQubitOperator(unittest.TestCase):
    """Unit tests for the qubit-operator helpers in ``._utils``: construction
    from coefficients/labels, matrix round-trips, random generation, operator
    evaluation, qubit-order reversal, expectation values, operator-type
    conversion and the fermion number operator.
    """
    def test_build_qubitoperator_from_coeffs_and_labels(self):
        # Given
        test_op = QubitOperator(((0, "Y"), (1, "X"), (2, "Z"), (4, "X")), 3.0j)
        coeffs = [3.0j]
        labels = [[2, 1, 3, 0, 1]]
        # When
        build_op = get_qubitop_from_coeffs_and_labels(coeffs, labels)
        # Then
        self.assertEqual(test_op, build_op)
    def test_qubitop_matrix_converion(self):
        # Given
        m = 4
        n = 2 ** m
        TOL = 10 ** -15
        random.seed(RNDSEED)
        A = np.array([[random.uniform(-1, 1) for x in range(n)] for y in range(n)])
        # When
        A_qubitop = get_qubitop_from_matrix(A)
        A_qubitop_matrix = np.array(qubit_operator_sparse(A_qubitop).todense())
        test_matrix = A_qubitop_matrix - A
        # Then: round-trip reproduces the matrix to within TOL elementwise.
        for row in test_matrix:
            for elem in row:
                self.assertEqual(abs(elem) < TOL, True)
    def test_generate_random_qubitop(self):
        # Given
        nqubits = 4
        nterms = 5
        nlocality = 2
        max_coeff = 1.5
        fixed_coeff = False
        # When
        qubit_op = generate_random_qubitop(
            nqubits, nterms, nlocality, max_coeff, fixed_coeff
        )
        # Then
        self.assertEqual(len(qubit_op.terms), nterms)
        for term, coefficient in qubit_op.terms.items():
            for i in range(nlocality):
                self.assertLess(term[i][0], nqubits)
            self.assertEqual(len(term), nlocality)
            self.assertLessEqual(np.abs(coefficient), max_coeff)
        # Given
        fixed_coeff = True
        # When
        qubit_op = generate_random_qubitop(
            nqubits, nterms, nlocality, max_coeff, fixed_coeff
        )
        # Then
        self.assertEqual(len(qubit_op.terms), nterms)
        for term, coefficient in qubit_op.terms.items():
            self.assertEqual(np.abs(coefficient), max_coeff)
    def test_evaluate_qubit_operator(self):
        # Given
        qubit_op = QubitOperator("0.5 [] + 0.5 [Z1]")
        expectation_values = ExpectationValues([0.5, 0.5])
        # When
        value_estimate = evaluate_qubit_operator(qubit_op, expectation_values)
        # Then
        self.assertAlmostEqual(value_estimate.value, 0.5)
    def test_evaluate_operator_for_parameter_grid(self):
        # Given
        ansatz = MockAnsatz(4, 2)
        grid = build_uniform_param_grid(1, 2, 0, np.pi, np.pi / 10)
        backend = create_object(
            {
                "module_name": "zquantum.core.interfaces.mock_objects",
                "function_name": "MockQuantumSimulator",
            }
        )
        op = QubitOperator("0.5 [] + 0.5 [Z1]")
        previous_layer_parameters = [1, 1]
        # When
        (
            parameter_grid_evaluation,
            optimal_parameters,
        ) = evaluate_operator_for_parameter_grid(
            ansatz, grid, backend, op, previous_layer_params=previous_layer_parameters
        )
        # Then (for brevity, only check first and last evaluations)
        self.assertIsInstance(parameter_grid_evaluation[0]["value"].value, float)
        self.assertEqual(parameter_grid_evaluation[0]["parameter1"], 0)
        self.assertEqual(parameter_grid_evaluation[0]["parameter2"], 0)
        self.assertIsInstance(parameter_grid_evaluation[99]["value"].value, float)
        self.assertEqual(
            parameter_grid_evaluation[99]["parameter1"], np.pi - np.pi / 10
        )
        self.assertEqual(
            parameter_grid_evaluation[99]["parameter2"], np.pi - np.pi / 10
        )
        self.assertEqual(len(optimal_parameters), 4)
        self.assertEqual(optimal_parameters[0], 1)
        self.assertEqual(optimal_parameters[1], 1)
    def test_reverse_qubit_order(self):
        # Given
        op1 = QubitOperator("[Z0 Z1]")
        op2 = QubitOperator("[Z1 Z0]")
        # When/Then
        self.assertEqual(op1, reverse_qubit_order(op2))
        # Given
        op1 = QubitOperator("Z0")
        op2 = QubitOperator("Z1")
        # When/Then
        self.assertEqual(op1, reverse_qubit_order(op2, n_qubits=2))
        self.assertEqual(op2, reverse_qubit_order(op1, n_qubits=2))
    def test_expectation(self):
        """Check <Z0> and <Z1> for the state |100>"""
        # Given
        wf = pyquil.wavefunction.Wavefunction([0, 1, 0, 0, 0, 0, 0, 0])
        op1 = QubitOperator("Z0")
        op2 = QubitOperator("Z1")
        # When
        exp_op1 = expectation(op1, wf)
        exp_op2 = expectation(op2, wf)
        # Then
        self.assertAlmostEqual(-1, exp_op1)
        self.assertAlmostEqual(1, exp_op2)
    def test_change_operator_type(self):
        # Given
        operator1 = QubitOperator("Z0 Z1", 4.5)
        operator2 = IsingOperator("Z0 Z1", 4.5)
        operator3 = IsingOperator()
        operator4 = IsingOperator("Z0", 0.5) + IsingOperator("Z1", 2.5)
        # When
        new_operator1 = change_operator_type(operator1, IsingOperator)
        new_operator2 = change_operator_type(operator2, QubitOperator)
        new_operator3 = change_operator_type(operator3, QubitOperator)
        new_operator4 = change_operator_type(operator4, QubitOperator)
        # Then
        self.assertEqual(IsingOperator("Z0 Z1", 4.5), new_operator1)
        self.assertEqual(QubitOperator("Z0 Z1", 4.5), new_operator2)
        self.assertEqual(QubitOperator(), new_operator3)
        self.assertEqual(
            QubitOperator("Z0", 0.5) + QubitOperator("Z1", 2.5), new_operator4
        )
    def test_get_fermion_number_operator(self):
        # Given: without n_particles the constant offset is 0.
        n_qubits = 4
        n_particles = None
        correct_operator = get_interaction_operator(
            FermionOperator(
                """
            0.0 [] +
            1.0 [0^ 0] +
            1.0 [1^ 1] +
            1.0 [2^ 2] +
            1.0 [3^ 3]
            """
            )
        )
        # When
        number_operator = get_fermion_number_operator(n_qubits)
        # Then
        self.assertEqual(number_operator, correct_operator)
        # Given: with n_particles=2 the constant offset becomes -2.
        n_qubits = 4
        n_particles = 2
        correct_operator = get_interaction_operator(
            FermionOperator(
                """
            -2.0 [] +
            1.0 [0^ 0] +
            1.0 [1^ 1] +
            1.0 [2^ 2] +
            1.0 [3^ 3]
            """
            )
        )
        # When
        number_operator = get_fermion_number_operator(n_qubits, n_particles)
        # Then
        self.assertEqual(number_operator, correct_operator)
class TestOtherUtils(unittest.TestCase):
    """Unit tests for diagonal-component extraction and for converting
    QubitOperator instances to cirq PauliSum objects.
    """
    def test_get_diagonal_component_polynomial_tensor(self):
        fermion_op = FermionOperator("0^ 1^ 2^ 0 1 2", 1.0)
        fermion_op += FermionOperator("0^ 1^ 2^ 0 1 3", 2.0)
        fermion_op += FermionOperator((), 3.0)
        polynomial_tensor = get_polynomial_tensor(fermion_op)
        diagonal_op, remainder_op = get_diagonal_component(polynomial_tensor)
        # The split must be exact: diagonal + remainder == original.
        self.assertTrue((diagonal_op + remainder_op) == polynomial_tensor)
        diagonal_qubit_op = jordan_wigner(get_fermion_operator(diagonal_op))
        remainder_qubit_op = jordan_wigner(get_fermion_operator(remainder_op))
        # The diagonal part maps to Z-only Pauli terms under Jordan-Wigner.
        for term in diagonal_qubit_op.terms:
            for pauli in term:
                self.assertTrue(pauli[1] == "Z")
        for term in remainder_qubit_op.terms:
            is_diagonal = True
            for pauli in term:
                if pauli[1] != "Z":
                    is_diagonal = False
                    break
            self.assertFalse(is_diagonal)
    def test_get_diagonal_component_interaction_op(self):
        fermion_op = FermionOperator("1^ 1", 0.5)
        fermion_op += FermionOperator("2^ 2", 0.5)
        fermion_op += FermionOperator("1^ 2^ 0 3", 0.5)
        diagonal_op, remainder_op = get_diagonal_component(
            get_interaction_operator(fermion_op)
        )
        self.assertTrue(
            (diagonal_op + remainder_op) == get_interaction_operator(fermion_op)
        )
        diagonal_qubit_op = jordan_wigner(diagonal_op)
        remainder_qubit_op = jordan_wigner(remainder_op)
        for term in diagonal_qubit_op.terms:
            for pauli in term:
                self.assertTrue(pauli[1] == "Z")
        is_diagonal = True
        for term in remainder_qubit_op.terms:
            for pauli in term:
                if pauli[1] != "Z":
                    is_diagonal = False
                    break
        self.assertFalse(is_diagonal)
    def test_qubitop_to_paulisum_identity_operator(self):
        # Given
        qubit_operator = QubitOperator("", 4)
        # When
        paulisum = qubitop_to_paulisum(qubit_operator)
        # Then
        self.assertEqual(paulisum.qubits, ())
        self.assertEqual(paulisum, PauliSum() + 4)
    def test_qubitop_to_paulisum_z0z1_operator(self):
        # Given: default qubit mapping uses GridQubit(i, 0).
        qubit_operator = QubitOperator("Z0 Z1", -1.5)
        expected_qubits = (GridQubit(0, 0), GridQubit(1, 0))
        expected_paulisum = (
            PauliSum()
            + PauliString(Z.on(expected_qubits[0]))
            * PauliString(Z.on(expected_qubits[1]))
            * -1.5
        )
        # When
        paulisum = qubitop_to_paulisum(qubit_operator)
        # Then
        self.assertEqual(paulisum.qubits, expected_qubits)
        self.assertEqual(paulisum, expected_paulisum)
    def test_qubitop_to_paulisum_setting_qubits(self):
        # Given: an explicit qubits= argument overrides the default mapping.
        qubit_operator = QubitOperator("Z0 Z1", -1.5)
        expected_qubits = (LineQubit(0), LineQubit(5))
        expected_paulisum = (
            PauliSum()
            + PauliString(Z.on(expected_qubits[0]))
            * PauliString(Z.on(expected_qubits[1]))
            * -1.5
        )
        # When
        paulisum = qubitop_to_paulisum(qubit_operator, qubits=expected_qubits)
        # Then
        self.assertEqual(paulisum.qubits, expected_qubits)
        self.assertEqual(paulisum, expected_paulisum)
    def test_qubitop_to_paulisum_more_terms(self):
        # Given
        qubit_operator = (
            QubitOperator("Z0 Z1 Z2", -1.5)
            + QubitOperator("X0", 2.5)
            + QubitOperator("Y1", 3.5)
        )
        expected_qubits = (LineQubit(0), LineQubit(5), LineQubit(8))
        expected_paulisum = (
            PauliSum()
            + (
                PauliString(Z.on(expected_qubits[0]))
                * PauliString(Z.on(expected_qubits[1]))
                * PauliString(Z.on(expected_qubits[2]))
                * -1.5
            )
            + (PauliString(X.on(expected_qubits[0]) * 2.5))
            + (PauliString(Y.on(expected_qubits[1]) * 3.5))
        )
        # When
        paulisum = qubitop_to_paulisum(qubit_operator, qubits=expected_qubits)
        # Then
        self.assertEqual(paulisum.qubits, expected_qubits)
        self.assertEqual(paulisum, expected_paulisum)
nilq/baby-python
|
python
|
"""UseCase for updating a metric entry's properties."""
import logging
from argparse import Namespace, ArgumentParser
from typing import Final, Optional
import jupiter.command.command as command
from jupiter.domain.adate import ADate
from jupiter.use_cases.metrics.entry.update import MetricEntryUpdateUseCase
from jupiter.framework.update_action import UpdateAction
from jupiter.framework.base.entity_id import EntityId
LOGGER = logging.getLogger(__name__)
class MetricEntryUpdate(command.Command):
    """UseCase for updating a metric entry's properties."""

    _command: Final[MetricEntryUpdateUseCase]

    def __init__(self, the_command: MetricEntryUpdateUseCase) -> None:
        """Constructor."""
        self._command = the_command

    @staticmethod
    def name() -> str:
        """The name of the command."""
        return "metric-entry-update"

    @staticmethod
    def description() -> str:
        """The description of the command."""
        return "Update a metric entry"

    def build_parser(self, parser: ArgumentParser) -> None:
        """Construct a argparse parser for the command."""
        parser.add_argument(
            "--id", dest="ref_id", required=True, help="The id of the metric")
        parser.add_argument(
            "--collection-time", dest="collection_time", required=False,
            help="The time at which a metric should be recorded")
        parser.add_argument(
            "--value", dest="value", required=False, type=float,
            help="The value for the metric")
        parser.add_argument(
            "--notes", dest="notes", required=False, type=str,
            help="A note for the metric")
        parser.add_argument(
            "--clear-notes", dest="clear_notes", default=False,
            action="store_const", const=True, help="Clear the notes")

    def run(self, args: Namespace) -> None:
        """Callback to execute when the command is invoked."""
        ref_id = EntityId.from_raw(args.ref_id)

        # Each optional flag maps to an UpdateAction: an absent flag means
        # "leave the field untouched".
        if args.collection_time is None:
            collection_time = UpdateAction.do_nothing()
        else:
            collection_time = UpdateAction.change_to(ADate.from_str(args.collection_time))

        value = UpdateAction.do_nothing() if args.value is None \
            else UpdateAction.change_to(args.value)

        notes: UpdateAction[Optional[str]]
        if args.clear_notes:
            notes = UpdateAction.change_to(None)
        elif args.notes is not None:
            notes = UpdateAction.change_to(args.notes)
        else:
            notes = UpdateAction.do_nothing()

        self._command.execute(
            MetricEntryUpdateUseCase.Args(
                ref_id=ref_id,
                collection_time=collection_time,
                value=value,
                notes=notes))
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
USD Opinion Editor widget implementations
"""
from __future__ import print_function, division, absolute_import
__author__ = "Tomas Poveda"
__license__ = "MIT"
__maintainer__ = "Tomas Poveda"
__email__ = "tpovedatd@gmail.com"
from Qt.QtCore import *
from Qt.QtWidgets import *
from pxr import Usd
from pxr.UsdQt._bindings import _DisplayGroupProxy, _PrimProxy, _AttributeProxy, _MetadataProxy
from pxr.UsdQt.opinionStackModel import _AttributeHandler, _PrimMetadataHandler
from pxr.UsdQt import valueDelegate, opinionStackModel
from artellapipe.libs.usd.core import usdqtutils
class OpinionEditor(QWidget, object):
    """Widget for browsing and editing USD prim opinions (attributes/metadata).

    Lays out a menu bar, a filter line edit, and a tree view inside a vertical
    splitter, and wires in an OpinionStackWidget for inspecting opinion stacks.
    """

    def __init__(self, deleagate=None, parent=None):
        # NOTE(review): "deleagate" looks like a typo for "delegate", but it is a
        # public keyword argument -- renaming it would break callers; confirm first.
        super(OpinionEditor, self).__init__(parent=parent)
        self._menu_bar = QMenuBar()
        self._layout = QVBoxLayout()
        self.setLayout(self._layout)
        self._filter_line_edit = QLineEdit()
        self._view = usdqtutils.SelectionEditTreeView()
        # Fall back to the default UsdQt value delegate when none is supplied.
        item_delegate = deleagate if deleagate else valueDelegate.ValueDelegate()
        self._view.setItemDelegate(item_delegate)
        self._view.setEditTriggers(
            QAbstractItemView.CurrentChanged | QAbstractItemView.SelectedClicked | QAbstractItemView.EditKeyPressed)
        self._view.setSelectionMode(QAbstractItemView.ExtendedSelection)
        self._splitter = QSplitter(Qt.Vertical, self)
        self._layout.addWidget(self._menu_bar)
        self._layout.addWidget(self._filter_line_edit)
        self._layout.addWidget(self._splitter)
        self._splitter.addWidget(self._view)
        self._setup_actions()
        self._setup_options_menu()
        self._setup_edit_menu()
        self._setup_option_view_widget()

    @property
    def view(self):
        # The main tree view; exposed so controllers can connect to its signals.
        return self._view

    def launch_opinions_viewer(self, prim, handler):
        """Show the opinion stack for *prim* as resolved by *handler*."""
        self._opinion_viewer.launch(opinionStackModel.OpinionStackModel(prim, handler))

    def set_source_model(self, model):
        """Attach *model* to the tree view and recompute column spanning."""
        self._view.setModel(model)
        self.reset_column_spanned()

    def reset_column_spanned(self):
        """Make display-group and prim rows span all columns of the view."""
        for index in self._traverse_all_descendents(QModelIndex()):
            if type(index.internalPointer()) in (_DisplayGroupProxy, _PrimProxy):
                self._view.setFirstColumnSpanned(index.row(), index.parent(), True)

    def _traverse_all_descendents(self, index):
        # Depth-first generator over every model index below *index*.
        for i in range(self._view.model().rowCount(index)):
            child_index = self._view.model().index(i, 0, index)
            yield child_index
            for descendent in self._traverse_all_descendents(child_index):
                yield descendent

    def _setup_actions(self):
        # Placeholder; subclasses or future revisions may install actions here.
        pass

    def _setup_options_menu(self):
        self._options_menu = QMenu('Options')
        self._menu_bar.addMenu(self._options_menu)

    def _setup_edit_menu(self):
        self._edit_menu = QMenu('Edit')
        self._menu_bar.addMenu(self._edit_menu)

    def _setup_option_view_widget(self):
        # The opinion stack viewer lives in the splitter, below the main view.
        self._opinion_viewer = OpinionStackWidget()
        self._options_menu.hide()
        self._splitter.addWidget(self._opinion_viewer)
class OpinionStackWidget(QWidget, object):
    """Collapsible panel showing the opinion stack of a single attribute/metadatum.

    Hidden until :meth:`launch` is called with a model; the Close action hides it
    again and detaches the model.
    """

    def __init__(self, parent=None):
        super(OpinionStackWidget, self).__init__(parent=parent)
        self._toolbar = QToolBar()
        self._toolbar.addWidget(QLabel('Opinion Stack'))
        self._toolbar.addSeparator()
        self._show_all_action = self._toolbar.addAction('Show All')
        self._show_all_action.setCheckable(True)
        self._close_action = self._toolbar.addAction('Close')
        self._show_all_action.toggled.connect(self._on_show_all_toggled)
        self._close_action.triggered.connect(self._on_close)
        # Filter proxy sits between the source opinion model and the view.
        self._opinion_filter = opinionStackModel.OpinionStackFilter()
        self._view = QTreeView()
        self._view.setModel(self._opinion_filter)
        self._layout = QVBoxLayout()
        self.setLayout(self._layout)
        self._layout.addWidget(self._toolbar)
        self._layout.addWidget(self._view)
        self.setSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.MinimumExpanding)

    def launch(self, model):
        """Attach *model* as the filter's source and make the panel visible."""
        self._opinion_filter.setSourceModel(model)
        self.show()

    def close_(self):
        # Trailing underscore avoids clobbering QWidget.close.
        self.hide()
        self._opinion_filter.setSourceModel(None)

    def _on_show_all_toggled(self, checked):
        # Toggles between the winning opinion only and the full stack.
        self._opinion_filter.SetShowFullStack(checked)

    def _on_close(self):
        self.close_()
class OpinionController(QObject, object):
    """Glue between the opinion model and an OpinionEditor widget.

    Forwards prim selections into the model and reacts to double-clicks in the
    editor's view by opening the opinion stack viewer for single-object proxies.
    """

    def __init__(self, model, editor, parent=None):
        """Bind *model* and *editor* and connect the view's double-click signal."""
        super(OpinionController, self).__init__(parent)
        self._model = model
        self._editor = editor
        self._editor.view.doubleClicked.connect(self._on_double_clicked)

    def reset_prims(self, prims):
        """Point the model at a new prim selection and refresh column spanning."""
        self._model.ResetPrims(prims)
        # BUG FIX: the editor's method is named ``reset_column_spanned``;
        # the previous call to ``reset_columns_spanned`` raised AttributeError.
        self._editor.reset_column_spanned()

    def _on_double_clicked(self, index):
        """Open the opinion stack viewer for a single attribute or metadata proxy."""
        proxy = self._model.GetProxyForIndex(index)
        # Exact type() checks kept from the original -- the proxies appear to be
        # distinct leaf classes from UsdQt (confirm before widening to isinstance).
        if type(proxy) == _AttributeProxy:
            if proxy.GetSize() == 1:
                attributes = proxy.GetAttributes()
                attribute = attributes[0]
                self._editor.launch_opinions_viewer(
                    attribute.GetPrim(), _AttributeHandler(attribute.GetName(), Usd.TimeCode.Default()))
        elif type(proxy) == _MetadataProxy:
            if proxy.GetSize() == 1:
                objects = proxy.GetObjects()
                obj = objects[0]
                if type(obj) == Usd.Prim:
                    self._editor.launch_opinions_viewer(obj, _PrimMetadataHandler(proxy.GetName()))
|
nilq/baby-python
|
python
|
# 000
# 999
#
# Sample sequences for the intended demo output sketched below: for every
# combination, print one element from each sequence.
# NOTE(review): a, b and c are defined but never used later (c is rebound
# before the demo loop) -- presumably leftover scaffolding; confirm.
a=['one','two','three','four']
b=range(5)
c=(9,-1,2)
#one 0 9
#...
#four 4 2
# Digit sizes of the mixed-radix counter: index i cycles through 0..S[i]-1.
S = [4,5,3]
def next_value(current, S):
    """Return the successor of *current* in a mixed-radix counter.

    Digit i runs from 0 to S[i]-1, least-significant digit first; e.g.
    [0,0,0] -> [1,0,0]. The input list is not modified, and the counter
    wraps back to all zeros after the last value.
    """
    digits = list(current)
    for pos in range(len(digits)):
        digits[pos] += 1
        # No carry needed: this digit stayed inside its radix.
        if digits[pos] != S[pos]:
            break
        # Digit overflowed -- zero it and carry into the next position.
        digits[pos] = 0
    return digits
# Demo: print one full cycle of the counter (60 == 4*5*3 combinations).
c =[0,0,0]
for i in range(60):
    print(c)
    c = next_value(c,S)
def product(S):
    """Yield every value of a mixed-radix counter with digit sizes *S*.

    Generates len(S)-digit lists, least-significant digit first, starting at
    all zeros: for S == [2, 2] the sequence is [0,0], [1,0], [0,1], [1,1].
    Equivalent to iterating ``itertools.product`` over ``range(s)`` for each
    size, with the first axis varying fastest. Yields one empty list for
    ``S == []`` (the empty product has exactly one element).

    BUG FIX: the original body performed a single increment and returned
    None -- it was clearly an unfinished sketch of this generator.
    """
    # Total number of combinations (1 for the empty product).
    total = 1
    for size in S:
        total *= size
    N = [0] * len(S)
    for _ in range(total):
        # Yield a copy so callers can keep the value while we keep counting.
        yield N[:]
        # Increment with carry, least-significant digit first.
        for i in range(len(N)):
            N[i] += 1
            if N[i] != S[i]:
                break
            N[i] = 0
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
#
# This file is part of the pyfixp project hosted at https://github.com/chipmuenk/pyfixp
#
# Copyright © Christian Muenker
# Licensed under the terms of the MIT License
# (see file LICENSE.txt in root directory for details)
"""
Store the version number here for setup.py and pyfixp.py
"""
__version__ = '0.9.0'
|
nilq/baby-python
|
python
|
class Card:
    """A single playing card tracked by id, image, owner and board position."""

    def __init__(self, id: int, img: str, owner, position: int):
        """Create a card.

        Args:
            id (int): card id (incremental int)
            img (str): path of the card img
            owner: card owner player, or None if in the deck/used
            position (int): when it is played, the position to vote

        The ``removed`` flag is not a constructor argument: every card starts
        in play and is set to True once it is used in the current turn.
        """
        self.id = id
        self.img = img
        self.owner = owner
        self.position = position
        # True while the card is used this turn, so removed before the next.
        self.removed = False

    def __str__(self):
        # Owner is deliberately omitted from the string form (it was commented
        # out in the original -- presumably to keep ownership hidden; confirm).
        # Trailing space kept for compatibility with the previous format.
        return f"{self.id} {self.img} {self.position} "
|
nilq/baby-python
|
python
|
from django.urls import path
from .views import (
RideListView,
RideDetailView,
RideCreateView,
RideUpdateView,
RideDeleteView,
OwnerRideListView,
ShareCreateView,
SharePickRideListView,
ShareRideListView,
DriverListView,
ShareUpdateView
)
from . import views
from .models import Ride, Share
from django.contrib.auth import views as auth_views
# URL namespace for reversing, e.g. reverse('ride:ride-home').
app_name = 'ride'

# Routes for the ride-sharing app, grouped by role (owner / sharer / driver).
urlpatterns = [
    # Site root goes straight to the login page.
    path('', auth_views.LoginView.as_view(template_name='users/login.html')),
    path('home/', views.home, name='ride-home'),
    # path('about/', views.about, name='ride-about'),
    # Role landing pages.
    path('owner/', views.owner, name='owner-home'),
    path('sharer/', views.sharer, name='sharer-home'),
    path('driver/', views.driver, name='driver-home'),
    # Owner: create, list, inspect and modify ride requests.
    path('owner/request/', RideCreateView.as_view(), name='owner-request'),
    path('driver/request/', DriverListView.as_view(), name='driver-request'),
    path('owner/view/', OwnerRideListView.as_view(), name='owner-view'),
    path('ride/<int:pk>/', RideDetailView.as_view(), name='ride-detail'),
    path('owner/view/<int:pk>/update/', RideUpdateView.as_view(), name='owner-update'),
    path('owner/view/<int:pk>/delete/', RideDeleteView.as_view(), name='owner-delete'),
    # Driver: confirm and complete rides by ride id.
    path('driver/<int:ride_id>/confirm/', views.driver_confirm, name='driver-confirm'),
    path('driver/<int:ride_id>/complete/', views.driver_complete, name='driver-complete'),
    # Sharer: request, browse, join/cancel and review shared rides.
    path('sharer/request/', ShareCreateView.as_view(), name='share-request'),
    path('sharer/list/', SharePickRideListView.as_view(), name='share-list'),
    path('sharer/<int:ride_id>/join/', views.share_join, name='share-join'),
    path('sharer/<int:ride_id>/cancel/', views.share_cancel, name='share-cancel'),
    # path('sharer/view/', ShareRideListView.as_view(), name='share-view'),
    path('sharer/view/', views.share_view, name='share-view'),
    path('sharer/<int:pk>/update/', ShareUpdateView.as_view(), name='share-update'),
    path('driver/view/', views.driver_view, name='driver-view'),
]
|
nilq/baby-python
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.