content
stringlengths 0
1.05M
| origin
stringclasses 2
values | type
stringclasses 2
values |
|---|---|---|
"""
The wntr.network package contains methods to define a water network model,
network controls, and graph representation of the network.
"""
from wntr.network.model import WaterNetworkModel, Node, Link, Junction, Reservoir, Tank, Pipe, Pump, Energy, Valve, Curve, LinkStatus, WaterNetworkOptions, LinkType, NodeType
from wntr.network.controls import ControlLogger, ControlAction, TimeControl, ConditionalControl, _CheckValveHeadControl, _MultiConditionalControl, _PRVControl
from wntr.network.graph import WntrMultiDiGraph
|
nilq/baby-python
|
python
|
import cv2

# Load the image from disk; cv2.imread returns None (no exception) if the
# file is missing -- NOTE(review): no check is performed here.
img = cv2.imread("dog.jpg")
# Display in a window titled "dog", block until any key is pressed,
# then tear down all OpenCV windows.
cv2.imshow("dog", img)
cv2.waitKey()
cv2.destroyAllWindows()
|
nilq/baby-python
|
python
|
#-*- coding: utf-8 -*-
# https://github.com/Kodi-vStream/venom-xbmc-addons
#test film strem vk 1er page dark higlands & tous ces enfants m'appartiennent
from resources.hosters.hoster import iHoster
from resources.lib.handler.requestHandler import cRequestHandler
from resources.lib.parser import cParser
import re
UA = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:72.0) Gecko/20100101 Firefox/72.0'


class cHoster(iHoster):
    """Hoster for Netu / hqq.tv (also reachable via waaw.tv and vizplay.icu)."""

    def __init__(self):
        self.__sDisplayName = 'Netu'
        self.__sFileName = self.__sDisplayName

    def getDisplayName(self):
        """Return the display name of this hoster."""
        return self.__sDisplayName

    def setDisplayName(self, sDisplayName):
        # Keep the caller's label and append the hoster name in sky blue.
        self.__sDisplayName = sDisplayName + ' [COLOR skyblue]' + self.__sDisplayName + '[/COLOR]'

    def setFileName(self, sFileName):
        self.__sFileName = sFileName

    def getFileName(self):
        return self.__sFileName

    def setUrl(self, sUrl):
        """Normalize any known mirror/legacy URL to the canonical embed-player URL."""
        self.__sUrl = sUrl.replace('https', 'http')
        self.__sUrl = self.__sUrl.replace('http://netu.tv/', 'http://hqq.tv/')
        self.__sUrl = self.__sUrl.replace('http://waaw.tv/', 'http://hqq.tv/')
        self.__sUrl = self.__sUrl.replace('http://vizplay.icu/', 'http://hqq.tv/')
        self.__sUrl = self.__sUrl.replace('http://hqq.tv/player/hash.php?hash=', 'http://hqq.tv/player/embed_player.php?vid=')
        self.__sUrl = self.__sUrl.replace('http://hqq.tv/watch_video.php?v=', 'http://hqq.tv/player/embed_player.php?vid=')

    def __getIdFromUrl(self):
        """Extract the video id from the embed-player URL, or '' when absent."""
        # Raw string: the pattern previously relied on invalid '\/' and '\.'
        # escape sequences in a regular string (DeprecationWarning in py3).
        sPattern = r'https*:\/\/hqq\.(?:tv|player|watch)\/player\/embed_player\.php\?vid=([0-9A-Za-z]+)'
        oParser = cParser()
        aResult = oParser.parse(self.__sUrl, sPattern)
        if aResult[0]:
            return aResult[1][0]
        return ''

    def getPluginIdentifier(self):
        return 'netu'

    def isDownloadable(self):
        return False

    def getMediaLink(self):
        return self.__getMediaLinkForGuest()

    def GetHost(self, sUrl):
        """Return the host part of *sUrl*, or '' when it cannot be parsed."""
        oParser = cParser()
        sPattern = r'https*:\/\/(.+?)\/'
        aResult = oParser.parse(sUrl, sPattern)
        if aResult[0]:
            return aResult[1][0]
        return ''

    def __getMediaLinkForGuest(self):
        """Resolve the final media URL through the hqq.tv guest API.

        Returns (True, url_with_ua_hint) on success, (False, False) otherwise.
        """
        api_call = ''
        ids = self.__getIdFromUrl()
        self.__sUrl = 'http://hqq.tv/player/embed_player.php?vid=' + ids + '&autoplay=no'
        oRequestHandler = cRequestHandler(self.__sUrl)
        oRequestHandler.addHeaderEntry('User-Agent', UA)
        html = oRequestHandler.request()
        # The embed page carries the original video key in a JS variable.
        vid = re.search(r"videokeyorig *= *'(.+?)'", html, re.DOTALL).group(1)
        url = "time=1&ver=0&secure=0&adb=0%2F&v={}&token=>=&embed_from=0&wasmcheck=1".format(vid)
        oRequestHandler = cRequestHandler('https://hqq.tv/player/get_md5.php?' + url)
        oRequestHandler.addHeaderEntry('User-Agent', UA)
        oRequestHandler.addHeaderEntry('Accept', '*/*')
        oRequestHandler.addHeaderEntry('Accept-Language', 'fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3')
        oRequestHandler.addHeaderEntry('x-requested-with', 'XMLHttpRequest')
        oRequestHandler.addHeaderEntry('Referer', self.__sUrl)
        oRequestHandler.request()
        # The redirect target is the media URL; append container + UA hint.
        api_call = oRequestHandler.getRealUrl()
        if api_call:
            return True, api_call + '.mp4.m3u8' + '|User-Agent=' + UA
        return False, False
|
nilq/baby-python
|
python
|
__author__ = 'Richard Lincoln, r.w.lincoln@gmail.com'

""" This example demonstrates how to use the discrete Roth-Erev reinforcement
learning algorithms to learn the n-armed bandit task. """

import pylab
import scipy
from pybrain.rl.agents import LearningAgent
from pybrain.rl.explorers import BoltzmannExplorer #@UnusedImport
from pybrain.rl.experiments import Experiment
from pyreto.bandit import BanditEnvironment, BanditTask
from pyreto.roth_erev import RothErev, PropensityTable #@UnusedImport
from pyreto.roth_erev import VariantRothErev #@UnusedImport

# Payout per arm (columns) for each of the five bandit tasks (rows).
payouts = scipy.array([[200.0, 300.0, 100.0], # Expected value: 210
                       [900.0, 400.0, 600.0], # Expected value: 510
                       [700.0, 600.0, 550.0], # Expected value: 565 (prior comment said 595)
                       [150.0, 50.0, 1000.0], # Expected value: 147.5
                       [700.0, 800.0, 900.0]]) # Expected value: 790
# Probability of each payout.
# NOTE(review): row 3 sums to 0.9, not 1.0 -- confirm this is intentional.
distrib = scipy.array([[0.7, 0.2, 0.1],
                       [0.1, 0.6, 0.3],
                       [0.4, 0.2, 0.3],
                       [0.5, 0.45, 0.05],
                       [0.3, 0.5, 0.2]])
env = BanditEnvironment(payouts, distrib)
task = BanditTask(env)
# Propensity table sized by the number of tasks, uniformly initialized.
table = PropensityTable(payouts.shape[0])
table.initialize(500.0)
#learner = RothErev(experimentation=0.55, recency=0.3)
learner = VariantRothErev(experimentation=0.65, recency=0.3)
learner.explorer = BoltzmannExplorer(tau=100.0, decay=0.9995)
agent = LearningAgent(table, learner)
experiment = Experiment(task, agent)
epis = int(1e1)  # number of learning episodes
batch = 2        # interactions per episode
avgRewards = scipy.zeros(epis)
allActions = scipy.zeros(epis * batch)
c = 0
for i in range(epis):
    experiment.doInteractions(batch)
    # Record mean reward of this episode and the (1-based) actions taken.
    avgRewards[i] = scipy.mean(agent.history["reward"])
    allActions[c:c + batch] = agent.history["action"].flatten() + 1
    agent.learn()
    agent.reset()
    c += batch
pylab.figure(figsize=(16, 6))
#pylab.plot(avgRewards)
pylab.plot(allActions)
pylab.show()
|
nilq/baby-python
|
python
|
import abc
from enum import Enum as EnumCLS
from typing import Any, List, Optional, Tuple, Type
import pendulum
from starlette.requests import Request
from mongoengine import Document
from mongoengine import QuerySet
from fastapi_admin import constants
from fastapi_admin.widgets.inputs import Input
class Filter(Input):
    """Base class for all list-page filters."""

    def __init__(self, name: str, label: str, placeholder: str = "", null: bool = True, **context):
        """Create a filter bound to a model field.

        :param name: model field name
        :param label: label shown in the admin UI
        """
        super().__init__(name=name, label=label, placeholder=placeholder, null=null, **context)

    async def get_queryset(self, request: Request, value: Any, qs: QuerySet):
        """Apply this filter to ``qs`` using the parsed submitted value."""
        parsed = await self.parse_value(request, value)
        field_name = self.context.get("name")
        return qs.filter(**{field_name: parsed})
class Search(Filter):
    """Keyword-search filter over a single model field."""

    template = "widgets/filters/search.html"

    def __init__(
        self,
        name: str,
        label: str,
        search_mode: str = "equal",
        placeholder: str = "",
        null: bool = True,
    ):
        """
        Search for keyword

        :param name: model field name
        :param label: label shown in the admin UI
        :param search_mode: equal,contains,icontains,startswith,istartswith,endswith,iendswith,iexact,search
        """
        if search_mode == "equal":
            super().__init__(name, label, placeholder, null)
        else:
            # Bug fix: `null` was previously dropped on this branch, so
            # Search(..., null=False) silently behaved as null=True.
            super().__init__(name + "__" + search_mode, label, placeholder, null)
        self.context.update(search_mode=search_mode)
class Datetime(Filter):
    """Filter a datetime field by an inclusive "<start> - <end>" range."""

    template = "widgets/filters/datetime.html"

    def __init__(
        self,
        name: str,
        label: str,
        format_: str = constants.DATETIME_FORMAT_MOMENT,
        null: bool = True,
        placeholder: str = "",
    ):
        """
        Datetime range filter.

        :param format_: moment.js format string used by the picker
        """
        super().__init__(
            name + "__range", label, null=null, format=format_, placeholder=placeholder
        )

    async def parse_value(self, request: Request, value: Optional[str]):
        """Turn ``"<start> - <end>"`` into a pair of pendulum datetimes."""
        if not value:
            return None
        parts = value.split(" - ")
        return pendulum.parse(parts[0]), pendulum.parse(parts[1])

    async def render(self, request: Request, value: Tuple[pendulum.DateTime, pendulum.DateTime]):
        """Render the pair back into the "<start> - <end>" display string."""
        fmt = self.context.get("format")
        if value is not None:
            value = " - ".join((value[0].format(fmt), value[1].format(fmt)))
        return await super().render(request, value)
class Date(Datetime):
    """Datetime filter restricted to dates (renders a date-only picker)."""

    def __init__(
        self,
        name: str,
        label: str,
        format_: str = constants.DATE_FORMAT_MOMENT,
        null: bool = True,
        placeholder: str = "",
    ):
        super().__init__(
            name=name, label=label, format_=format_, null=null, placeholder=placeholder
        )
        # Tell the template to render in date-only mode.
        self.context.update(date=True)
class Select(Filter):
    """Dropdown filter; subclasses supply the selectable options."""

    template = "widgets/filters/select.html"

    def __init__(self, name: str, label: str, null: bool = True):
        super().__init__(name, label, null=null)

    @abc.abstractmethod
    async def get_options(self):
        """
        Return the selectable options as (display, value) tuples,
        e.g. ``[("on", 1), ("off", 2)]``.

        :return: list of tuple with display and value
        """

    async def render(self, request: Request, value: Any):
        """Inject the options into the template context, then render."""
        self.context.update(options=await self.get_options())
        return await super().render(request, value)
class Enum(Select):
    """Filter over the members of a Python enum."""

    def __init__(
        self,
        enum: Type[EnumCLS],
        name: str,
        label: str,
        enum_type: Type = int,
        null: bool = True,
    ):
        super().__init__(name=name, label=label, null=null)
        self.enum = enum
        self.enum_type = enum_type

    async def parse_value(self, request: Request, value: Any):
        """Coerce the raw request value back into an enum member."""
        return self.enum(self.enum_type(value))

    async def get_options(self):
        """(name, value) per member, with a leading blank when null is allowed."""
        options = [(member.name, member.value) for member in self.enum]
        if self.context.get("null"):
            options.insert(0, ("", ""))
        return options
class ForeignKey(Select):
    """Filter on a related model; options are (str(instance), pk) pairs."""
    def __init__(self, model: Type[Document], name: str, label: str, null: bool = True):
        super().__init__(name=name, label=label, null=null)
        self.model = model
    async def get_options(self):
        """Build the dropdown options from all related model instances."""
        ret = await self.get_models()
        options = [
            (
                str(x),
                x.pk,
            )
            for x in ret
        ]
        if self.context.get("null"):
            # Leading blank entry lets the filter be left unset.
            options = [("", "")] + options
        return options
    async def get_models(self):
        # NOTE(review): awaits Document.all() -- assumes the project's
        # Document exposes an awaitable .all(); plain mongoengine QuerySets
        # are not awaitable. Confirm against the actual model base class.
        return await self.model.all()
    async def render(self, request: Request, value: Any):
        # NOTE(review): int(value) assumes integer primary keys -- confirm.
        if value is not None:
            value = int(value)
        return await super().render(request, value)
class DistinctColumn(Select):
    """Filter whose options are the distinct values of one model column."""

    def __init__(self, model: Type[Document], name: str, label: str, null: bool = True):
        super().__init__(name=name, label=label, null=null)
        self.model = model
        self.name = name

    async def get_options(self):
        """One (display, value) pair per distinct column value."""
        rows = await self.get_values()
        options = [(str(row[0]), str(row[0])) for row in rows]
        if self.context.get("null"):
            options.insert(0, ("", ""))
        return options

    async def get_values(self):
        """Fetch the distinct values of ``self.name`` from the model."""
        return await self.model.all().distinct().values_list(self.name)
class Boolean(Select):
    """Tri-state boolean filter: TRUE, FALSE, or (optionally) unset."""

    async def get_options(self) -> List[Tuple[str, str]]:
        """Return the selectable (display, value) pairs."""
        options: List[Tuple[str, str]] = [("TRUE", "true"), ("FALSE", "false")]
        if self.context.get("null"):
            options = [("", "")] + options
        return options

    async def get_queryset(self, request: Request, value: str, qs: QuerySet) -> QuerySet:
        """Filter ``qs`` on whether the submitted value equals "true"."""
        field_name = self.context.get("name")
        return qs.filter(**{field_name: value == "true"})
|
nilq/baby-python
|
python
|
from itertools import chain
from functools import lru_cache
import abc
import collections
from schema import Schema
from experta.pattern import Bindable
from experta.utils import freeze, unfreeze
from experta.conditionalelement import OperableCE
from experta.conditionalelement import ConditionalElement
class BaseField(metaclass=abc.ABCMeta):
    """Abstract interface every fact-field validator must implement."""

    @abc.abstractmethod
    def validate(self, data):
        """Raise an exception on invalid data."""
class Field(BaseField):
    """A schema-validated fact field with an optional default value."""

    # Sentinel meaning "no default was provided".
    NODEFAULT = object()

    def __init__(self, schema_definition, mandatory=False, default=NODEFAULT):
        self.validator = Schema(schema_definition)
        self.mandatory = mandatory
        self.default = default

    def validate(self, data):
        """Validate (unfrozen) data against the schema; raises on mismatch."""
        self.validator.validate(unfreeze(data))
class Validable(type):
    """Metaclass that collects ``BaseField`` attributes into ``__fields__``.

    Attributes named ``_<digits>`` (e.g. ``_0``) describe positional fields
    and are registered under the corresponding int key.
    """

    @staticmethod
    def _normalize_key(key):
        # '_0', '_1', ... denote positional fields; store them as ints.
        # Guard on str: keys inherited from a base's __fields__ may already
        # be ints, which previously crashed on key.startswith().
        if isinstance(key, str) and key.startswith('_') and key[1:].isdigit():
            return int(key[1:])
        return key

    def __new__(mcl, name, bases, nmspc):
        # Register fields
        newnamespace = {"__fields__": dict()}
        # Inherit the registered fields of every Validable base class.
        for base in bases:
            if isinstance(base, Validable):
                for key, value in base.__fields__.items():
                    newnamespace["__fields__"][Validable._normalize_key(key)] = value
        # Move BaseField attributes of this class body into __fields__;
        # everything else passes through to the class namespace unchanged.
        for key, value in nmspc.items():
            key = Validable._normalize_key(key)
            if isinstance(value, BaseField):
                newnamespace["__fields__"][key] = value
            else:
                newnamespace[key] = value
        return super(Validable, mcl).__new__(mcl, name, bases, newnamespace)
class Fact(OperableCE, Bindable, dict, metaclass=Validable):
    """Base Fact class.

    A `Fact` is a dict whose positional constructor arguments are stored
    under integer keys (0, 1, ...) and keyword arguments under their names.
    The `Validable` metaclass collects `Field` class attributes into
    `__fields__`, which drive `validate()` and default-value lookup.
    """
    def __init__(self, *args, **kwargs):
        # Positional args get integer keys via enumerate(); kwargs keep names.
        self.update(dict(chain(enumerate(args), kwargs.items())))
        # Cache of materialized field defaults (see __missing__).
        self.__defaults = dict()
    def __missing__(self, key):
        # Fall back to the declared Field default for absent keys.
        if key not in self.__fields__:
            raise KeyError(key)
        else:
            default = self.__fields__[key].default
            if default is Field.NODEFAULT:
                raise KeyError(key)
            elif key in self.__defaults:
                # Reuse the previously materialized default.
                return self.__defaults[key]
            elif isinstance(default, collections.abc.Callable):
                # Callable defaults are invoked once and cached.
                return self.__defaults.setdefault(key, default())
            else:
                return self.__defaults.setdefault(key, default)
    def __setitem__(self, key, value):
        # Facts become immutable once they receive a fact id (declared).
        if self.__factid__ is None:
            super().__setitem__(key, freeze(value))
        else:
            raise RuntimeError("A fact can't be modified after declaration.")
    def validate(self):
        """Check every declared field; raise ValueError on bad/missing values."""
        for name, field in self.__fields__.items():
            if name in self:
                try:
                    field.validate(self[name])
                except Exception as exc:
                    raise ValueError(
                        "Invalid value on field %r for fact %r"
                        % (name, self))
            elif field.mandatory:
                raise ValueError(
                    "Mandatory field %r is not defined for fact %r"
                    % (name, self))
            else:
                pass
    def update(self, mapping):
        # Route every assignment through __setitem__ so values get frozen.
        for k, v in mapping.items():
            self[k] = v
    def as_dict(self):
        """Return a dictionary containing this `Fact` data."""
        return {k: unfreeze(v)
                for k, v in self.items()
                if not self.is_special(k)}
    def copy(self):
        """Return a copy of this `Fact`."""
        content = [(k, v) for k, v in self.items()]
        # Integer keys were positional arguments; restore their order.
        intidx = [(k, v) for k, v in content if isinstance(k, int)]
        args = [v for k, v in sorted(intidx)]
        kwargs = {k: v
                  for k, v in content
                  if not isinstance(k, int) and not self.is_special(k)}
        return self.__class__(*args, **kwargs)
    def has_field_constraints(self):
        # True if any value is a conditional element (pattern constraint).
        return any(isinstance(v, ConditionalElement) for v in self.values())
    def has_nested_accessor(self):
        # True if any key contains '__' inside it (nested field access),
        # ignoring leading/trailing dunder markers.
        return any(("__" in str(k).strip('__') for k in self.keys()))
    @staticmethod
    def is_special(key):
        # Keys of the form '__name__' carry metadata, not fact data.
        return (isinstance(key, str)
                and key.startswith('__')
                and key.endswith('__'))
    @property
    def __bind__(self):
        return self.get('__bind__', None)
    @__bind__.setter
    def __bind__(self, value):
        # Bypass our __setitem__ so binding works even after declaration.
        super().__setitem__('__bind__', value)
    @property
    def __factid__(self):
        return self.get('__factid__', None)
    @__factid__.setter
    def __factid__(self, value):
        super().__setitem__('__factid__', value)
    @classmethod
    def from_iter(cls, pairs):
        """Build a fact from an iterable of (key, value) pairs."""
        obj = cls()
        obj.update(dict(pairs))
        return obj
    def __str__(self):  # pragma: no cover
        if self.__factid__ is None:
            return "<Undeclared Fact> %r" % self
        else:
            return "<f-%d>" % self.__factid__
    def __repr__(self):  # pragma: no cover
        return "{}({})".format(
            self.__class__.__name__,
            ", ".join(
                (repr(v) if isinstance(k, int) else "{}={!r}".format(k, v)
                 for k, v in self.items()
                 if not self.is_special(k))))
    def __hash__(self):
        # Hash is computed once and memoized.
        try:
            return self._hash
        except AttributeError:
            self._hash = hash(frozenset(self.items()))
            return self._hash
    def __eq__(self, other):
        # Equal only to facts of the exact same class with equal content.
        return (self.__class__ == other.__class__
                and super().__eq__(other))
class InitialFact(Fact):
    """Marker fact that carries no content of its own."""
|
nilq/baby-python
|
python
|
from tensorflow.keras.models import Sequential
import tensorflow.keras.layers as layers
import numpy as np
from os.path import join
import os
from invoke.context import Context
import unittest
import templates
import ennclave_inference as ennclave
import config as cfg
def common(backend: str):
    """Generate, build and run a minimal "hello" forward pass for *backend*.

    backend is 'native' or 'sgx'. Writes a trivial ``{backend}_forward.cpp``,
    an empty parameter file and an SGX enclave config into the generated
    sources folder, builds the backend target with make, then calls the
    built forward function through the ennclave bindings.
    """
    target_dir = join(cfg.get_ennclave_home(), 'backend', 'generated')
    # The SGX build renders the preamble template under a different name.
    preamble_backend = backend
    if backend == 'sgx':
        preamble_backend = 'sgx_enclave'
    with open(join(target_dir, f'{backend}_forward.cpp'), 'w+') as forward_file:
        forward_file.write(templates.preamble.render(backend=preamble_backend))
        forward_file.write(
            f"print_out(\"Hello, this is backend {backend}\\n\");")
        forward_file.write(templates.postamble)
    # Create/truncate an empty parameter blob -- this test model has no weights.
    with open(join(target_dir, 'parameters.bin'), 'w') as parameter_file:
        pass
    # Enclave settings consumed by the SGX build.
    with open(join(target_dir, 'sgx_config.xml'), 'w') as config_file:
        config_file.write("""
<EnclaveConfiguration>
<ProdID>0</ProdID>
<ISVSVN>0</ISVSVN>
<StackMaxSize>0x40000</StackMaxSize>
<HeapInitSize>0x7e00000</HeapInitSize>
<HeapMaxSize>0x7e00000</HeapMaxSize>
<TCSNum>10</TCSNum>
<TCSPolicy>1</TCSPolicy>
<!-- Recommend changing 'DisableDebug' to 1 to make the sgx undebuggable for sgx release -->
<DisableDebug>0</DisableDebug>
<MiscSelect>0</MiscSelect>
<MiscMask>0xFFFFFFFF</MiscMask>
</EnclaveConfiguration>""")
    context = Context()
    with context.cd(cfg.get_ennclave_home()):
        context.run('mkdir -p build')
        with context.cd('build'):
            # context.run('cmake ..')
            context.run(f'make backend_{backend}')
    # Exercise the freshly built forward pass with empty input/output sizes.
    if backend == 'native':
        ennclave.native_forward(b'', 0, 0)
    else:
        ennclave.sgx_forward(b'', 0, 0)
# noinspection PyMethodMayBeStatic
class BasicTests(unittest.TestCase):
    """Smoke tests: generate, build and run the hello pass per backend."""
    def test_native(self):
        # The native backend needs no special hardware or SDK.
        common('native')
    @unittest.skipIf(os.environ.get('SGX_SDK') is None, "SGX is not available")
    def test_sgx(self):
        # Runs only when an SGX SDK is installed (signalled via SGX_SDK).
        common('sgx')
|
nilq/baby-python
|
python
|
import numpy as np
import os
import time
from . import util
from tensorboardX import SummaryWriter
import torch
class TBVisualizer:
    """Training visualizer: mirrors images/scalars to tensorboardX and
    appends human-readable progress messages to a plain-text loss log."""
    def __init__(self, opt):
        # opt must provide checkpoints_dir and name (run identifier).
        self._opt = opt
        self._save_path = os.path.join(opt.checkpoints_dir, opt.name)
        self._log_path = os.path.join(self._save_path, 'loss_log2.txt')
        self._tb_path = os.path.join(self._save_path, 'summary.json')
        self._writer = SummaryWriter(self._save_path)
        # Mark the start of a new training run in the text log.
        with open(self._log_path, "a") as log_file:
            now = time.strftime("%c")
            log_file.write('================ Training Loss (%s) ================\n' % now)
    def __del__(self):
        # Flush and close the tensorboard writer when the visualizer dies.
        self._writer.close()
    def display_current_results(self, visuals, it, is_train, save_visuals=True):
        """Log each visual to TensorBoard and optionally save it as a PNG.

        visuals: mapping label -> HxWxC image array; values are divided by
        255 below, so a 0-255 range is assumed -- TODO confirm with callers.
        """
        for label, image_numpy in visuals.items():
            sum_name = '{}/{}'.format('Train' if is_train else 'Test', label)
            # self._writer.add_image(sum_name, image_numpy, it)
            # HWC -> CHW and scale to [0, 1] for add_image.
            I=torch.from_numpy(image_numpy).permute(2,0,1)
            self._writer.add_image(sum_name, I/255, it)
            if save_visuals:
                util.save_image(image_numpy,
                                os.path.join(self._opt.checkpoints_dir, self._opt.name,
                                             'event_imgs', sum_name, '%08d.png' % it))
        self._writer.export_scalars_to_json(self._tb_path)
    def plot_scalars(self, scalars, it, is_train):
        """Write each named scalar to TensorBoard under Train/ or Test/."""
        for label, scalar in scalars.items():
            sum_name = '{}/{}'.format('Train' if is_train else 'Test', label)
            self._writer.add_scalar(sum_name, scalar, it)
    def print_current_train_errors(self, epoch, i, iters_per_epoch, errors, t, visuals_were_stored):
        """Print and append a one-line training-error summary to the log."""
        log_time = time.strftime("[%d/%m/%Y %H:%M:%S]")
        # 'v' flags that visuals were stored for this iteration.
        visuals_info = "v" if visuals_were_stored else ""
        message = '%s (T%s, epoch: %d, it: %d/%d, t/smpl: %.3fs) ' % (log_time, visuals_info, epoch, i, iters_per_epoch, t)
        for k, v in errors.items():
            message += '%s:%.3f ' % (k, v)
        print(message)
        with open(self._log_path, "a") as log_file:
            log_file.write('%s\n' % message)
    def print_current_validate_errors(self, epoch, errors, t):
        """Print and append a one-line validation-error summary to the log."""
        log_time = time.strftime("[%d/%m/%Y %H:%M:%S]")
        message = '%s (V, epoch: %d, time_to_val: %ds) ' % (log_time, epoch, t)
        for k, v in errors.items():
            message += '%s:%.3f ' % (k, v)
        print(message)
        with open(self._log_path, "a") as log_file:
            log_file.write('%s\n' % message)
    def save_images(self, visuals):
        """Save each visual as '<label>.png' under the samples folder."""
        for label, image_numpy in visuals.items():
            image_name = '%s.png' % label
            save_path = os.path.join(self._save_path, "samples", image_name)
            util.save_image(image_numpy, save_path)
|
nilq/baby-python
|
python
|
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .instance_agent_command_source_details import InstanceAgentCommandSourceDetails
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class InstanceAgentCommandSourceViaTextDetails(InstanceAgentCommandSourceDetails):
    """
    The source of the command when provided using plain text.
    """

    def __init__(self, **kwargs):
        """
        Initializes a new InstanceAgentCommandSourceViaTextDetails object
        with values from keyword arguments. The ``source_type`` attribute of
        this class defaults to ``TEXT`` and should not be changed.

        Supported keyword arguments (matching this class's properties):

        :param source_type: str -- allowed values are "TEXT",
            "OBJECT_STORAGE_URI", "OBJECT_STORAGE_TUPLE"
        :param text: str -- the plain text command
        :param text_sha256: str -- SHA-256 checksum of the text content
        """
        self.swagger_types = {
            'source_type': 'str',
            'text': 'str',
            'text_sha256': 'str'
        }
        self.attribute_map = {
            'source_type': 'sourceType',
            'text': 'text',
            'text_sha256': 'textSha256'
        }
        self._text = None
        self._text_sha256 = None
        # This subclass always represents the TEXT source type.
        self._source_type = 'TEXT'

    @property
    def text(self):
        """
        **[Required]** Gets the text of this InstanceAgentCommandSourceViaTextDetails:
        the plain text command.

        :rtype: str
        """
        return self._text

    @text.setter
    def text(self, text):
        """
        Sets the plain text command of this InstanceAgentCommandSourceViaTextDetails.

        :type: str
        """
        self._text = text

    @property
    def text_sha256(self):
        """
        Gets the SHA-256 checksum value of the text content.

        :rtype: str
        """
        return self._text_sha256

    @text_sha256.setter
    def text_sha256(self, text_sha256):
        """
        Sets the SHA-256 checksum value of the text content.

        :type: str
        """
        self._text_sha256 = text_sha256

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        # None never compares equal; otherwise compare full attribute state.
        return other is not None and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import argparse
import pandas as pd
from funcs import shortpath
def print_inp(inp_file_name):
    """Pretty-print each row of a tab-separated .inp file as a framed table."""
    inp_file_full = pd.read_csv(inp_file_name, sep='\t', header=1, dtype=str)
    for j in range(len(inp_file_full)):
        # One-row DataFrame so the transpose below yields name/value pairs.
        inp_file = inp_file_full.loc[[j], :]
        # format df for display
        # NOTE(review): display.max_colwidth=-1 is deprecated and removed in
        # pandas >= 1.0 in favour of None -- confirm the pinned pandas version.
        with pd.option_context('display.colheader_justify', 'left', 'display.max_rows', None,
                               'display.max_columns', None, 'display.max_colwidth', -1):
            df_display = inp_file.copy()
            # The site name is the parent directory of the sam_path entry.
            site_name = os.path.basename(os.path.dirname(df_display.sam_path.values[0]))
            df_display.sam_path = df_display.sam_path.map(shortpath)
            df_display = df_display.T
            df_display.rename(index={'dont_average_replicate_measurements': 'dont_average'},
                              inplace=True)
            # Framed output: '---- site ----', one ' | ' prefixed line per field.
            print("{:-^80}".format(" "+site_name+" "), end="\n")
            print("\n".join([" | {}".format(i)
                             for i in df_display.to_string(header=False).split("\n")]))
            print("{:-^80}".format(""))
def main():
    """Command-line entry point: optionally pretty-print each given inp file."""
    parser = argparse.ArgumentParser(prog="parse_inp.py",
                                     description="""Simple tools for inspecting inp
                                     files""")
    parser.add_argument('inp_file', nargs='*')
    parser.add_argument('-p', '--print', action='store_true',
                        help="""print contents of inp file in readable format""")
    namespace = parser.parse_args()
    # 'print' clashes with the builtin name, so fetch it via getattr.
    do_print = getattr(namespace, 'print')
    for inp_path in namespace.inp_file:
        if do_print:
            print_inp(inp_path)


if __name__ == "__main__":
    main()
|
nilq/baby-python
|
python
|
format = "%(asctime)s - %(levelname)s - %(name)s - %(message)s"
minimal_format = "%(message)s"
def _get_formatter_and_handler(use_minimal_format: bool = False):
logging_dict = {
"version": 1,
"disable_existing_loggers": True,
"formatters": {
"colored": {
"()": "coloredlogs.ColoredFormatter",
"format": minimal_format if use_minimal_format else format,
"datefmt": "%m-%d %H:%M:%S",
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "colored",
},
},
"loggers": {},
}
return logging_dict
def get_logging_config(django_log_level: str, wkz_log_level: str):
    """Return a dictConfig mapping with console loggers for django and wizer."""
    config = _get_formatter_and_handler()
    for logger_name, level in (("django", django_log_level), ("wizer", wkz_log_level)):
        config["loggers"][logger_name] = {
            "handlers": ["console"],
            "level": level,
        }
    return config
|
nilq/baby-python
|
python
|
import argparse
import json
import os
import shutil
import logging
from weed_annotator.semantic_segmentation import utils
from weed_annotator.semantic_segmentation.train import train
from weed_annotator.semantic_segmentation.inference import inference
from weed_annotator.post_processing.post_process_masks import post_process_masks
from weed_annotator.full_pipeline.mask_proposal_evaluator import MaskProposalsEvaluator
from weed_annotator.image_composition.compose_imgs import compose_images
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='weed_annotator')
    parser.add_argument('-c', '--config_folder', default='configs', type=str,
                        help='Folder with pipeline configs')
    args = parser.parse_args()

    # Create a console logger for pipeline progress messages.
    logger = logging.getLogger('weed_annotator_logger')
    logger.setLevel(logging.INFO)
    fh = logging.StreamHandler()
    fh_formatter = logging.Formatter('%(asctime)s %(message)s')
    fh.setFormatter(fh_formatter)
    logger.addHandler(fh)

    # Setting seed for reproducibility.
    utils.set_seeds()
    pipeline_config = json.load(open(f"{args.config_folder}/weed_annotator.json"))

    # Image composition: generate training images, or reuse an existing folder.
    if pipeline_config["image_composition"]["enable"]:
        logger.info("Generating image compositions for training.")
        img_comp_config = json.load(open(f"{args.config_folder}/image_composition.json"))
        compose_images(img_comp_config)
        train_folder = f"{img_comp_config['folders']['out_folder']}/{img_comp_config['folders']['ident']}"
    else:
        train_folder = pipeline_config["image_composition"]["reuse"]

    # Training semantic segmentation (or reuse a previously trained model).
    train_config = json.load(open(f"{args.config_folder}/seg_config.json"))
    if pipeline_config["sem_segmentation"]["enable_train"]:
        train_config["data"]["train_data"] = train_folder
        logger.info(f"Training semantic segmentation model on: {train_folder}.")
        train(train_config)
        log_folder = f"{train_config['logging_path']}/{train_config['train_ident']}"
    else:
        log_folder = pipeline_config["sem_segmentation"]["reuse_model"]

    # Inference: predict raw masks (or reuse previously generated ones).
    input_data = pipeline_config["input_imgs"]
    if pipeline_config["sem_segmentation"]["enable_inference"]:
        logger.info(f"Generating mask predictions for: {input_data}.")
        mp_raw = f"/tmp/{train_config['train_ident']}/mask_proposals/raw"
        # Bug fix: exist_ok so a leftover folder from an aborted run does not
        # crash the pipeline (consistent with the makedirs call below).
        os.makedirs(mp_raw, exist_ok=True)
        inference(f"{log_folder}/config.json", f"{log_folder}/checkpoints/best.pth", input_data, mp_raw)
    else:
        mp_raw = pipeline_config["sem_segmentation"]["reuse_masks"]

    # Post-process the raw masks into final mask proposals.
    if pipeline_config["post_processing"]["enable"]:
        logger.info("Post-processing mask predictions.")
        mp_pp = pipeline_config["mask_proposals"]
        os.makedirs(mp_pp, exist_ok=True)
        post_process_masks(f"{input_data}", mp_raw, mp_pp)
    else:
        mp_pp = pipeline_config["post_processing"]["reuse"]

    # Evaluation: only when ground-truth annotations ship with the input.
    if pipeline_config["enable_evaluation"] and os.path.exists(f"{input_data}/annotations.xml"):
        logger.info(f"Evaluation of pipeline performance on: {input_data}.")
        me = MaskProposalsEvaluator(input_data, train_config["data"]["weed_label"])
        result_raw = me.evaluate(mp_raw)
        with open(f"{log_folder}/eval_raw.json", 'w') as f:
            json.dump(result_raw, f)
        result_pp = me.evaluate(mp_pp)
        with open(f"{log_folder}/eval_pp.json", 'w') as f:
            json.dump(result_pp, f)

    # Cleanup: the raw masks in /tmp only exist if we generated them above.
    if pipeline_config["sem_segmentation"]["enable_inference"]:
        shutil.rmtree(f"{mp_raw}")
|
nilq/baby-python
|
python
|
"""
This module contains helper functions.
The main purpose is to remove clutter in the main
file
"""
from __future__ import print_function
import argparse
import sys
import os
import logging
import copy
import subprocess
from operator import attrgetter
from string import Formatter
try:
# Python 3
import _string
except ImportError:
# Python 2
pass
class StyleFormatter(Formatter):
    """ Custom formatter that handles a nested field of two levels,
    such as '{mass[element]}' with kwargs {"element": "Pr", "mass": {"Pr": 128}},
    which resolves to 128.
    """
    def get_value(self, field_name, args, kwargs):
        # Return kwargs[field_name], else '' -- missing keys are silent.
        return kwargs.get(field_name, '')
    def get_field(self, field_name, args, kwargs):
        # To illustrate, the example '{mass[element]}' is used with
        # the kwargs {"element":"Pr", "mass":{"Pr":128}}
        # Split the field_name into the leading field and an iterator over
        # the remaining accessors, ex. 'mass' + <fieldnameiterator object>
        try:
            # Python 2.7
            first, rest = field_name._formatter_field_name_split()
        except:
            # Python 3 (only tested on 3.5)
            first, rest = _string.formatter_field_name_split(field_name)
        # obj = kwargs[first], or '' on a missing key
        # ex. obj = {"Pr":128}
        obj = self.get_value(first, args, kwargs)
        # "rest" yields (is_attr, keyword) pairs; is_attr is True for
        # attribute access (x.keyword), False for item access (x[keyword]).
        # It is usually only one level deep.
        for is_attr, keyword in rest:
            # If the keyword names another kwarg, use that kwarg's value as
            # the index into obj.
            # ex. obj = {"Pr":128}["Pr"] = 128
            if keyword in kwargs:
                obj = obj[kwargs.get(keyword)]
        # ex. 128
        return obj, first
def correct(input_argument):
    """Normalize a yes/no input argument to 'yes' or 'no'.

    Parameters: input_argument: the value given by the user
    Returns: 'no' or 'yes'
    Raises: SystemExit (via sys.exit) with an explanatory message when the
        argument is not one of n/no/y/yes (the old docstring wrongly claimed
        the function returns 'error' in that case)
    """
    if input_argument in ('n', 'no'):
        return 'no'
    elif input_argument in ('y', 'yes'):
        return 'yes'
    else:
        # Typo fix in the user-facing message: "gives as" -> "given as".
        error_message = " please make sure these input arguments are given as: \n input = 'no' or input = 'yes' \n input = 'n' or input = 'y' \n input = ['no', 'yes'] or input = ['n', 'y'] \n"
        sys.exit(error_message)
def mkdir(directory):
    """ Check if directory exists. If not, create it
    Parameters: directory: the name of the directory
    Returns: None
    Algorithm: Attempt the creation and tolerate an already-existing
        directory. This avoids the check-then-create race of testing
        os.path.exists first, and stays Python 2 compatible (no exist_ok).
    """
    try:
        os.makedirs(directory)
    except OSError:
        # Re-raise real failures (permissions, file in the way, ...).
        if not os.path.isdir(directory):
            raise
def make_iterable(dictionary):
    """ Makes every entry in the dictionary iterable and returns the result
    Parameters: dictionary: the dict to be made iterable
    Output: The iterable dictionary
    Algorithm: Wrap every non-sequence value in a one-element list, then
        drop duplicate items while preserving order. Dict-valued entries
        (detected via the TypeError from set() on unhashables) are unwrapped
        and their inner lists de-duplicated instead."""
    def _unique(sequence):
        # Order-preserving removal of duplicate entries.
        seen = []
        for item in sequence:
            if item not in seen:
                seen.append(item)
        return seen

    result = copy.deepcopy(dictionary)
    for key in dictionary:
        if not isinstance(dictionary[key], (tuple, list)):
            result[key] = [result[key]]
    # Ensure every item in the user-given lists is unique.
    for key, value in result.items():
        try:
            # Tuple or list values: rebuild only when duplicates exist.
            if len(set(value)) != len(value):
                result[key] = _unique(value)
        except TypeError:
            # Unhashable contents: the wrapped value is a dict. De-duplicate
            # each of its list entries and store the dict itself (unwrapped).
            inner = value[0]
            for inner_key, inner_values in inner.items():
                if len(set(inner_values)) != len(inner_values):
                    inner[inner_key] = _unique(inner_values)
            result[key] = inner
    return result
def which(program):
    """ Find path of binary
    Parameters: program: name of binary
    Returns: Path to binary if found, else None
    Algorithm: Mimic the UNIX 'which' command: an explicit path is accepted
        only if executable; a bare name is searched along PATH
    """
    # The redundant nested 'import os' was removed; the module-level import
    # already provides os.
    def is_exe(fpath):
        # A usable binary is an existing regular file with execute permission.
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

    fpath, fname = os.path.split(program)
    if fpath:
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            path = path.strip('"')
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file
    return None
def talys_version(local=False):
    """ Get the version of TALYS being used
    Parameters: local: Whether to use a binary talys file in the current
                directory or the system-wide talys
    Returns: String of the format #.#
    Algorithm: Call shell command "strings" and search the output for
               keywords that first appeared in each TALYS release
    """
    # Find the path of TALYS
    if local:
        talys_path = os.path.join(os.getcwd(), "talys")
    else:
        talys_path = which("talys")
    if talys_path is None or "talys" not in talys_path:
        raise RuntimeError("Could not find talys.")
    # Keywords unique to each release, newest first, so the first match
    # decides the version
    markers = (
        ("pshiftadjust", "1.8"),
        ("fisbaradjust", "1.6"),
        ("deuteronomp", "1.4"),
        ("gamgamadjust", "1.2"),
        ("massmodel", "1.0"),  # last resort
    )
    # Use the UNIX command 'strings' to extract all strings from the binary
    strings = subprocess.check_output(["strings", talys_path]).decode("utf8")
    for marker, version in markers:
        if marker in strings:
            return version
    return "unknown"
class SortingHelpFormatter(argparse.RawTextHelpFormatter):
""" Custom formatter for argparse help """
def add_arguments(self, actions):
actions = sorted(actions, key=attrgetter('option_strings'))
super(SortingHelpFormatter, self).add_arguments(actions)
def get_args():
    """
    Manages the argparse module.
    Any changes to the arguments from terminal are done here
    Parameters: none
    Returns: class instance of 'argparse.Namespace'
    Algorithm: Add arguments to argparse.ArgumentParser(), fix some arguments
    regarding logging, and return the parsed arguments.
    """
    parser = argparse.ArgumentParser(description=("Automates the process of "
        "creating and running thousands of simulations with TALYS"),
        formatter_class=SortingHelpFormatter)
    parser.add_argument("-d", "--debug",
                        help="show debugging information. Overrules log and verbosity",
                        action="store_true")
    parser.add_argument("-l", "--log",
                        help="set the verbosity for the log file",
                        choices=["DEBUG", "INFO",
                                 "WARNING", "ERROR", "CRITICAL"],
                        type=str.upper, default="INFO")
    parser.add_argument("-v", "--verbosity",
                        help="set the verbosity level",
                        choices=["DEBUG", "INFO",
                                 "WARNING", "ERROR", "CRITICAL"],
                        type=str.upper, default="INFO")
    parser.add_argument("--lfile",
                        help="filename of the log file",
                        type=str, default="talys.log",
                        metavar='LOG_FILENAME',
                        dest="log_filename")
    parser.add_argument("--efile",
                        help="filename of the error file",
                        type=str, default="error.log",
                        metavar='ERROR_FILENAME',
                        dest="error_filename")
    # BUG FIX: the help text claimed the default was input.json while the
    # actual default is structure.json
    parser.add_argument("--ifile",
                        help=("the filename for where the options are stored"
                              "\nDefault is structure.json"),
                        type=str, default="structure.json",
                        metavar='INPUT_FILENAME',
                        dest="input_filename")
    parser.add_argument("-p", "--processes",
                        help=("set the number of processes the script will use."
                              "\nShould be less than or equal to number of CPU cores."
                              "\nIf no N is specified, all available cores are used"),
                        type=int, nargs="?",
                        metavar='N', const=0)
    parser.add_argument("--enable-pausing",
                        help="enable pausing by running a process that checks for input",
                        action="store_true",
                        dest="enable_pausing")
    parser.add_argument("--multi",
                        help=("the name of the level at which multiprocessing will be run."
                              "\nThis should only be used if _only_ mass and elements vary"),
                        nargs='+', type=str, default=[])
    parser.add_argument("--default-excepthook",
                        help="use the default excepthook",
                        action="store_true",
                        dest="default_excepthook")
    parser.add_argument("--disable-filters",
                        help="do not filter log messages",
                        action="store_true",
                        dest="disable_filters")
    parser.add_argument("-r", "--resume",
                        help=("resume from previous checkpoint. If there are"
                              "\nmore than one TALYS-directory, it will choose"
                              "\nthe last directory"),
                        action="store_true")
    # BUG FIX: help text read "for not run TALYS"
    parser.add_argument("--dummy",
                        help="do not run TALYS, only create the directories",
                        action="store_true")
    args = parser.parse_args()
    # Convert the input strings to the corresponding logging type
    args.log = getattr(logging, args.log)
    args.verbosity = getattr(logging, args.verbosity)
    # --debug overrules --log and --verbosity
    if args.debug:
        args.log = logging.DEBUG
        args.verbosity = logging.DEBUG
    return args
class Cd:
    """ Context manager that temporarily changes the working directory """

    def __init__(self, newPath):
        # Expand '~' so paths relative to $HOME also work
        self.newPath = os.path.expanduser(newPath)

    def __enter__(self):
        # Remember where we came from, then switch to the target directory
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)

    def __exit__(self, etype, value, traceback):
        # Always restore the original directory, even after an exception
        os.chdir(self.savedPath)
def getkey():
    """ Read a single keypress from stdin without waiting for Enter.

    Returns: the byte read from stdin.
    Algorithm: temporarily switch the controlling terminal into
    non-canonical, no-echo mode so a single byte can be read, then
    restore the previous settings.
    """
    import termios
    TERMIOS = termios
    fd = sys.stdin.fileno()
    old = termios.tcgetattr(fd)
    new = termios.tcgetattr(fd)
    # Disable canonical (line-buffered) mode and local echo
    new[3] = new[3] & ~TERMIOS.ICANON & ~TERMIOS.ECHO
    # Return from read() as soon as 1 byte is available, with no timeout
    new[6][TERMIOS.VMIN] = 1
    new[6][TERMIOS.VTIME] = 0
    termios.tcsetattr(fd, TERMIOS.TCSANOW, new)
    c = None
    try:
        c = os.read(fd, 1)
    finally:
        # Restore the terminal settings no matter what happened
        termios.tcsetattr(fd, TERMIOS.TCSAFLUSH, old)
    return c
|
nilq/baby-python
|
python
|
"""
Use to populate:
from crs.populate_crs_table import CrsFromApi
crs_api = CrsFromApi()
crs_api.populate()
"""
import re
import math
from bills.models import Bill
from crs.scrapers.everycrsreport_com import EveryCrsReport
# Bill's types {'sres', 'hjres', 'hconres', 's', 'hres', 'sjres', 'hr', 'sconres'}
# Case-insensitive, multi-line match for citations such as "H.R. 1234" or
# "S. J. Res. 7"; captures the bill-type prefix plus a 1-4 digit number.
BILL_NUMBER_RE = re.compile(r"\W((?:h\.\s?r\.|s\.|h\.conres\.|s\.conres\.|h\.\s?j\.\s?res\.|s\.\s?j\.\s?res\.|"
                            + r"h\.\s?res\.|s\.\s?res\.)\s?(?:[1-9]\d{0,3}))", re.I | re.M)
def cleanBillNumber(billnumber):
    """Normalize a bill citation: drop dots and spaces, lowercase it."""
    return billnumber.translate(str.maketrans("", "", ". ")).lower()
def get_congress_number_for_year(year: str) -> int:
    """Return the Congress number in session during *year*.

    Congresses are two-year periods; the 1st convened in 1789.
    """
    # Ceiling division without math.ceil: -(-x // 2) == ceil(x / 2)
    return -(-(int(year) - 1788) // 2)
class CrsFromApi:
    """Populates CRS reports from the everycrsreport.com API and links them
    to existing Bill records via bill numbers found in the report title and
    raw content.
    """

    # Counters accumulated across all reports processed by populate()
    matched_count = 0
    extracted_count = 0

    def process_bills_for_report(self, bill_numbers, report, source='title'):
        """Link *report* to every existing Bill matching *bill_numbers*.

        Parameters:
            bill_numbers: iterable of cleaned bill numbers (e.g. 'hr1234')
            report: the report model instance being populated
            source: where the numbers were found ('title' or 'text')
        """
        congress_number = get_congress_number_for_year(report.date[:4])
        # construct IDs and remove duplicates
        bill_ids = set()
        for bill_number in bill_numbers:
            bill_id = f'{congress_number}{bill_number}'.replace(' ', '')\
                .replace('\n', '').lower()
            bill_ids.add(bill_id)
            # Also try the previous Congress if the report was published in
            # January or February, when the new Congress has just convened
            if int(report.date[5:7]) < 3:
                bill_id = f'{congress_number-1}{bill_number}'.replace(' ', '')\
                    .replace('\n', '').lower()
                bill_ids.add(bill_id)
        self.extracted_count += len(bill_ids)
        for bill_id in bill_ids:
            try:
                bill = Bill.objects.get(bill_congress_type_number=bill_id)
                print(f'{bill_id} was matched, use existing bill.')
                self.matched_count += 1
            except Bill.DoesNotExist:
                print(f'{bill_id} does not have a match in Bills.')
                # Do not create a bill if it is not found in the db
                continue
            bill.save()
            report.bills.add(bill)

    def populate(self):
        """Scrape all reports, saving those from 2010 onwards and linking
        them to Bills referenced in their title and raw content."""
        reports_count = 0
        api = EveryCrsReport()
        for report in api.scrape():
            reports_count += 1
            print(report)
            # ignore reports with unparsable dates or from before 2010
            try:
                reportyear = int(report.date[:4])
            except ValueError:
                continue
            if reportyear < 2010:
                continue
            report.save()
            # BUG FIX: findall returns a list, but wrapping it in map()
            # made the `if bill_numbers:` checks below always truthy;
            # materialize the cleaned numbers so the emptiness test works.
            bill_numbers = [cleanBillNumber(number)
                            for number in BILL_NUMBER_RE.findall(report.title)]
            if bill_numbers:
                self.process_bills_for_report(bill_numbers, report, source='title')
            if report.report_content_raw:
                bill_numbers = [
                    cleanBillNumber(number)
                    for number in BILL_NUMBER_RE.findall(report.report_content_raw)
                ]
                if bill_numbers:
                    self.process_bills_for_report(bill_numbers, report, source='text')
            report.save()  # call save after all bills have been added
        print(f'{reports_count} reports processed')
        print(f'{self.extracted_count} bill numbers extracted')
        print(f'{self.matched_count} bills matched')
|
nilq/baby-python
|
python
|
import os
import numpy as np
import pandas as pd
from trackml.dataset import load_event
from trackml.score import score_event
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import DBSCAN
class Clusterer(object):
    """DBSCAN-based clusterer assigning track labels to TrackML hits."""

    def __init__(self, eps):
        # DBSCAN neighborhood radius in the standardized feature space
        self.eps = eps

    def _preprocess(self, hits):
        """Derive direction-like features from hit coordinates, standardize
        them, and return the feature matrix (also adds x2/y2/z2 columns to
        *hits* in place)."""
        x, y, z = hits.x.values, hits.y.values, hits.z.values
        # Normalize x and y by the full 3D radius ...
        radius3d = np.sqrt(x ** 2 + y ** 2 + z ** 2)
        hits['x2'] = x / radius3d
        hits['y2'] = y / radius3d
        # ... and z by the transverse (cylindrical) radius
        radius2d = np.sqrt(x ** 2 + y ** 2)
        hits['z2'] = z / radius2d
        scaler = StandardScaler()
        return scaler.fit_transform(hits[['x2', 'y2', 'z2']].values)

    def predict(self, hits):
        """Cluster the hits and return one integer label per hit."""
        features = self._preprocess(hits)
        model = DBSCAN(eps=self.eps, min_samples=3, algorithm='kd_tree')
        return model.fit_predict(features)
def create_one_event_submission(event_id, hits, labels):
    """Build the per-event submission DataFrame with integer columns
    (event_id, hit_id, track_id), one row per hit."""
    columns = {
        "event_id": [event_id] * len(hits),
        "hit_id": hits.hit_id.values,
        "track_id": labels,
    }
    return pd.DataFrame(columns).astype(int)
if __name__ == "__main__":
    # training and test data folder paths
    path_to_train = "../../data/raw/train_sample/train_100_events"
    # chose a single event to work with
    event_prefix = "event000001000"
    # read data (hits/cells/particles/truth tables of one TrackML event)
    hits, cells, particles, truth = load_event(os.path.join(path_to_train, event_prefix))
    # perform clustering; eps chosen empirically for the scaled features
    model = Clusterer(eps=0.006)
    labels = model.predict(hits)
    print(labels)
    # score the clustering against ground truth (event_id 0 is arbitrary
    # here since truth only covers this single event)
    submission = create_one_event_submission(0, hits, labels)
    score = score_event(truth, submission)
    print("Your score: ", score)
|
nilq/baby-python
|
python
|
"""
This module encapsulates QCoDeS database: its schema, structure, convenient
and relevant queries, wrapping around :mod:`sqlite3`, etc.
The dependency structure of the sub-modules is the following:
::
.connection .settings
/ | \ |
/ | \ |
/ | V V
| | .query_helpers
| | | |
| V V |
| .db_upgrades |
| / V
| / .queries
v v
.database
"""
|
nilq/baby-python
|
python
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# vim: fenc=utf-8
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
#
"""
File name: constants.py
Author: dhilipsiva <dhilipsiva@gmail.com>
Date created: 2016-11-20
"""
class QuestionType(object):
    """Integer codes for the supported survey question types."""
    UNKNOWN = -1          # type could not be determined
    MULTIPLE_CHOICE = 0   # several options, several selectable
    CHOICE = 1            # several options, one selectable
    BOOLEAN = 2           # yes/no answer
    TEXT = 3              # free-form text answer
nilq/baby-python
|
python
|
"""
author:xing xiangrui
test os.system()
"""
import os
# Change into the mAP directory so main.py resolves its relative paths;
# note that os.system("cd ...") would only affect a throwaway subshell.
os.chdir("mAP/")
#os.system("cd mAP/")
os.system("python main.py -na")
|
nilq/baby-python
|
python
|
'''
Author : ZHP
Date : 2022-04-12 16:00:40
LastEditors : ZHP
LastEditTime : 2022-04-12 17:01:01
FilePath : /models/PointFormer/similarity.py
Description :
Copyright 2022 ZHP, All Rights Reserved.
2022-04-12 16:00:40
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import sys
sys.path.append("../..")
from models.pointnet.pointNet2_Ops import *
from models.PointFormer.basic_block import K_MLP_Layer
class Affinity(nn.Module):
    """Base class for point-cloud similarity (affinity) measures.

    Subclasses implement forward(src, dst) and return a [B, N, S] score
    matrix between the two point clouds.
    """

    def __init__(self) -> None:
        super().__init__()
        pass

    def forward(self, src, dst):
        # Base class defines no measure; subclasses override this.
        pass

    def extra_repr(self) -> str:
        # Show only the hyper-parameters this subclass actually defines
        print_paras = ["sigma", "k", "mu", "epsilon"]
        parts = [f'{para}={self.__dict__[para]}'
                 for para in print_paras if para in self.__dict__]
        # BUG FIX: the old code called s.format(**self.__dict__) on the
        # already-interpolated string, which breaks whenever a parameter's
        # repr contains '{' or '}'. The joined string is returned directly.
        return ','.join(parts)
class pointnet2(Affinity):
    """KNN inverse-distance interpolation weights, as used in PointNet++
    feature propagation."""

    def __init__(self, k=3) -> None:
        super().__init__()
        # number of nearest neighbors used for interpolation
        self.k = k

    def forward(self, src, dst):
        '''
        Author: ZHP
        description: interpolation weights from PointNet++
        param {tensor} src: larger point cloud [B, N, 3]
        param {tensor} dst: smaller point cloud [B, S, 3]
        return {tensor} score: similarity matrix [B, N, S]
        '''
        B, N, _ = src.shape
        # KNN interpolation: square_distance presumably returns pairwise
        # *squared* distances (see its definition in pointNet2_Ops)
        dists = square_distance(src, dst)  # [B, N, S]
        dists, idx = dists.sort(dim=-1)  # [B, N, S]
        # Keep, for each of the N src points, the k closest dst points
        # (their distances and indices)
        dists, idx = dists[:, :, :self.k], idx[:, :, :self.k]  # [B, N, k]
        # Inverse-distance weights; the +1e-8 guards against division by zero
        dist_recip = 1.0 / (dists + 1e-8)  # [B, N, k]
        norm = torch.sum(dist_recip, dim=2, keepdim=True)  # denominator, [B, N, 1]
        weight = dist_recip / norm  # weight_i = w_i / sum(w_i)
        score = torch.zeros(B, N, dst.shape[1]).to(src.device)  # [B, N, S]
        # Scatter the k normalized weights back to their dst positions;
        # all other entries of score remain zero
        score = score.scatter_(-1, idx, weight)  # [B, N, S]
        return score
class euclidean(Affinity):
    """Inverse-euclidean-distance affinity, softmax-normalized over dst."""

    def __init__(self, mu=2, epsilon=1e-8) -> None:
        super().__init__()
        self.mu = mu            # distance exponent
        self.epsilon = epsilon  # guards against division by zero

    def forward(self, src, dst):
        '''
        Author: ZHP
        description: inverse-distance weights 1 / (||xi - yj||^mu + epsilon)
        param {tensor} src: larger point cloud [B, N, 3]
        param {tensor} dst: smaller point cloud [B, S, 3]
        return {tensor} score: similarity matrix [B, N, S]
        '''
        # NOTE(review): square_distance appears to return *squared*
        # distances, so this computes (||xi-yj||^2)^mu rather than
        # ||xi-yj||^mu — confirm which exponent is intended.
        dists = square_distance(src, dst)  # [B, N, S]
        dists = torch.pow(dists, exponent=self.mu)
        score = 1 / (dists + self.epsilon)  # [B, N, S]
        score = F.softmax(score, dim=-1)
        return score
class cosine_similarity(Affinity):
    """Pairwise cosine-similarity affinity, softmax-normalized over dst.

    Unlike F.cosine_similarity (which is element-wise between aligned
    vectors), this produces the full [B, N, S] pairwise matrix.
    """

    def __init__(self, epsilon=1e-8) -> None:
        super().__init__()
        # Lower bound on the norm product, guarding against division by zero
        self.epsilon = epsilon

    def forward(self, src, dst):
        """src: [B, N, 3] larger cloud; dst: [B, S, 3] smaller cloud.
        Returns the [B, N, S] similarity matrix."""
        dot_products = torch.matmul(src, dst.transpose(1, -1))  # [B, N, S]
        src_norms = torch.norm(src, dim=-1, keepdim=True)       # [B, N, 1]
        dst_norms = torch.norm(dst, dim=-1, keepdim=True)       # [B, S, 1]
        norm_products = torch.matmul(src_norms, dst_norms.transpose(1, -1))  # [B, N, S]
        # Clamp the denominator from below so zero-norm points cannot
        # produce a division by zero
        norm_products = torch.max(norm_products,
                                  torch.ones_like(norm_products) * self.epsilon)
        return F.softmax(dot_products / norm_products, dim=-1)
class gaussian_kernel(Affinity):
    """Gaussian-kernel-style affinity, softmax-normalized over dst.

    NOTE(review): the classic RBF kernel uses the *squared* norm,
    exp(-||x1-x2||^2 / (2*sigma^2)); this implementation applies the
    formula to the unsquared norm — confirm which is intended.
    """

    def __init__(self, sigma=1) -> None:
        super().__init__()
        self.sigma = sigma  # kernel bandwidth

    def forward(self, src, dst):
        """src: [B, N, 3], dst: [B, S, 3] -> [B, N, S] similarity matrix."""
        differences = src[:, :, None] - dst[:, None]  # [B, N, S, 3]
        distances = torch.norm(differences, dim=-1)   # [B, N, S]
        logits = -0.5 * (distances / (self.sigma ** 2))
        return F.softmax(torch.exp(logits), dim=-1)
class chebyshev_distance(Affinity):
    """Inverse-Chebyshev-distance affinity: 1 / (max_i |x_i - y_i| + eps),
    softmax-normalized over dst."""

    def __init__(self, epsilon=1e-8) -> None:
        super().__init__()
        self.epsilon = epsilon  # guards against division by zero

    def forward(self, src, dst):
        '''
        description: Chebyshev distance max|xi-yi|, inverted and
                     softmax-normalized
        param {tensor} src: larger point cloud [B, N, 3]
        param {tensor} dst: smaller point cloud [B, S, 3]
        return {tensor} score: similarity matrix [B, N, S]
        '''
        dist = src[:, :, None] - dst[:, None]  # [B, N, S, 3]
        # BUG FIX: the Chebyshev distance is the maximum of the *absolute*
        # coordinate differences; without abs() the result depended on the
        # sign of the differences instead of their magnitude.
        dist = torch.max(torch.abs(dist), dim=-1)[0]  # [B, N, S]
        dist = 1.0 / (dist + self.epsilon)
        score = F.softmax(dist, dim=-1)  # [B, N, S]
        return score
class minkowski_distance(Affinity):
    """Inverse-Minkowski-distance affinity:
    1 / ((sum_i |xi-yi|^p)^(1/p) + eps), softmax-normalized over dst."""

    def __init__(self, p=1, epsilon=1e-8) -> None:
        super().__init__()
        # p=1: Manhattan, p=2: Euclidean, p -> inf: Chebyshev
        self.p = p
        self.epsilon = epsilon  # guards against division by zero

    def forward(self, src, dst):
        '''
        description: Minkowski distance [sum(|xi-yi|^p)]^(1/p), inverted
                     and softmax-normalized
        param {tensor} src: larger point cloud [B, N, 3]
        param {tensor} dst: smaller point cloud [B, S, 3]
        return {tensor} score: similarity matrix [B, N, S]
        '''
        dist = src[:, :, None] - dst[:, None]  # [B, N, S, 3]
        # BUG FIX: the definition requires |xi - yi|; with signed
        # differences an odd p produced negative sums and the fractional
        # root below then yielded NaNs.
        dist = torch.pow(torch.abs(dist), self.p)
        dist = torch.sum(dist, dim=-1)
        dist = torch.pow(dist, 1 / self.p)
        dist = 1 / (dist + self.epsilon)
        score = F.softmax(dist, dim=-1)
        return score
class PointUpsampleAttn(nn.Module):
    """Upsamples point features by attending over a similarity matrix:
    computes relation(q, k) @ v, optionally followed by an MLP embedding."""

    def __init__(self, dim_in, relation=None, dim_out=None, dropout=0.):
        """
        dim_in:   channel count of the incoming features
        relation: Affinity module used to compute similarities; defaults
                  to a fresh pointnet2(). (BUG FIX: the old default
                  `relation=pointnet2()` was evaluated once at class
                  definition time, so every instance silently shared the
                  same module object.)
        dim_out:  if given, features are embedded to this dimension
        dropout:  dropout rate of the embedding MLP
        """
        super().__init__()
        if dim_out is None:
            self.embed = lambda x: x  # identity: keep features unchanged
        else:
            self.embed = K_MLP_Layer(3, dim_in, dim_out, True, True, dropout)
        # Build a new default relation per instance
        self.relation = relation if relation is not None else pointnet2()

    def forward(self, q, k, v):
        '''
        description: computes relation(qi, kj) * vj
        param {tensor} q: original point coordinates [B, N, 3]
        param {tensor} k: sampled point coordinates [B, S, 3]
        param {tensor} v: sampled point features [B, S, C]
        return {tensor} extract: upsampled point features [B, D, N]
        '''
        score = self.relation(q, k)  # [B, N, S]
        extract = torch.matmul(score, v)  # [B, N, C]
        extract = extract.transpose(1, -1)
        extract = self.embed(extract)  # [B, D, N]
        return extract
if __name__ == "__main__":
    # Smoke test: construct a euclidean affinity and print its repr
    # (exercises Affinity.extra_repr)
    p2 = euclidean()
    # src = torch.randn(1, 10, 3, dtype=torch.float)
    # dst = torch.randn(1, 10, 3, dtype=torch.float)
    # a = p2(src, dst)
    # print(a.shape)
    print(p2)
|
nilq/baby-python
|
python
|
import re
import pytest
from perl.translator import translate_string
from perl.utils import re_match, reset_vars
@pytest.fixture
def _globals():
    """Globals dict providing the names that perl-translated code expects
    to find at eval time."""
    return {"re": re, "__perl__re_match": re_match, "__perl__reset_vars": reset_vars}
def test_match__value_present__returns_true(_globals):
    # A string containing 'foo' should produce a truthy re.Match
    local_vars = {"var": "one foo two"}
    code = translate_string("var =~ /foo/")
    match = eval(code, _globals, local_vars)
    assert isinstance(match, re.Match)
def test_match__value_not_present__returns_false(_globals):
    # A string without 'foo' should yield None (no match)
    local_vars = {"var": "one two"}
    code = translate_string("var =~ /foo/")
    match = eval(code, _globals, local_vars)
    assert match is None
def test_match__value_match__value_set(_globals):
    # A capturing group should populate the __perl__var__1 builtin
    local_vars = {"var": "one foo two"}
    code = translate_string("var =~ /(foo)/")
    match = eval(code, _globals, local_vars)
    assert isinstance(match, re.Match)
    builtins_dict = _globals["__builtins__"]
    assert "__perl__var__1" in builtins_dict
    assert builtins_dict["__perl__var__1"] == "foo"
|
nilq/baby-python
|
python
|
# Authors: Stephane Gaiffas <stephane.gaiffas@gmail.com>
# License: BSD 3 clause
"""This modules introduces the Dataset class allowing to store a binned features matrix.
It uses internally a bitarray to save the values of the features in a memory efficient
fashion. It exploits the fact that any columns j of the features matrix X contain
only contiguous non-negative integers {0, 1, 2, ..., max_value_j} obtained through
binning of both categorical and continuous columns.
If a column contains M modalities, it will look for the minimum number of bits required
to save such values, and will stack them into 64 bits words of a contiguous memory
region of a bitarray (a 1D numpy array, using a F-major ordering of the matrix X).
For familiarity with bitwise operations:
https://en.wikipedia.org/wiki/Bitwise_operation
"""
from math import ceil, floor
import numpy as np
from numba import jit, void, uint8, int8, uint16, int16, uint32, int32, uint64, int64
from numba.experimental import jitclass
from .._utils import get_type
# Global jit decorator options, shared by every @jit kernel in this module
NOPYTHON = True
NOGIL = True
BOUNDSCHECK = False
CACHE = True
# Largest representable value for each unsigned dtype; used to pick the
# smallest dtype able to hold a column's values
_UINT8_MAX = np.iinfo(np.uint8).max
_UINT16_MAX = np.iinfo(np.uint16).max
_UINT32_MAX = np.iinfo(np.uint32).max
_UINT64_MAX = np.iinfo(np.uint64).max
# numba jitclass attribute specification for Dataset
spec_dataset = [
    # Number of samples in the dataset
    ("n_samples", uint64),
    # Number of features in the dataset
    ("n_features", uint64),
    # maximum value in each column
    ("max_values", uint64[::1]),
    # Number of bits used for each values of each columns
    ("n_bits", uint64[::1]),
    # bitarray[offsets[j]:offsets[j+1]] is the array of words for the j-th column
    ("offsets", uint64[::1]),
    # n_values_in_words[j] is the number of values saved in a word for column j
    ("n_values_in_words", uint64[::1]),
    # The bitarray containing all values
    ("bitarray", uint64[::1]),
    # The bitmasks used for each column
    ("bitmasks", uint64[::1]),
]
@jitclass(spec_dataset)
class Dataset(object):
    """This is a class containing the binned features matrix. It uses internally a
    bitarray to save the values of the features in a memory efficient fashion. It
    exploits the fact that all the columns of the features matrix X contain only
    contiguous non-negative integers {0, 1, 2, ..., max_value} obtained through
    binning of both categorical and continuous columns.
    If a column contains M modalities, it will look for the minimum number of bits
    required to save such values, and will stack them into 64 bits words in a
    contiguous memory region of the bitarray (a 1D numpy array, using a F-major
    ordering of the matrix X).
    For familiarity with bitwise operations:
    https://en.wikipedia.org/wiki/Bitwise_operation
    Parameters
    ----------
    n_samples : int
        Number samples (rows) in the dataset
    max_values : ndarray
        Number array of shape (n_features,) containing the maximum value (number of
        bins + 1) in each column.
    Attributes
    ----------
    n_samples : int
        Number samples (rows) in the dataset
    n_features : int
        Number of features (columns) in the dataset
    max_values : ndarray
        Numpy array of shape (n_features,) containing the maximum value (number of
        bins + 1) in each column.
    n_bits : ndarray
        Numpy array of shape (n_features,) such that n_bits[j] is the number of bits
        used for the values of the j-th column
    offsets : ndarray
        Numpy array of shape (n_features + 1,) such that
        bitarray[offsets[j]:offsets[j+1]] is the array of words for the j-th column
    n_values_in_words : ndarray
        Numpy array of shape (n_features,) such that n_values_in_words[j] is the number
        of values saved in a single 64-bits word for the values in column j
    bitmasks : ndarray
        Numpy array of shape (n_features,) such that bitmasks[j] contains the
        bitmask using the shift and back-shift operations to retrieve values from the
        bitarray
    bitarray : ndarray
        Numpy array of shape (n_total_words,) containing the values of the dataset,
        where n_total_words is the total number of words used (for all columns) to
        store the values.
    """

    def __init__(self, n_samples, max_values):
        self.n_samples = n_samples
        self.n_features = max_values.size
        self.max_values = max_values
        self.n_bits = np.empty(self.n_features, dtype=np.uint64)
        self.offsets = np.empty(self.n_features + 1, dtype=np.uint64)
        self.n_values_in_words = np.empty(self.n_features, dtype=np.uint64)
        self.bitmasks = np.empty(self.n_features, dtype=np.uint64)
        # The first offset is 0
        offset = 0
        self.offsets[0] = offset
        for j, max_value in enumerate(max_values):
            # Number of bits required to save numbers up to n_modalities
            if max_value == 1:
                # Binary column: one bit per value, 64 values per word
                self.n_bits[j] = 1
                self.n_values_in_words[j] = 64
                self.bitmasks[j] = 1
            else:
                # ceil(log2(max_value + 1)) bits encode {0, ..., max_value}
                self.n_bits[j] = ceil(np.log2(max_value + 1))
                self.n_values_in_words[j] = floor(64 / self.n_bits[j])
                # Mask with the n_bits[j] lowest bits set
                self.bitmasks[j] = (1 << self.n_bits[j]) - 1
            n_words = ceil(n_samples / self.n_values_in_words[j])
            offset += n_words
            self.offsets[j + 1] = offset
        # One flat buffer holds the words of all columns back to back
        self.bitarray = np.empty(offset, dtype=np.uint64)
DatasetType = get_type(Dataset)
numba_int_types = [uint8, int8, uint16, int16, uint32, int32, uint64, int64]
# TODO: put back signatures everywhere
@jit(
    # [void(uint64[::1], uint64, uint64, col_type[:]) for col_type in numba_int_types],
    nopython=NOPYTHON,
    nogil=NOGIL,
    boundscheck=BOUNDSCHECK,
    cache=CACHE,
    locals={"i": uint64, "x_ij": uint64, "word": uint64, "pos_in_word": uint64},
)
def _dataset_fill_column(col_bitarray, n_bits, n_values_in_word, col):
    """Private function that fills the values of a column in the dataset.
    Parameters
    ----------
    col_bitarray : ndarray
        Numpy array of shape (n_words,) containing the values of the column, where
        n_words is the number of words used to store its values.
    n_bits : int
        Number of bits used to store one value from the column
    n_values_in_word : int
        Number of values from the column saved in a single 64-bits word
    col : ndarray
        Numpy array of shape (n_samples,) corresponding to the values of a column to
        add to the dataset. This function exploits the fact that the values in col
        contain only contiguous non-negative integers {0, 1, 2, ..., max_value}
        coming from binning of both categorical and continuous columns.
    """
    for i, x_ij in enumerate(col):
        word = i // n_values_in_word
        pos_in_word = i % n_values_in_word
        if pos_in_word == 0:
            # First value of a fresh word: overwrite whatever was there
            col_bitarray[word] = x_ij
        else:
            # Shift the word left and append the new value in the low bits
            col_bitarray[word] = (col_bitarray[word] << n_bits) | x_ij
    # We need to shift the last word according to the position of the last value in
    # the word, so that the bits of the values in the last word are on the left
    # of it. If pos_in_word = n_values_in_word - 1 it does nothing, since the
    # word is full and already left-aligned
    col_bitarray[word] = col_bitarray[word] << (
        (n_values_in_word - pos_in_word - 1) * n_bits
    )
@jit(
    # [void(DatasetType, col_type[:, :]) for col_type in numba_int_types],
    nopython=NOPYTHON,
    nogil=NOGIL,
    boundscheck=BOUNDSCHECK,
    cache=CACHE,
    locals={
        "bitarray": uint64[::1],
        "offsets": uint64[::1],
        "n_values_in_words": uint64[::1],
        "n_bits": uint64[::1],
        "n_features": uint64,
        "j": uint64,
        "n_values_in_word": uint64,
        "bitarray_j": uint64[::1],
        "n_bits_j": uint64,
        "i": uint64,
        "x_ij": uint64,
        "word": uint64,
        "pos_in_word": uint64,
    },
)
def _dataset_fill_values(dataset, X):
    """Private function that fills the values in X inside the dataset.
    Parameters
    ----------
    dataset : Dataset
        The dataset to fill with the values in X
    X : ndarray
        Numpy array of shape (n_samples, n_features) corresponding to the matrix of
        features to be transformed in a Dataset. This function exploits the fact
        that all the columns of X contain only contiguous non-negative integers {0,
        1, 2, ..., max_value} obtained through binning of both categorical and
        continuous columns.
    """
    bitarray = dataset.bitarray
    offsets = dataset.offsets
    n_values_in_words = dataset.n_values_in_words
    n_bits = dataset.n_bits
    n_features = dataset.n_features
    # Fill the bitarray one column at a time; each column owns the word
    # range bitarray[offsets[j]:offsets[j+1]]
    for j in range(n_features):
        col_bitarray = bitarray[offsets[j] : offsets[j + 1]]
        _dataset_fill_column(col_bitarray, n_bits[j], n_values_in_words[j], X[:, j])
def dataset_fill_column(dataset, col_idx, col):
    """Fills the values of a column in the dataset.
    Parameters
    ----------
    dataset : Dataset
        The dataset to fill with the values in col
    col_idx : int
        Index of the column in the dataset
    col : ndarray
        Numpy array of shape (n_samples,) corresponding to the values of a column to
        add to the dataset. This function exploits the fact that the values in col
        contain only contiguous non-negative integers {0, 1, 2, ..., max_value}
        coming from binning of both categorical and continuous columns.
    """
    # Slice out the word range owned by this column and delegate to the
    # jitted kernel
    start, stop = dataset.offsets[col_idx], dataset.offsets[col_idx + 1]
    _dataset_fill_column(
        dataset.bitarray[start:stop],
        dataset.n_bits[col_idx],
        dataset.n_values_in_words[col_idx],
        col,
    )
def array_to_dataset(X):
    """Converts a numpy array to a Dataset.
    Parameters
    ----------
    X : ndarray
        Numpy array of shape (n_samples, n_features) corresponding to the matrix of
        features to be transformed to a Dataset. This function exploits the fact
        that all the columns of X contain only contiguous non-negative integers {0,
        1, 2, ..., max_value} obtained through binning of both categorical and
        continuous columns.
    Returns
    -------
    output : Dataset
        The dataset corresponding to the values in X.
    Raises
    ------
    ValueError
        If X is not a 2D numpy array with an unsigned integer dtype.
    """
    # BUG FIX: validate X *before* using it. The old code called X.shape
    # and X.max first, so invalid inputs raised confusing AttributeErrors
    # instead of the intended ValueErrors.
    if not (hasattr(X, "ndim") and hasattr(X, "dtype") and hasattr(X, "shape")):
        raise ValueError("X is not a numpy array")
    if X.ndim != 2:
        raise ValueError("X must be a 2D numpy array")
    if X.dtype not in (np.uint8, np.uint16, np.uint32, np.uint64):
        raise ValueError(
            "X dtype must be one of uint8, uint16, uint32 or " "uint64"
        )
    n_samples, n_features = X.shape
    # Per-column maxima determine the bit widths used by the Dataset
    max_values = np.empty(n_features, dtype=np.uint64)
    X.max(axis=0, initial=0, out=max_values)
    # NOTE: the old check `X.shape[1] != max_values.size` could never fail,
    # since max_values is sized from X.shape[1]; it has been removed.
    dataset = Dataset(n_samples, max_values)
    _dataset_fill_values(dataset, X)
    return dataset
def _get_empty_matrix(n_samples, n_features, max_value):
    """A private function that creates an empty F-ordered ndarray with shape
    (n_samples, n_features) and dtype in (uint8, uint16, uint32, uint64) depending on
    the expected maximum value to store in it.
    Parameters
    ----------
    n_samples : int
        Number of samples (number of rows of the matrix)
    n_features : int
        Number of features (number of columns of the matrix)
    max_value : int
        Maximum value expected in the matrix (to choose the dtype)
    Returns
    -------
    output : ndarray
        An ndarray with shape (n_samples, n_features) and minimal dtype to store values
    """
    # Pick the first (i.e. smallest) unsigned dtype able to hold max_value
    for dtype, bound in (
        (np.uint8, _UINT8_MAX),
        (np.uint16, _UINT16_MAX),
        (np.uint32, _UINT32_MAX),
        (np.uint64, _UINT64_MAX),
    ):
        if max_value <= bound:
            return np.empty((n_samples, n_features), dtype=dtype, order="F")
    raise ValueError("X cannot be created")
@jit(
    [
        void(DatasetType, uint8[:, :]),
        void(DatasetType, uint16[:, :]),
        void(DatasetType, uint32[:, :]),
        void(DatasetType, uint64[:, :]),
        void(DatasetType, uint8[::1, :]),
        void(DatasetType, uint16[::1, :]),
        void(DatasetType, uint32[::1, :]),
        void(DatasetType, uint64[::1, :]),
    ],
    nopython=NOPYTHON,
    nogil=NOGIL,
    boundscheck=BOUNDSCHECK,
    locals={
        "n_samples": uint64,
        "n_features": uint64,
        "n_values_in_words": uint64[::1],
        "offsets": uint64[::1],
        "bitarray": uint64[::1],
        "n_bits": uint64[::1],
        "bitmasks": uint64[::1],
        "j": uint64,
        "n_values_in_word": uint64,
        "bitarray_j": uint64[::1],
        "n_bits_j": uint64,
        "bitmask": uint64,
        "i": uint64,
        "word": uint64,
        "pos_in_word": uint64,
        "b": uint64,
        "n_shifts": uint64,
    },
)
def _dataset_to_array(dataset, X):
    """Decode every column of *dataset* from its bitarray into the
    preallocated output matrix *X* of shape (n_samples, n_features).

    This is the inverse of _dataset_fill_values: for each value it locates
    the word and position within the word, then masks and back-shifts to
    recover the original binned integer.
    """
    n_samples = dataset.n_samples
    n_features = dataset.n_features
    n_values_in_words = dataset.n_values_in_words
    offsets = dataset.offsets
    bitarray = dataset.bitarray
    n_bits = dataset.n_bits
    bitmasks = dataset.bitmasks
    for j in range(n_features):
        n_values_in_word = n_values_in_words[j]
        bitarray_j = bitarray[offsets[j] : offsets[j + 1]]
        n_bits_j = n_bits[j]
        bitmask = bitmasks[j]
        for i in range(n_samples):
            # Locate the 64-bit word and the value's slot inside it
            word = i // n_values_in_word
            pos_in_word = i % n_values_in_word
            b = bitarray_j[word]
            # Values are left-aligned in the word (see _dataset_fill_column),
            # so shift the mask up to the slot, then shift the bits back down
            n_shifts = (n_values_in_word - pos_in_word - 1) * n_bits_j
            X[i, j] = (b & (bitmask << n_shifts)) >> n_shifts
def dataset_to_array(dataset):
    """Decode *dataset* back into a plain 2D numpy array of binned values."""
    # Allocate the smallest dtype able to hold the largest stored value,
    # then let the jitted kernel fill it in place
    out = _get_empty_matrix(
        dataset.n_samples, dataset.n_features, dataset.max_values.max()
    )
    _dataset_to_array(dataset, out)
    return out
|
nilq/baby-python
|
python
|
import numpy as np
from sklearn.metrics.pairwise import pairwise_distances
# Pick the fastest available partial-sort implementation, in order of
# preference: bottleneck's argpartsort, numpy's argpartition, full argsort.
try:
    from bottleneck import argpartsort
except ImportError:
    try:
        # Added in version 1.8, which is pretty new.
        # Sadly, it's still slower than bottleneck's version.
        argpartsort = np.argpartition
    except AttributeError:
        # Very old numpy without argpartition: fall back to a full sort
        # (correct, just slower; the k argument is ignored)
        argpartsort = lambda arr,k: np.argsort(arr)
def min_k_indices(arr, k, inv_ind=False):
    '''Returns indices of the k-smallest values in each row, unsorted.
    The `inv_ind` flag returns the tuple (k-smallest,(n-k)-largest). '''
    partitioned = argpartsort(arr, k)
    smallest = partitioned[..., :k]
    if inv_ind:
        # Also return the complementary (n-k) largest indices
        return smallest, partitioned[..., k:]
    return smallest
def neighbor_graph(X, precomputed=False, k=None, epsilon=None, symmetrize=True, weighting='binary'):
    '''Construct an adj matrix from a matrix of points (one per row).
    When `precomputed` is True, X is a distance matrix.
    `weighting` param can be one of {binary, none}.'''
    assert ((k is not None) or (epsilon is not None)
            ), "Must provide `k` or `epsilon`"
    assert weighting in ('binary','none'), "Invalid weighting param: "+weighting
    num_pts = X.shape[0]
    if precomputed:
        dist = X.copy()
    else:
        dist = pairwise_distances(X, metric='sqeuclidean')
    if k is not None:
        # k+1 because each point is its own nearest neighbor
        k = min(k+1, num_pts)
        nn,not_nn = min_k_indices(dist, k, inv_ind=True)
    if epsilon is not None:
        if k is not None:
            dist[np.arange(dist.shape[0]), not_nn.T] = np.inf
        in_ball = dist <= epsilon
        dist[~in_ball] = 0  # zero out neighbors too far away
        if symmetrize and k is not None:
            # filtering may have caused asymmetry
            dist = (dist + dist.T) / 2
    else:
        # BUG FIX: the original used Python 2's `xrange`, which is a
        # NameError on Python 3.
        for i in range(num_pts):
            dist[i,not_nn[i]] = 0  # zero out neighbors too far away
        if symmetrize:
            dist = (dist + dist.T) / 2
    # BUG FIX: `weighting is 'binary'` compared identity against a string
    # literal, which only worked by the accident of CPython interning.
    if weighting == 'binary':
        # cycle through boolean and back to get 1/0 in floating points
        return dist.astype(bool).astype(float)
    return dist
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
#-----------------------------------------------------------------------------
# This file is part of the rogue_example software. It is subject to
# the license terms in the LICENSE.txt file found in the top-level directory
# of this distribution and at:
# https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of the rogue_example software, including this file, may be
# copied, modified, propagated, or distributed except according to the terms
# contained in the LICENSE.txt file.
#-----------------------------------------------------------------------------
import sys
import rogue.utilities
import rogue.utilities.fileio
import rogue.interfaces.stream
import pyrogue
import time
class EventReader(rogue.interfaces.stream.Slave):
    """Stream slave that prints the 32-bit payload words of every frame
    arriving on the streaming-data channel (channel 0x1)."""

    def __init__(self):
        rogue.interfaces.stream.Slave.__init__(self)
        # Allows callers to turn frame printing on and off
        self.enable = True

    def _acceptFrame(self, frame):
        """Called by rogue for each incoming frame."""
        if not self.enable:
            return
        # The channel number lives in the upper 8 flag bits
        chNum = frame.getFlags() >> 24
        if chNum != 0x1:  # only the streaming data channel is printed
            return
        print('-------- Event --------')
        # Copy the payload out of the frame
        p = bytearray(frame.getPayload())
        frame.read(p, 0)
        cnt = 0
        while cnt < len(p):
            # Assemble a 32-bit little-endian word from the next 4 bytes
            value = 0
            for x in range(0, 4):
                value += p[cnt] << (x * 8)
                cnt += 1
            # %d truncates, so cnt // 4 prints the same 1-based word index
            # the original cnt / 4 produced
            print('data[%d]: 0x%.8x' % ((cnt // 4), value))
def main(arg):
    """Replay the rogue data file at *arg*, printing its event frames."""
    # Build the file reader and our event printer, then wire them together
    # so every frame read from disk is delivered to the printer.
    reader = rogue.utilities.fileio.StreamReader()
    events = EventReader()
    pyrogue.streamConnect(reader, events)
    # Start playback, then give the background reader thread a moment
    # to push all frames through before the process exits.
    reader.open(arg)
    time.sleep(1)

if __name__ == '__main__':
    main(sys.argv[1])
|
nilq/baby-python
|
python
|
# Generated by Django 2.2 on 2019-05-18 19:06
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 2.2, see header comment):
    # creates the table backing CIPRSRecord. Do not hand-edit the schema
    # here; generate a follow-up migration instead.

    # First migration for this app, so there is nothing to depend on.
    initial = True

    dependencies = []

    operations = [
        # One model: an uploaded CIPRS report PDF plus the structured
        # data parsed out of it.
        migrations.CreateModel(
            name="CIPRSRecord",
            fields=[
                # Default auto-created integer primary key.
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("label", models.CharField(max_length=2048)),
                # Timestamp set automatically when the row is inserted.
                ("date_uploaded", models.DateTimeField(auto_now_add=True)),
                # The uploaded PDF, stored under MEDIA_ROOT/ciprs/.
                ("report_pdf", models.FileField(upload_to="ciprs/")),
                # Parsed report contents (PostgreSQL JSONB column).
                ("data", django.contrib.postgres.fields.jsonb.JSONField(blank=True)),
            ],
        )
    ]
|
nilq/baby-python
|
python
|
retrieve = [
{"scenario":"Patient Exists","patient":"9000000009", "response":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"},{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"T456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","text":"Student Accommodation","use":"temp"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency 
Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NominatedPharmacy","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-PreferredDispenserOrganization","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y23456"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-MedicalApplianceSupplier","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y34567"}}},{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"},{"extension":[{"url":"language","valueCodeableConcept":{"coding":[{"code":"fr","display":"French","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-HumanLanguage","version":"1.0.0"}]}},{"url":"interpreterRequired","valueBoolean":True}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSCommunication"},{"extension":[{"url":"PreferredWrittenCommunicationFormat","valueCodeableConcept":{"coding":[{"code":"12","display":"Braille","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredWrittenCommunicationFormat"}]}},{"url":"PreferredContactMethod","valueCodeableConcept":{"coding":[{"code":"1","display":"Letter","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredContactMethod"}]}},{"url":"PreferredContactTimes","valueString":"Not after 
7pm"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactPreference"},{"url":"http://hl7.org/fhir/StructureDefinition/patient-birthPlace","valueAddress":{"city":"Manchester","country":"GBR","district":"Greater Manchester"}}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Jane"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}, # noqa: E231, E501
{"scenario":"Patient Does Not Exist","patient":"9111231130", "response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"not_found","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"RESOURCE_NOT_FOUND","display":"Resource not found"}]}}]}}, # noqa: E231, E501
{"scenario":"Sensetive Patient Exists","patient":"9000000025", "response":{"birthDate":"2010-10-22","deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","id":"9000000025","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000025"}],"meta":{"security":[{"code":"R","display":"restricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smythe","given":["Janet"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient"}}, # noqa: E231, E501
{"scenario": "Invalid NHS number", "patient": "9000000001", "response": {"resourceType": "OperationOutcome", "issue": [{"severity": "error", "code": "value", "details": {"coding": [{"system": "https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode", "version": "1", "code": "INVALID_RESOURCE_ID", "display": "Resource Id is invalid"}]}}]}}, # noqa: E231, E501
{"scenario": "Invalid X-Request-ID", "patient": "9000000001", "response": {"resourceType": "OperationOutcome", "issue": [{"severity": "error", "code": "value", "details": {"coding": [{"system": "https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode", "version": "1", "code": "INVALID_VALUE", "display": "Provided value is invalid"}]}, "diagnostics": "Invalid value - '1234' in header 'X-Request-ID'"}]}} # noqa: E231, E501
]
search = [
{"scenario":"Simple Search","query_params":{"family":"Smith","gender":"female","birthdate":"eq2010-10-22"},"response":{"resourceType":"Bundle","type":"searchset","total":1,"entry":[{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000009","search":{"score":1},"resource":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and 
verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Jane"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}]}}, # noqa: E231, E501
{"scenario":"Wildcard Search","query_params":{"family":"Sm*","gender":"female","birthdate":"eq2010-10-22"},"response":{"resourceType":"Bundle","type":"searchset","total":2,"entry":[{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000009","search":{"score":0.8343},"resource":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and 
verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Jane"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}},{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000017","search":{"score":0.8343},"resource":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency 
Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000017","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000017"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smyth","given":["Jayne"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom 
(Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}]}}, # noqa: E231, E501
{"scenario":"Limited results Search","query_params":{"family":"Sm*","gender":"female","birthdate":"eq2010-10-22","_max-results":"2"},"response":{"resourceType":"Bundle","type":"searchset","total":2,"entry":[{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000009","search":{"score":0.8343},"resource":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present 
and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Jane"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}},{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000017","search":{"score":0.8343},"resource":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency 
Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000017","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000017"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smyth","given":["Jayne"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom 
(Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}]}}, # noqa: E231, E501
{"scenario":"Date Range Search","response":{"resourceType":"Bundle","type":"searchset","total":1,"entry":[{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000009","search":{"score":1},"resource":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and 
verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Jane"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}]}}, # noqa: E231, E501
{"scenario":"Fuzzy Search","query_params":{"family":"Smith","given":"jane","gender":"female","birthdate":"2010-10-22","_fuzzy-match":True},"response":{"resourceType":"Bundle","type":"searchset","total":1,"entry":[{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000017","search":{"score":0.8976},"resource":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000017","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number 
present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000017"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":"2"},"multipleBirthInteger":1,"name":[{"family":"Smyth","given":["Jayne"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}]}}, # noqa: E231, E501
{"scenario": "Restricted Patient Search","query_params": {"family": "Smythe", "given": "janet", "gender": "female", "birthdate": "eq2005-06-16"}, "response": {"resourceType": "Bundle", "type": "searchset", "total": 1, "entry": [{"fullUrl": "https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000025", "search": {"score": 1}, "resource": {"birthDate": "2005-06-16", "deceasedDateTime": "2005-06-16T00:00:00+00:00", "extension": [{"extension": [{"url": "deathNotificationStatus", "valueCodeableConcept": {"coding": [{"code": "2", "display": "Formal - death notice received from Registrar of Deaths", "system": "https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus", "version": "1.0.0"}]}}, {"url": "systemEffectiveDate", "valueDateTime": "2005-06-16T00:00:00+00:00"}], "url": "https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"}], "gender": "female", "id": "9000000025","identifier": [{"extension": [{"url": "https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus", "valueCodeableConcept": {"coding": [{"code": "01", "display": "Number present and verified", "system": "https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus", "version": "1.0.0"}]}}], "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000025"}], "meta": {"security": [{"code": "R", "display": "restricted", "system": "https://www.hl7.org/fhir/valueset-security-labels.html"}], "versionId": "2"}, "multipleBirthInteger": 1, "name": [{"family": "Smythe", "given": ["Janet"], "id": "123", "period": {"end": "2021-12-31", "start": "2020-01-01"}, "prefix": ["Mrs"], "suffix": ["MBE"], "use": "usual"}], "resourceType": "Patient"}}]}}, # noqa: E231, E501
{"scenario":"Unsuccessful Search","query_params":{"family":"Bingham","given":"john","gender":"male","birthdate":"1934-12-18"},"response":{"resourceType":"Bundle","type":"searchset","total":0}}, # noqa: E231, E501
{"scenario": "Invalid Date Format Search","query_params": {"family": "Smith", "given": "jane", "gender": "female", "birthdate": "20101022"}, "response": {"resourceType": "OperationOutcome", "issue": [{"severity": "error", "code": "value", "details": {"coding": [{"system": "https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode", "version": "1", "code": "INVALID_SEARCH_DATA", "display": "Search data is invalid"}]}, "diagnostics": "Invalid value - '20101022' in field 'birthdate'"}]}}, # noqa: E231, E501
{"scenario":"Too Few Search Parameters","response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"required","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"MISSING_VALUE","display":"Required value is missing"}]},"diagnostics":"Not enough search parameters were provided to be able to make a search"}]}}, # noqa: E231, E501
]
update = [
{"scenario":"Add New Name", "patient":"9000000009","patient_record":2,"patch":{"patches": [{"op": "add", "path": "/name/-", "value": {"use": "usual", "period": {"start": "2019-12-31"}, "prefix": "Dr", "given": ["Joe", "Horation", "Maximus"], "family": "Bloggs", "suffix": "PhD"}}]},"response":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"},{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"T456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","text":"Student Accommodation","use":"temp"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency 
Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NominatedPharmacy","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-PreferredDispenserOrganization","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y23456"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-MedicalApplianceSupplier","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y34567"}}},{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"},{"extension":[{"url":"language","valueCodeableConcept":{"coding":[{"code":"fr","display":"French","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-HumanLanguage","version":"1.0.0"}]}},{"url":"interpreterRequired","valueBoolean":True}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSCommunication"},{"extension":[{"url":"PreferredWrittenCommunicationFormat","valueCodeableConcept":{"coding":[{"code":"12","display":"Braille","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredWrittenCommunicationFormat"}]}},{"url":"PreferredContactMethod","valueCodeableConcept":{"coding":[{"code":"1","display":"Letter","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredContactMethod"}]}},{"url":"PreferredContactTimes","valueString":"Not after 
7pm"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactPreference"},{"url":"http://hl7.org/fhir/StructureDefinition/patient-birthPlace","valueAddress":{"city":"Manchester","country":"GBR","district":"Greater Manchester"}}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":3},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Jane"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"},{"use":"usual","period":{"start":"2019-12-31"},"prefix":"Dr","given":["Joe","Horation","Maximus"],"family":"Bloggs","suffix":"PhD"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}, # noqa: E231, E501
{"scenario":"Replace Given Name", "patient":"9000000009","patient_record":2,"patch":{"patches":[{"op":"replace","path":"/name/0/given/0","value":"Anne"}]},"response":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"},{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"T456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","text":"Student Accommodation","use":"temp"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency 
Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NominatedPharmacy","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-PreferredDispenserOrganization","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y23456"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-MedicalApplianceSupplier","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y34567"}}},{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"},{"extension":[{"url":"language","valueCodeableConcept":{"coding":[{"code":"fr","display":"French","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-HumanLanguage","version":"1.0.0"}]}},{"url":"interpreterRequired","valueBoolean":True}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSCommunication"},{"extension":[{"url":"PreferredWrittenCommunicationFormat","valueCodeableConcept":{"coding":[{"code":"12","display":"Braille","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredWrittenCommunicationFormat"}]}},{"url":"PreferredContactMethod","valueCodeableConcept":{"coding":[{"code":"1","display":"Letter","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredContactMethod"}]}},{"url":"PreferredContactTimes","valueString":"Not after 
7pm"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactPreference"},{"url":"http://hl7.org/fhir/StructureDefinition/patient-birthPlace","valueAddress":{"city":"Manchester","country":"GBR","district":"Greater Manchester"}}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":3},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Anne"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}, # noqa: E231, E501
{"scenario":"Remove Suffix from Name", "patient":"9000000009","patient_record":2,"patch":{"patches":[{"op":"test","path":"/name/0/id","value":"123"},{"op":"remove","path":"/name/0/suffix/0"}]},"response":{"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"},{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"id":"T456","line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","text":"Student Accommodation","use":"temp"}],"birthDate":"2010-10-22","contact":[{"id":"C123","period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"C","display":"Emergency 
Contact","system":"http://terminology.hl7.org/CodeSystem/v2-0131"}]}],"telecom":[{"system":"phone","value":"01632960587"}]}],"deceasedDateTime":"2010-10-22T00:00:00+00:00","extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NominatedPharmacy","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-PreferredDispenserOrganization","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y23456"}}},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-MedicalApplianceSupplier","valueReference":{"identifier":{"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y34567"}}},{"extension":[{"url":"deathNotificationStatus","valueCodeableConcept":{"coding":[{"code":"2","display":"Formal - death notice received from Registrar of Deaths","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-DeathNotificationStatus","version":"1.0.0"}]}},{"url":"systemEffectiveDate","valueDateTime":"2010-10-22T00:00:00+00:00"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-DeathNotificationStatus"},{"extension":[{"url":"language","valueCodeableConcept":{"coding":[{"code":"fr","display":"French","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-HumanLanguage","version":"1.0.0"}]}},{"url":"interpreterRequired","valueBoolean":True}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSCommunication"},{"extension":[{"url":"PreferredWrittenCommunicationFormat","valueCodeableConcept":{"coding":[{"code":"12","display":"Braille","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredWrittenCommunicationFormat"}]}},{"url":"PreferredContactMethod","valueCodeableConcept":{"coding":[{"code":"1","display":"Letter","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredContactMethod"}]}},{"url":"PreferredContactTimes","valueString":"Not after 
7pm"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactPreference"},{"url":"http://hl7.org/fhir/StructureDefinition/patient-birthPlace","valueAddress":{"city":"Manchester","country":"GBR","district":"Greater Manchester"}}],"gender":"female","generalPractitioner":[{"id":"254406A3","identifier":{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"https://fhir.nhs.uk/Id/ods-organization-code","value":"Y12345"},"type":"Organization"}],"id":"9000000009","identifier":[{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus","valueCodeableConcept":{"coding":[{"code":"01","display":"Number present and verified","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-NHSNumberVerificationStatus","version":"1.0.0"}]}}],"system":"https://fhir.nhs.uk/Id/nhs-number","value":"9000000009"}],"meta":{"security":[{"code":"U","display":"unrestricted","system":"https://www.hl7.org/fhir/valueset-security-labels.html"}],"versionId":3},"multipleBirthInteger":1,"name":[{"family":"Smith","given":["Jane"],"id":"123","period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":[],"use":"usual"}],"resourceType":"Patient","telecom":[{"id":"789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"},{"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-OtherContactSystem","valueCoding":{"code":"textphone","display":"Minicom (Textphone)","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-OtherContactSystem"}}],"id":"OC789","period":{"end":"2021-12-31","start":"2020-01-01"},"system":"other","use":"home","value":"01632960587"}]}}, # noqa: E231, E501
{"scenario":"No Patch Sent", "patient":"9000000009","patient_record":2,"patch":{},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"structure","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"INVALID_UPDATE","display":"Update is invalid"}]},"diagnostics":"Invalid update with error - No patches found"}]}}, # noqa: E231, E501
{"scenario":"Incorrect resource version", "patient":"9000000009","patient_record":3,"patch":{"patches":[{"op":"test","path":"/name/0/id","value":"123"},{"op":"remove","path":"/name/0/suffix/0"}]},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"structure","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"PRECONDITION_FAILED","display":"Required condition was not fulfilled"}]},"diagnostics":"Invalid update with error - This resource has changed since you last read. Please re-read and try again with the new version number."}]}}, # noqa: E231, E501
{"scenario":"Invalid Request ID", "patient":"9000000009","patient_record":2,"patch":{"patches":[{"op":"test","path":"/name/0/id","value":"123"},{"op":"remove","path":"/name/0/suffix/0"}]},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"value","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"INVALID_VALUE","display":"Provided value is invalid"}]},"diagnostics":"Invalid value - '12345' in header 'X-Request-ID'"}]}}, # noqa: E231, E501
{"scenario":"Missing If Match Header", "patient":"9000000009","patch":{"patches":[{"op":"test","path":"/name/0/id","value":"123"},{"op":"remove","path":"/name/0/suffix/0"}]},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"structure","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"PRECONDITION_FAILED","display":"Required condition was not fulfilled"}]},"diagnostics":"Invalid update with error - If-Match header must be supplied to update this resource"}]}}, # noqa: E231, E501
{"scenario":"Incorrect Content Type", "patient":"9000000009","patch":{"patches":[{"op":"test","path":"/name/0/id","value":"123"},{"op":"remove","path":"/name/0/suffix/0"}]},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"processing","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"UNSUPPORTED_SERVICE","display":"Unsupported Service"}]}}]}}, # noqa: E231, E501
{"scenario":"Invalid patch", "patient":"9000000009","patient_record":2, "patch":{"patches":[{"op":"bad_value","path":"not a path"}]},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"structure","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"INVALID_UPDATE","display":"Update is invalid"}]},"diagnostics":"Invalid patch: Operation `op` property is not one of operations defined in RFC-6902"}]}}, # noqa: E231, E501
{"scenario":"Invalid NHS Number", "patient":"9000000000","patient_record":2,"patch":{"patches":[{"op":"test","path":"/name/0/id","value":"123"},{"op":"remove","path":"/name/0/suffix/0"}]},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"value","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"INVALID_RESOURCE_ID","display":"Resource Id is invalid"}]}}]}}, # noqa: E231, E501
{"scenario":"Patient does not Exist", "patient":"9111231130","patient_record":2,"patch":{"patches":[{"op":"test","path":"/name/0/id","value":"123"},{"op":"remove","path":"/name/0/suffix/0"}]},"response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"not_found","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"RESOURCE_NOT_FOUND","display":"Resource not found"}]}}]}} # noqa: E231, E501
]
relatedPerson = [
{"scenario":"Related Person Exists","patient":"9000000009", "response":{"resourceType":"Bundle","type":"searchset","total":2,"entry":[{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000009/RelatedPerson/507B7621","resource":{"active":True,"address":[{"extension":[{"extension":[{"url":"type","valueCoding":{"code":"PAF","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AddressKeyType"}},{"url":"value","valueString":"12345678"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-AddressKey"}],"line":["1 Trevelyan Square","Boar Lane","City Centre","Leeds","West Yorkshire"],"period":{"end":"2021-12-31","start":"2020-01-01"},"postalCode":"LS1 6AE","use":"home"}],"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-CopyCorrespondenceIndicator","valueBoolean":True},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactRank","valuePositiveInt":1},{"extension":[{"url":"PreferredWrittenCommunicationFormat","valueCodeableConcept":{"coding":[{"code":"12","display":"Braille","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredWrittenCommunicationFormat"}]}},{"url":"PreferredContactMethod","valueCodeableConcept":{"coding":[{"code":"1","display":"Letter","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredContactMethod"}]}},{"url":"PreferredContactTimes","valueString":"Not after 
7pm"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactPreference"},{"extension":[{"url":"language","valueCodeableConcept":{"coding":[{"code":"fr","display":"French","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-HumanLanguage","version":"1.0.0"}]}},{"url":"interpreterRequired","valueBoolean":True}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSCommunication"}],"id":"507B7621","name":[{"family":"Smith","given":["Jane"],"period":{"end":"2021-12-31","start":"2020-01-01"},"prefix":["Mrs"],"suffix":["MBE"],"use":"usual"}],"patient":{"identifier":{"system":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient","value":"90000000009"},"reference":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/90000000009","type":"Patient"},"period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"Guardian","display":"Guardian of patient","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AdditionalRelatedPersonRole"}]}],"resourceType":"RelatedPerson","telecom":[{"period":{"end":"2021-12-31","start":"2020-01-01"},"system":"phone","use":"home","value":"01632960587"}]}},{"fullUrl":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/9000000009/RelatedPerson/B3380E98","resource":{"active":True,"extension":[{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-CopyCorrespondenceIndicator","valueBoolean":True},{"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactRank","valuePositiveInt":1},{"extension":[{"url":"PreferredWrittenCommunicationFormat","valueCodeableConcept":{"coding":[{"code":"12","display":"Braille","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredWrittenCommunicationFormat"}]}},{"url":"PreferredContactMethod","valueCodeableConcept":{"coding":[{"code":"1","display":"Letter","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-PreferredContactMethod"}]}},{"url":"PreferredContactTimes","valueString":"Not after 
7pm"}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-ContactPreference"},{"extension":[{"url":"language","valueCodeableConcept":{"coding":[{"code":"fr","display":"French","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-HumanLanguage","version":"1.0.0"}]}},{"url":"interpreterRequired","valueBoolean":True}],"url":"https://fhir.nhs.uk/R4/StructureDefinition/Extension-UKCore-NHSCommunication"}],"id":"B3380E98","patient":{"identifier":{"system":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient","value":"90000000009"},"reference":"https://api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/90000000009","type":"Patient"},"period":{"end":"2021-12-31","start":"2020-01-01"},"relationship":[{"coding":[{"code":"Guardian","display":"Guardian of patient","system":"https://fhir.nhs.uk/R4/CodeSystem/UKCore-AdditionalRelatedPersonRole"}]}],"resourceType":"RelatedPerson"}}]}}, # noqa: E231, E501
{"scenario":"Patient Does Not Exist","patient":"9111231130","response":{"resourceType":"OperationOutcome","issue":[{"severity":"error","code":"not_found","details":{"coding":[{"system":"https://fhir.nhs.uk/R4/CodeSystem/Spine-ErrorOrWarningCode","version":"1","code":"RESOURCE_NOT_FOUND","display":"Resource not found"}]}}]}}, # noqa: E231, E501
{"scenario": "Related Person Does Not Exist", "patient": "9000000025", "response": {"resourceType":"Bundle","type":"searchset","total":0}} # noqa: E231, E501
]
|
nilq/baby-python
|
python
|
from django.contrib import admin
from . models import Ads
@admin.register(Ads)
class AdsAdmin(admin.ModelAdmin):
    """Admin options for the Ads model."""

    # Auto-fill the slug field from the title as it is typed in the admin form.
    prepopulated_fields = {"slug": ("title",)}
|
nilq/baby-python
|
python
|
#write import statement for Die class
from src.homework.homework9.die import Die
'''
Create a Player class.
'''
class Player:
    """A player holding two dice who can roll them until doubles come up."""

    def __init__(self):
        """Create the player's two dice (attributes ``die1`` and ``die2``)."""
        self.die1 = Die()
        self.die2 = Die()

    def roll_doubles(self):
        """
        Roll both dice repeatedly, announcing every roll, and stop as soon
        as the two dice show the same value (a double).
        """
        while True:
            first = self.die1.roll()
            second = self.die2.roll()
            print('You got a ', first, 'and a ', second)
            if first == second:
                print('Doubles! You got a ', first, 'and a ', second)
                return
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This is needed as multiprocessing shouldn't include nsz
# as it won't be able to optain __main__.__file__ and so crash inside Keys.py
# Entry-point guard: multiprocessing re-imports __main__ in child processes,
# and (per the note above) nsz must not be imported there because Keys.py
# needs __main__.__file__ and would crash without it.
if __name__ == '__main__':
    import sys
    # Fail fast with a clear message on unsupported interpreters
    # (0x03060000 is Python 3.6.0 in sys.hexversion encoding).
    if sys.hexversion < 0x03060000:
        raise ImportError("NSZ requires at least Python 3.6!\nCurrent python version is " + sys.version)
    import multiprocessing
    # Required for frozen Windows executables so spawned children start correctly.
    multiprocessing.freeze_support()
    # Deliberately imported only here, after freeze_support(), and only in the
    # real main process — see the comment at the top of this guard.
    import nsz
    nsz.main()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Cosmos module wrapping the public and private key cryptography and ledger api."""
import base64
import hashlib
import json
import logging
import os
import subprocess # nosec
import tempfile
import time
from pathlib import Path
from typing import Any, BinaryIO, Dict, Optional, Tuple
from bech32 import bech32_encode, convertbits
from ecdsa import SECP256k1, SigningKey, VerifyingKey
from ecdsa.util import sigencode_string_canonize
import requests
from aea.crypto.base import Crypto, FaucetApi, Helper, LedgerApi
from aea.helpers.base import try_decorator
from aea.mail.base import Address
logger = logging.getLogger(__name__)

# Ledger identifier under which this crypto plugin is registered, and the
# bech32 human-readable prefix of generated addresses.
_COSMOS = "cosmos"
# Faucet endpoint used by CosmosFaucetApi to claim test funds (Agent Land testnet).
COSMOS_TESTNET_FAUCET_URL = "https://faucet-agent-land.prod.fetch-ai.com:443/claim"
TESTNET_NAME = "testnet"
# Default REST endpoint of the Agent Land network.
DEFAULT_ADDRESS = "https://rest-agent-land.prod.fetch-ai.com:443"
# Default token denomination used for amounts/fees.
DEFAULT_CURRENCY_DENOM = "atestfet"
DEFAULT_CHAIN_ID = "agent-land"
class CosmosCrypto(Crypto[SigningKey]):
    """Class wrapping key generation, loading and signing for the Cosmos ledger."""

    # NOTE(review): the original docstring said "Ethereum" — a copy-paste
    # error; this class derives Cosmos bech32 addresses and Cosmos tx envelopes.
    identifier = _COSMOS

    def __init__(self, private_key_path: Optional[str] = None):
        """
        Instantiate a cosmos crypto object.

        :param private_key_path: the private key path of the agent
        """
        super().__init__(private_key_path=private_key_path)
        # Compressed SECP256k1 verifying key, hex-encoded.
        self._public_key = self.entity.get_verifying_key().to_string("compressed").hex()
        self._address = CosmosHelper.get_address_from_public_key(self.public_key)

    @property
    def private_key(self) -> str:
        """
        Return a private key.

        :return: a private key string (hex-encoded)
        """
        return self.entity.to_string().hex()

    @property
    def public_key(self) -> str:
        """
        Return a public key in hex format.

        :return: a public key string in hex format
        """
        return self._public_key

    @property
    def address(self) -> str:
        """
        Return the address for the key pair.

        :return: a display_address str
        """
        return self._address

    @classmethod
    def load_private_key_from_path(cls, file_name: str) -> SigningKey:
        """
        Load a private key in hex format from a file.

        :param file_name: the path to the hex file.
        :return: the Entity.
        """
        path = Path(file_name)
        with open(path, "r") as key:
            data = key.read()
        signing_key = SigningKey.from_string(bytes.fromhex(data), curve=SECP256k1)
        return signing_key

    def sign_message(self, message: bytes, is_deprecated_mode: bool = False) -> str:
        """
        Sign a message in bytes string form.

        :param message: the message to be signed
        :param is_deprecated_mode: if the deprecated signing is used
        :return: signature of the message in string form (base64)
        """
        # Deterministic signing with a canonicalised (low-s) compact signature.
        signature_compact = self.entity.sign_deterministic(
            message, hashfunc=hashlib.sha256, sigencode=sigencode_string_canonize,
        )
        signature_base64_str = base64.b64encode(signature_compact).decode("utf-8")
        return signature_base64_str

    @staticmethod
    def format_default_transaction(
        transaction: Any, signature: str, base64_pbk: str
    ) -> Any:
        """
        Format default CosmosSDK transaction and add signature

        :param transaction: the transaction to be formatted
        :param signature: the transaction signature
        :param base64_pbk: the base64 formatted public key

        :return: formatted transaction with signature
        """
        pushable_tx = {
            "tx": {
                "msg": transaction["msgs"],
                "fee": transaction["fee"],
                "memo": transaction["memo"],
                "signatures": [
                    {
                        "signature": signature,
                        "pub_key": {
                            "type": "tendermint/PubKeySecp256k1",
                            "value": base64_pbk,
                        },
                        "account_number": transaction["account_number"],
                        "sequence": transaction["sequence"],
                    }
                ],
            },
            # Broadcast mode "async" — presumably returns without waiting for
            # the tx to be checked/committed; confirm against the REST API docs.
            "mode": "async",
        }
        return pushable_tx

    @staticmethod
    def format_wasm_transaction(
        transaction: Any, signature: str, base64_pbk: str
    ) -> Any:
        """
        Format CosmWasm transaction and add signature

        :param transaction: the transaction to be formatted
        :param signature: the transaction signature
        :param base64_pbk: the base64 formatted public key

        :return: formatted transaction with signature
        """
        # CosmWasm uses the "cosmos-sdk/StdTx" envelope; note the field order
        # and nesting differ from format_default_transaction above.
        pushable_tx = {
            "type": "cosmos-sdk/StdTx",
            "value": {
                "msg": transaction["msgs"],
                "fee": transaction["fee"],
                "signatures": [
                    {
                        "pub_key": {
                            "type": "tendermint/PubKeySecp256k1",
                            "value": base64_pbk,
                        },
                        "signature": signature,
                    }
                ],
                "memo": transaction["memo"],
            },
        }
        return pushable_tx

    def sign_transaction(self, transaction: Any) -> Any:
        """
        Sign a transaction in bytes string form.

        :param transaction: the transaction to be signed
        :return: signed transaction
        """
        # Canonical JSON (sorted keys, no whitespace) so the signed bytes are
        # reproducible by the verifying node.
        transaction_str = json.dumps(transaction, separators=(",", ":"), sort_keys=True)
        transaction_bytes = transaction_str.encode("utf-8")
        signed_transaction = self.sign_message(transaction_bytes)
        base64_pbk = base64.b64encode(bytes.fromhex(self.public_key)).decode("utf-8")
        # Heuristic: a single message whose type mentions "wasm" gets the
        # CosmWasm envelope; everything else gets the default CosmosSDK one.
        if (
            "msgs" in transaction
            and len(transaction["msgs"]) == 1
            and "type" in transaction["msgs"][0]
            and "wasm" in transaction["msgs"][0]["type"]
        ):
            return self.format_wasm_transaction(
                transaction, signed_transaction, base64_pbk
            )
        else:
            return self.format_default_transaction(
                transaction, signed_transaction, base64_pbk
            )

    @classmethod
    def generate_private_key(cls) -> SigningKey:
        """Generate a key pair for cosmos network."""
        signing_key = SigningKey.generate(curve=SECP256k1)
        return signing_key

    def dump(self, fp: BinaryIO) -> None:
        """
        Serialize crypto object as binary stream to `fp` (a `.write()`-supporting file-like object).

        :param fp: the output file pointer. Must be set in binary mode (mode='wb')
        :return: None
        """
        fp.write(self.private_key.encode("utf-8"))
class CosmosHelper(Helper):
    """Helper class usable as Mixin for CosmosApi or as standalone class."""

    @staticmethod
    def is_transaction_settled(tx_receipt: Any) -> bool:
        """
        Check whether a transaction is settled or not.

        :param tx_receipt: the receipt associated to the transaction.
        :return: True if the transaction has been settled, False o/w.
        """
        is_successful = False
        if tx_receipt is not None:
            # TODO: quick fix only, not sure this is reliable — a non-None
            # receipt is treated as success without inspecting its contents.
            is_successful = True
        return is_successful

    @staticmethod
    def is_transaction_valid(
        tx: Any, seller: Address, client: Address, tx_nonce: str, amount: int,
    ) -> bool:
        """
        Check whether a transaction is valid or not.

        :param tx: the transaction (node JSON response).
        :param seller: the address of the seller.
        :param client: the address of the client.
        :param tx_nonce: the transaction nonce (currently not checked).
        :param amount: the amount we expect to get from the transaction.
        :return: True if amount, sender and recipient all match, False o/w.
        """
        if tx is None:
            return False  # pragma: no cover
        try:
            _tx = tx.get("tx").get("value").get("msg")[0]
            recovered_amount = int(_tx.get("value").get("amount")[0].get("amount"))
            sender = _tx.get("value").get("from_address")
            recipient = _tx.get("value").get("to_address")
            is_valid = (
                recovered_amount == amount and sender == client and recipient == seller
            )
        # BUG FIX: on a malformed tx the chained ``.get`` calls return None and
        # raise AttributeError (and ``int`` may raise TypeError/ValueError);
        # previously only KeyError/IndexError were caught, so such a tx crashed
        # the check instead of yielding False.
        except (AttributeError, KeyError, IndexError, TypeError, ValueError):  # pragma: no cover
            is_valid = False
        return is_valid

    @staticmethod
    def generate_tx_nonce(seller: Address, client: Address) -> str:
        """
        Generate a unique hash to distinguish txs with the same terms.

        :param seller: the address of the seller.
        :param client: the address of the client.
        :return: return the hash in hex.
        """
        # Second-resolution timestamp: two calls within the same second for the
        # same pair produce the same nonce.
        time_stamp = int(time.time())
        aggregate_hash = hashlib.sha256(
            b"".join([seller.encode(), client.encode(), time_stamp.to_bytes(32, "big")])
        )
        return aggregate_hash.hexdigest()

    @staticmethod
    def get_address_from_public_key(public_key: str) -> str:
        """
        Get the address from the public key.

        :param public_key: the public key (hex encoded, compressed form expected)
        :return: the bech32 address string
        """
        # Cosmos-style derivation: bech32(ripemd160(sha256(pubkey_bytes))).
        public_key_bytes = bytes.fromhex(public_key)
        s = hashlib.new("sha256", public_key_bytes).digest()
        r = hashlib.new("ripemd160", s).digest()
        five_bit_r = convertbits(r, 8, 5)
        # NOTE: assert is stripped under ``python -O``; kept for backward
        # compatibility with callers expecting AssertionError here.
        assert five_bit_r is not None, "Unsuccessful bech32.convertbits call"
        address = bech32_encode(_COSMOS, five_bit_r)
        return address

    @staticmethod
    def recover_message(
        message: bytes, signature: str, is_deprecated_mode: bool = False
    ) -> Tuple[Address, ...]:
        """
        Recover the addresses from the hash.

        :param message: the message we expect
        :param signature: the transaction signature (base64 encoded)
        :param is_deprecated_mode: if the deprecated signing was used (unused here)
        :return: the recovered addresses
        """
        signature_b64 = base64.b64decode(signature)
        # ECDSA public-key recovery yields multiple candidate keys; all of the
        # corresponding addresses are returned and the caller disambiguates.
        verifying_keys = VerifyingKey.from_public_key_recovery(
            signature_b64, message, SECP256k1, hashfunc=hashlib.sha256,
        )
        public_keys = [
            verifying_key.to_string("compressed").hex()
            for verifying_key in verifying_keys
        ]
        addresses = [
            CosmosHelper.get_address_from_public_key(public_key)
            for public_key in public_keys
        ]
        return tuple(addresses)

    @staticmethod
    def get_hash(message: bytes) -> str:
        """
        Get the hash of a message.

        :param message: the message to be hashed.
        :return: the hash of the message.
        """
        digest = hashlib.sha256(message).hexdigest()
        return digest
class CosmosApi(LedgerApi, CosmosHelper):
    """Class to interact with the Cosmos SDK via a HTTP APIs."""

    identifier = _COSMOS

    def __init__(self, **kwargs):
        """
        Initialize the Cosmos ledger APIs.

        :param kwargs: may contain 'address' (node REST endpoint),
            'denom' (default currency denomination) and 'chain_id'.
        """
        self._api = None
        self.network_address = kwargs.pop("address", DEFAULT_ADDRESS)
        self.denom = kwargs.pop("denom", DEFAULT_CURRENCY_DENOM)
        self.chain_id = kwargs.pop("chain_id", DEFAULT_CHAIN_ID)

    @property
    def api(self) -> Optional[Any]:
        """Get the underlying API object."""
        # NOTE(review): always None — no client object is ever created; all
        # calls in this class go through plain HTTP requests or the CLI.
        return self._api

    def get_balance(self, address: Address) -> Optional[int]:
        """Get the balance of a given account."""
        balance = self._try_get_balance(address)
        return balance

    @try_decorator(
        "Encountered exception when trying get balance: {}",
        logger_method=logger.warning,
    )
    def _try_get_balance(self, address: Address) -> Optional[int]:
        """Try get the balance of a given account; None on HTTP failure."""
        balance = None  # type: Optional[int]
        url = self.network_address + f"/bank/balances/{address}"
        response = requests.get(url=url)
        if response.status_code == 200:
            result = response.json()["result"]
            if len(result) == 0:
                # Unfunded accounts come back with an empty result list.
                balance = 0
            else:
                # Only the first denomination listed is reported.
                balance = int(result[0]["amount"])
        return balance

    def get_deploy_transaction(
        self,
        contract_interface: Dict[str, str],
        deployer_address: Address,
        tx_fee: int = 0,
        gas: int = 80000,
        denom: Optional[str] = None,
        memo: str = "",
        chain_id: Optional[str] = None,
        **kwargs,
    ) -> Dict[str, Any]:
        """
        Create a CosmWasm bytecode deployment transaction.

        :param contract_interface: dict holding the base64 'wasm_byte_code'.
        :param deployer_address: the address of the message initiator.
        :param tx_fee: the transaction fee.
        :param gas: Maximum amount of gas to be used on executing command.
        :param denom: the denomination of the tx fee.
        :param memo: Any string comment.
        :param chain_id: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet).
        :return: the unsigned CosmWasm contract deploy message
        """
        denom = denom if denom is not None else self.denom
        chain_id = chain_id if chain_id is not None else self.chain_id
        account_number, sequence = self._try_get_account_number_and_sequence(
            deployer_address
        )
        deploy_msg = {
            "type": "wasm/store-code",
            "value": {
                "sender": deployer_address,
                "wasm_byte_code": contract_interface["wasm_byte_code"],
                "source": "",
                "builder": "",
            },
        }
        tx = self._get_transaction(
            account_number,
            chain_id,
            tx_fee,
            denom,
            gas,
            memo,
            sequence,
            msg=deploy_msg,
        )
        return tx

    def get_init_transaction(
        self,
        deployer_address: Address,
        code_id: int,
        init_msg: Any,
        amount: int,
        tx_fee: int,
        gas: int = 80000,
        denom: Optional[str] = None,
        label: str = "",
        memo: str = "",
        chain_id: Optional[str] = None,
    ) -> Optional[Any]:
        """
        Create a CosmWasm InitMsg transaction.

        :param deployer_address: the deployer address of the message initiator.
        :param amount: Contract's initial funds amount
        :param code_id: the ID of contract bytecode.
        :param init_msg: the InitMsg containing parameters for contract constructor.
        :param gas: Maximum amount of gas to be used on executing command.
        :param denom: the name of the denomination of the contract funds
        :param label: the label name of the contract
        :param memo: Any string comment.
        :param chain_id: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet).
        :return: the unsigned CosmWasm InitMsg
        """
        denom = denom if denom is not None else self.denom
        chain_id = chain_id if chain_id is not None else self.chain_id
        account_number, sequence = self._try_get_account_number_and_sequence(
            deployer_address
        )
        instantiate_msg = {
            "type": "wasm/instantiate",
            "value": {
                "sender": deployer_address,
                "code_id": str(code_id),
                "label": label,
                "init_msg": init_msg,
                "init_funds": [{"denom": denom, "amount": str(amount)}],
            },
        }
        tx = self._get_transaction(
            account_number,
            chain_id,
            tx_fee,
            denom,
            gas,
            memo,
            sequence,
            msg=instantiate_msg,
        )
        return tx

    def get_handle_transaction(
        self,
        sender_address: Address,
        contract_address: Address,
        handle_msg: Any,
        amount: int,
        tx_fee: int,
        denom: Optional[str] = None,
        gas: int = 80000,
        memo: str = "",
        chain_id: Optional[str] = None,
    ) -> Optional[Any]:
        """
        Create a CosmWasm HandleMsg transaction.

        :param sender_address: the sender address of the message initiator.
        :param contract_address: the address of the smart contract.
        :param handle_msg: HandleMsg in JSON format.
        :param amount: funds to send along with the message.
        :param tx_fee: the transaction fee.
        :param denom: the denomination of tx fee and sent funds.
        :param gas: Maximum amount of gas to be used on executing command.
        :param memo: Any string comment.
        :param chain_id: the Chain ID of the CosmWasm transaction. Default is 1 (i.e. mainnet).
        :return: the unsigned CosmWasm HandleMsg
        """
        denom = denom if denom is not None else self.denom
        chain_id = chain_id if chain_id is not None else self.chain_id
        account_number, sequence = self._try_get_account_number_and_sequence(
            sender_address
        )
        execute_msg = {
            "type": "wasm/execute",
            "value": {
                "sender": sender_address,
                "contract": contract_address,
                "msg": handle_msg,
                "sent_funds": [{"amount": str(amount), "denom": denom}],
            },
        }
        tx = self._get_transaction(
            account_number,
            chain_id,
            tx_fee,
            denom,
            gas,
            memo,
            sequence,
            msg=execute_msg,
        )
        return tx

    @staticmethod
    @try_decorator(
        "Encountered exception when trying to execute wasm transaction: {}",
        logger_method=logger.warning,
    )
    def try_execute_wasm_transaction(
        tx_signed: Any, signed_tx_filename: str = "tx.signed"
    ) -> Optional[str]:
        """
        Execute a CosmWasm Transaction. QueryMsg doesn't require signing.

        :param tx_signed: the signed transaction.
        :param signed_tx_filename: name of the temp file the signed tx is written to.
        :return: the transaction digest
        """
        # The signed tx is round-tripped through a temp file because the
        # ``wasmcli`` binary only accepts a file path, not stdin.
        with tempfile.TemporaryDirectory() as tmpdirname:
            with open(os.path.join(tmpdirname, signed_tx_filename), "w") as f:
                f.write(json.dumps(tx_signed))
            command = [
                "wasmcli",
                "tx",
                "broadcast",
                os.path.join(tmpdirname, signed_tx_filename),
            ]
            stdout, _ = subprocess.Popen(  # nosec
                command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
            ).communicate()
            return stdout.decode("ascii")

    @staticmethod
    @try_decorator(
        "Encountered exception when trying to execute wasm query: {}",
        logger_method=logger.warning,
    )
    def try_execute_wasm_query(
        contract_address: Address, query_msg: Any
    ) -> Optional[str]:
        """
        Execute a CosmWasm QueryMsg. QueryMsg doesn't require signing.

        :param contract_address: the address of the smart contract.
        :param query_msg: QueryMsg in JSON format.
        :return: the message receipt
        """
        command = [
            "wasmcli",
            "query",
            "wasm",
            "contract-state",
            "smart",
            str(contract_address),
            json.dumps(query_msg),
        ]
        stdout, _ = subprocess.Popen(  # nosec
            command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        ).communicate()
        return stdout.decode("ascii")

    def get_transfer_transaction(  # pylint: disable=arguments-differ
        self,
        sender_address: Address,
        destination_address: Address,
        amount: int,
        tx_fee: int,
        tx_nonce: str,
        denom: Optional[str] = None,
        gas: int = 80000,
        memo: str = "",
        chain_id: Optional[str] = None,
        **kwargs,
    ) -> Optional[Any]:
        """
        Submit a transfer transaction to the ledger.

        :param sender_address: the sender address of the payer.
        :param destination_address: the destination address of the payee.
        :param amount: the amount of wealth to be transferred.
        :param tx_fee: the transaction fee.
        :param tx_nonce: verifies the authenticity of the tx
        :param denom: the denomination of tx fee and amount
        :param gas: the gas used.
        :param memo: memo to include in tx.
        :param chain_id: the chain ID of the transaction.
        :return: the transfer transaction
        """
        denom = denom if denom is not None else self.denom
        chain_id = chain_id if chain_id is not None else self.chain_id
        account_number, sequence = self._try_get_account_number_and_sequence(
            sender_address
        )
        transfer_msg = {
            "type": "cosmos-sdk/MsgSend",
            "value": {
                "amount": [{"amount": str(amount), "denom": denom}],
                "from_address": sender_address,
                "to_address": destination_address,
            },
        }
        tx = self._get_transaction(
            account_number,
            chain_id,
            tx_fee,
            denom,
            gas,
            memo,
            sequence,
            msg=transfer_msg,
        )
        return tx

    @staticmethod
    def _get_transaction(
        account_number: int,
        chain_id: str,
        tx_fee: int,
        denom: str,
        gas: int,
        memo: str,
        sequence: int,
        msg: Dict[str, Any],
    ) -> Dict[str, Any]:
        """
        Get a transaction.

        :param account_number: the account number.
        :param chain_id: the chain ID of the transaction.
        :param tx_fee: the transaction fee.
        :param denom: the denomination of tx fee and amount
        :param gas: the gas used.
        :param memo: memo to include in tx.
        :param msg: the transaction msg.
        :param sequence: the sequence.
        :return: the transaction
        """
        # Numeric fields are stringified as required by the amino JSON format.
        tx = {
            "account_number": str(account_number),
            "chain_id": chain_id,
            "fee": {
                "amount": [{"amount": str(tx_fee), "denom": denom}],
                "gas": str(gas),
            },
            "memo": memo,
            "msgs": [msg],
            "sequence": str(sequence),
        }
        return tx

    @try_decorator(
        "Encountered exception when trying to get account number and sequence: {}",
        logger_method=logger.warning,
    )
    def _try_get_account_number_and_sequence(
        self, address: Address
    ) -> Optional[Tuple[int, int]]:
        """
        Try get account number and sequence for an address.

        :param address: the address
        :return: a tuple of account number and sequence, or None on HTTP failure
        """
        # NOTE(review): callers unpack this result directly, so an HTTP failure
        # (None return) would raise TypeError at the call site — TODO confirm intended.
        result = None  # type: Optional[Tuple[int, int]]
        url = self.network_address + f"/auth/accounts/{address}"
        response = requests.get(url=url)
        if response.status_code == 200:
            result = (
                int(response.json()["result"]["value"]["account_number"]),
                int(response.json()["result"]["value"]["sequence"]),
            )
        return result

    def send_signed_transaction(self, tx_signed: Any) -> Optional[str]:
        """
        Send a signed transaction and wait for confirmation.

        :param tx_signed: the signed transaction
        :return: tx_digest, if present
        """
        # Dispatch on tx shape: CosmWasm txs go through the CLI, plain
        # transfers through the REST endpoint.
        if self.is_cosmwasm_transaction(tx_signed):
            tx_digest = self.try_execute_wasm_transaction(tx_signed)
        elif self.is_transfer_transaction(tx_signed):
            tx_digest = self._try_send_signed_transaction(tx_signed)
        else:  # pragma: nocover
            logger.warning(
                "Cannot send transaction. Unknown transaction type: {}".format(
                    tx_signed
                )
            )
            tx_digest = None
        return tx_digest

    @staticmethod
    def is_cosmwasm_transaction(tx_signed: Any) -> bool:
        """Check whether it is a cosmwasm tx."""
        try:
            _type = tx_signed["value"]["msg"][0]["type"]
            result = _type in ["wasm/store-code", "wasm/instantiate", "wasm/execute"]
        except KeyError:  # pragma: nocover
            result = False
        return result

    @staticmethod
    def is_transfer_transaction(tx_signed: Any) -> bool:
        """Check whether it is a transfer tx."""
        # NOTE(review): looks under a "tx" key here, but "value" in
        # is_cosmwasm_transaction — verify both envelope shapes are intended.
        try:
            _type = tx_signed["tx"]["msg"][0]["type"]
            result = _type in ["cosmos-sdk/MsgSend"]
        except KeyError:  # pragma: nocover
            result = False
        return result

    @try_decorator(
        "Encountered exception when trying to send tx: {}", logger_method=logger.warning
    )
    def _try_send_signed_transaction(self, tx_signed: Any) -> Optional[str]:
        """
        Try send the signed transaction.

        :param tx_signed: the signed transaction
        :return: tx_digest, if present
        """
        tx_digest = None  # type: Optional[str]
        url = self.network_address + "/txs"
        response = requests.post(url=url, json=tx_signed)
        if response.status_code == 200:
            tx_digest = response.json()["txhash"]
        return tx_digest

    def get_transaction_receipt(self, tx_digest: str) -> Optional[Any]:
        """
        Get the transaction receipt for a transaction digest.

        :param tx_digest: the digest associated to the transaction.
        :return: the tx receipt, if present
        """
        tx_receipt = self._try_get_transaction_receipt(tx_digest)
        return tx_receipt

    @try_decorator(
        "Encountered exception when trying to get transaction receipt: {}",
        logger_method=logger.warning,
    )
    def _try_get_transaction_receipt(self, tx_digest: str) -> Optional[Any]:
        """
        Try get the transaction receipt for a transaction digest.

        :param tx_digest: the digest associated to the transaction.
        :return: the tx receipt, if present
        """
        result = None  # type: Optional[Any]
        url = self.network_address + f"/txs/{tx_digest}"
        response = requests.get(url=url)
        if response.status_code == 200:
            result = response.json()
        return result

    def get_transaction(self, tx_digest: str) -> Optional[Any]:
        """
        Get the transaction for a transaction digest.

        :param tx_digest: the digest associated to the transaction.
        :return: the tx, if present
        """
        # Cosmos does not distinguish between transaction receipt and transaction
        tx_receipt = self._try_get_transaction_receipt(tx_digest)
        return tx_receipt

    def get_contract_instance(
        self, contract_interface: Dict[str, str], contract_address: Optional[str] = None
    ) -> Any:
        """
        Get the instance of a contract.

        :param contract_interface: the contract interface.
        :param contract_address: the contract address.
        :return: the contract instance
        """
        # Instance object not available for cosmwasm
        return None
class CosmWasmCLIWrapper:
    """Wrapper of the CosmWasm CLI."""
    # NOTE(review): empty placeholder — the actual ``wasmcli`` invocations
    # currently live on CosmosApi.try_execute_wasm_* above.
class CosmosFaucetApi(FaucetApi):
    """Cosmos testnet faucet API."""

    identifier = _COSMOS
    testnet_name = TESTNET_NAME

    def get_wealth(self, address: Address) -> None:
        """
        Get wealth from the faucet for the provided address.

        :param address: the address.
        :return: None
        """
        self._try_get_wealth(address)

    @staticmethod
    @try_decorator(
        "An error occured while attempting to generate wealth:\n{}",
        logger_method=logger.error,
    )
    def _try_get_wealth(address: Address) -> None:
        """
        Try to get wealth from the faucet for the provided address.

        :param address: the address.
        :return: None
        """
        # The faucet expects a form POST with the bech32 address; on success
        # the response body is the hash of the funding transaction.
        response = requests.post(
            url=COSMOS_TESTNET_FAUCET_URL, data={"Address": address}
        )
        if response.status_code == 200:
            tx_hash = response.text
            logger.info("Wealth generated, tx_hash: {}".format(tx_hash))
        else:  # pragma: no cover
            logger.warning(
                "Response: {}, Text: {}".format(response.status_code, response.text)
            )
|
nilq/baby-python
|
python
|
from datetime import datetime
from unittest import mock
import dateutil.relativedelta
from carbonserver.api.infra.repositories.repository_projects import SqlAlchemyRepository
from carbonserver.api.usecases.project.project_sum import ProjectSumsUsecase
# Fixture identifiers and a three-month reporting window ending "now".
PROJECT_ID = "e60afa92-17b7-4720-91a0-1ae91e409ba1"
END_DATE = datetime.now()
START_DATE = END_DATE - dateutil.relativedelta.relativedelta(months=3)
# Expected emissions total; must stay equal to the "emissions" field below.
EMISSIONS_SUM = 152.28955200363455
# One detailed-sum row, shaped like the output of
# SqlAlchemyRepository.get_project_detailed_sums.
PROJECT_WITH_DETAILS = {
    "project_id": PROJECT_ID,
    "name": "DataForGood",
    "description": "DataForGood Project",
    "emissions": 152.28955200363455,
    "cpu_power": 5760,
    "gpu_power": 2983.9739999999993,
    "ram_power": 806.0337192959997,
    "cpu_energy": 191.8251863024175,
    "gpu_energy": 140.01098718681496,
    "ram_energy": 26.84332784201141,
    "energy_consumed": 358.6795013312438,
    "duration": 7673204,
    "emissions_rate": 1.0984556074701752,
    "emissions_count": 64,
}
def test_sum_computes_for_project_id():
    """The detailed-sum use case surfaces the repository's emissions total unchanged."""
    repository: SqlAlchemyRepository = mock.Mock(spec=SqlAlchemyRepository)
    repository.get_project_detailed_sums.return_value = [PROJECT_WITH_DETAILS]
    usecase = ProjectSumsUsecase(repository)
    detailed_sums = usecase.compute_detailed_sum(PROJECT_ID, START_DATE, END_DATE)
    assert detailed_sums[0]["emissions"] == EMISSIONS_SUM
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
'''
Created on 2017-6-22
@author: hshl.ltd
'''
from __future__ import absolute_import, unicode_literals
import warnings
from sqlalchemy import orm
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from django.conf import settings
from django.dispatch import receiver
from django.core.signals import request_finished
from django.core.exceptions import ImproperlyConfigured
from sqlalchemy_django.middleware import get_current_request
class BaseQuery(orm.Query):
    """Query subclass installed as ``query_cls`` on the scoped session,
    adding dict-oriented convenience helpers (see ``SQLAlchemy.create_scoped_session``)."""

    def get_or_404(self, ident):
        # NOTE(review): unimplemented stub — always returns None instead of
        # fetching by primary key or raising a 404. TODO implement or remove.
        pass

    def first_or_404(self):
        # NOTE(review): despite the name this never raises a 404; it simply
        # delegates to ``first()`` and may return None.
        return self.first()

    def first_dict(self):
        # First row converted via the model's ``to_dict``, or None if empty.
        row = self.first()
        return None if row is None else row.to_dict()

    def all_dict(self):
        # All rows as dicts; the None guard mirrors ``first_dict`` although
        # ``Query.all()`` normally returns a list, not None.
        rows = self.all()
        if rows is None:
            return None
        return [row.to_dict() for row in rows]
class Model(object):
    """Base mixin for declarative models adding dict conversion helpers."""

    #: Query class used by :attr:`query`.
    #: Defaults to :class:`SQLAlchemy.Query`, which defaults to :class:`BaseQuery`.
    query_class = None

    #: Convenience property to query the database for instances of this model using the current session.
    #: Equivalent to ``db.session.query(Model)`` unless :attr:`query_class` has been changed.
    query = None

    # http://ju.outofmemory.cn/entry/200879
    def to_dict(self):
        """Return a dict mapping each mapped column name to its value."""
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}

    def merge(self, obj):
        """Copy values from ``obj`` onto matching, already-existing attributes.

        :param obj: a dict of attribute names to values; non-dict inputs and
            keys that do not correspond to an existing attribute are ignored.
        """
        if isinstance(obj, dict):
            # BUG FIX: ``dict.iteritems`` does not exist on Python 3 and raised
            # AttributeError; ``items`` works on both Python 2 and 3.
            for key, value in obj.items():
                if hasattr(self, key):
                    setattr(self, key, value)
class SQLAlchemy(object):
    """Bind SQLAlchemy sessions to the Django request cycle.

    A request-scoped session is created at construction time; when Django's
    ``request_finished`` signal fires, the session is (optionally) committed
    and always removed, so every request gets a fresh session.
    """

    def __init__(self, session_options=None, metadata=None,
                 query_class=BaseQuery, model_class=Model, bind_key='default'):
        self.config = self.init_config(bind_key)
        self.Query = query_class
        self.Session = self.create_scoped_session(session_options)
        self.Model = self.make_declarative_base(model_class, metadata)

        @receiver(request_finished, weak=False)
        def shutdown_session(sender, **kwargs):
            # Commit (if configured) and drop the request-scoped session once
            # the request finishes; errors here must never break the response.
            try:
                if self.config['SQLALCHEMY_COMMIT_ON_TEARDOWN']:
                    self.Session.commit()
                self.Session.remove()
            except Exception as e:
                print(e)

    def get_session(self):
        """Return the session bound to the current request scope."""
        session = self.Session()
        return session

    @property
    def metadata(self):
        """Metadata of the declarative base (covers all mapped tables)."""
        return self.Model.metadata

    def create_scoped_session(self, options=None):
        """Create a session factory scoped on the current Django request."""
        if options is None:
            options = {}
        options.setdefault('query_cls', self.Query)
        return orm.scoped_session(self.create_session(options), scopefunc=get_current_request)

    def create_session(self, options):
        """Build the engine from config and return a ``sessionmaker`` bound to it."""
        engine = create_engine(
            self.config['SQLALCHEMY_DATABASE_URI'], echo=self.config['SQLALCHEMY_ECHO'], pool_size=self.config['SQLALCHEMY_POOL_SIZE'])
        return orm.sessionmaker(bind=engine, **options)

    def make_declarative_base(self, model, metadata=None):
        """Creates the declarative base."""
        base = declarative_base(cls=model, metadata=metadata)
        if not getattr(base, 'query_class', None):
            base.query_class = self.Query
        return base

    def init_config(self, bind_key):
        """Load and normalise the ``bind_key`` entry of ``settings.SQLALCHEMY_DATABASES``.

        :raises ImproperlyConfigured: when the setting or the bind key is missing.
        """
        if not hasattr(settings, 'SQLALCHEMY_DATABASES'):
            raise ImproperlyConfigured(
                "SQLALCHEMY_DATABASES not found in settings"
            )
        sqlalchemy_config = settings.SQLALCHEMY_DATABASES
        if bind_key not in sqlalchemy_config:
            # BUG FIX: this branch previously reported the same message as the
            # missing-setting case above, hiding the actual problem (an
            # unknown bind key).
            raise ImproperlyConfigured(
                "bind_key '%s' not found in SQLALCHEMY_DATABASES" % bind_key
            )
        bind_config = sqlalchemy_config[bind_key]
        bind_config.setdefault('SQLALCHEMY_DATABASE_URI', 'sqlite:///:memory:')
        bind_config.setdefault('SQLALCHEMY_BINDS', None)
        bind_config.setdefault('SQLALCHEMY_NATIVE_UNICODE', None)
        bind_config.setdefault('SQLALCHEMY_ECHO', True)
        bind_config.setdefault('SQLALCHEMY_RECORD_QUERIES', None)
        bind_config.setdefault('SQLALCHEMY_POOL_SIZE', None)
        bind_config.setdefault('SQLALCHEMY_POOL_TIMEOUT', None)
        bind_config.setdefault('SQLALCHEMY_POOL_RECYCLE', None)
        bind_config.setdefault('SQLALCHEMY_MAX_OVERFLOW', None)
        bind_config.setdefault('SQLALCHEMY_COMMIT_ON_TEARDOWN', True)
        return bind_config
|
nilq/baby-python
|
python
|
import os
import requests
from dotenv import load_dotenv
# Resolve the .env file sitting next to this module so environment variables
# load regardless of the current working directory.
dotenv_path = os.path.join(os.path.dirname(__file__), '.env')
load_dotenv(dotenv_path)
# Slack incoming-webhook URL; how to generate one: https://www.youtube.com/watch?v=lEQ68HhpO4g
INCOMING_WEBHOOKS_ACCESS_URL=os.getenv("INCOMING_WEBHOOKS_ACCESS_URL")
def send_message(post_data, api_url, headers=None):
    """POST ``post_data`` as JSON to a Slack incoming-webhook URL.

    :param post_data: JSON-serialisable payload (e.g. from ``generate_post_data``).
    :param api_url: the webhook endpoint URL.
    :param headers: optional HTTP headers; defaults to a JSON content type.
    :return: the ``requests.Response`` object.
    """
    # BUG FIX: the headers dict used to be a mutable default argument, shared
    # across all calls; build it per call instead.
    if headers is None:
        headers = {'Content-Type': 'application/json'}
    response = requests.post(api_url, headers=headers, json=post_data)
    return response
def generate_post_data(markdown_texts):
    """Build a Slack Block Kit payload from markdown text(s).

    See https://api.slack.com/messaging/composing/layouts#attachments

    :param markdown_texts: a markdown string or a list of markdown strings.
    :return: a dict with one ``section`` block per input text.
    """
    # Accept a bare string for convenience; normalise to a list.
    # (isinstance replaces the fragile ``type(x) != list`` check, and the
    # leftover debug print of the payload has been removed.)
    if not isinstance(markdown_texts, list):
        markdown_texts = [markdown_texts]
    return {
        'blocks': [
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": text,
                },
            }
            for text in markdown_texts
        ]
    }
def send_markdown(text_or_list_of_texts, api_url=INCOMING_WEBHOOKS_ACCESS_URL):
    """Format the given markdown text(s) as Slack blocks and post them to the webhook."""
    return send_message(generate_post_data(text_or_list_of_texts), api_url)
def main():
    # Manual smoke test: send a fenced code block to the configured webhook.
    post_data = generate_post_data("```hellow!!```")
    send_message(post_data, api_url=INCOMING_WEBHOOKS_ACCESS_URL)

if __name__=='__main__':
    main()
|
nilq/baby-python
|
python
|
import os
import sys
import random
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.nn.functional as F
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import torchvision.utils
import imgaug as ia
from torch.utils.data import DataLoader,Dataset
from torch.autograd import Variable
from torch import optim
from imgaug import augmenters as iaa
from PIL import Image
from torchsummaryX import summary
def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):
    """Persist a training checkpoint, mirroring the best one seen so far.

    :param state: serialisable checkpoint payload (model/optimizer state etc.).
    :param is_best: when True, also copy the checkpoint to 'model_best.pth.tar'
        in the current working directory.
    :param filename: target path for the regular checkpoint.
    """
    # BUG FIX: ``shutil`` was never imported in this module, so is_best=True
    # raised NameError; import it locally where it is needed.
    import shutil

    torch.save(state, filename)
    if is_best:
        shutil.copyfile(filename, 'model_best.pth.tar')
def imshow(img, text=None, should_save=False, name=None):
    """Display a CHW image tensor with matplotlib, optionally annotated/saved.

    :param img: tensor with a ``.numpy()`` method, shape (C, H, W) — assumed; TODO confirm callers.
    :param text: optional caption drawn on the figure.
    :param should_save: when True, save the figure to ``name`` before showing.
    :param name: output path used when ``should_save`` is True.
    """
    npimg = img.numpy()
    plt.axis("off")
    if text:
        plt.text(75, 8, text, style='italic', fontweight='bold',
                 bbox={'facecolor': 'white', 'alpha': 0.8, 'pad': 10})
    # Transpose CHW -> HWC, the layout matplotlib expects.
    plt.imshow(np.transpose(npimg, (1, 2, 0)), cmap=plt.cm.gray)
    if should_save:
        plt.savefig(name)
    plt.show()
def show_plot(iteration, loss):
    """Plot loss values against iteration numbers and display the figure."""
    plt.plot(iteration, loss)
    plt.show()
class Augmenter():
    """Joint imgaug augmentation of a (normal, defect) image pair plus boxes.

    Applies the same deterministic augmentation sequence to both images and to
    the VOC-style bounding-box annotation dict so all three stay aligned.
    """

    def __init__(self, seq):
        # seq: an imgaug augmenter (e.g. iaa.Sequential) applied per sample.
        self.seq = seq

    def __call__(self, img_and_annotation):
        """Augment a (normal PIL image, defect PIL image, annotation dict) triple."""
        normal_image = img_and_annotation[0]
        defect_image = img_and_annotation[1]
        box_annotation_dict = img_and_annotation[2]
        # imgaug operates on numpy arrays, not PIL images.
        normal_image = np.array(normal_image)
        defect_image = np.array(defect_image)
        normal_image_aug, defect_image_aug, bbs_aug = self.augment_image(normal_image, defect_image,
                                                                         box_annotation_dict, self.seq)
        # Convert back to PIL for downstream torchvision-style transforms.
        normal_image_aug = Image.fromarray(normal_image_aug)
        defect_image_aug = Image.fromarray(defect_image_aug)
        return normal_image_aug, defect_image_aug, bbs_aug

    def augment_image(self, normal_image, defect_image, box_annotation_dict, seq):
        """Run one deterministic pass of ``seq`` over both images and the boxes."""
        bbs = self.transform_imgaug_style_boxes(box_annotation_dict)
        # to_deterministic freezes the random parameters so the exact same
        # transform is applied to both images and the bounding boxes.
        seq_det = seq.to_deterministic()
        normal_image_aug = seq_det.augment_images([normal_image])[0]
        defect_image_aug = seq_det.augment_images([defect_image])[0]
        bbs_aug = seq_det.augment_bounding_boxes([bbs])[0]
        # Drop boxes pushed fully outside the image and clip partial overlaps.
        bbs_aug = bbs_aug.remove_out_of_image().cut_out_of_image()
        augmented_box = self.transofrm_annotation_information_style(box_annotation_dict, bbs_aug)
        return normal_image_aug, defect_image_aug, augmented_box

    # NOTE(review): method name has a typo ("transofrm"); kept because callers
    # in this class reference it — consider renaming in a coordinated change.
    @staticmethod
    def transofrm_annotation_information_style(box_annotation_dict, bbs_aug):
        """Write augmented imgaug boxes back into the VOC-style dict (mutates it)."""
        assert isinstance(box_annotation_dict, dict)
        box_annotation_keys = box_annotation_dict.keys()
        assert "size" in box_annotation_keys
        assert "object" in box_annotation_keys
        size_tag_keys = box_annotation_dict["size"].keys()
        assert "width" in size_tag_keys
        assert "height" in size_tag_keys
        assert "depth" in size_tag_keys
        assert isinstance(box_annotation_dict["object"], list)
        for _object in box_annotation_dict["object"]:
            _object_keys = _object.keys()
            assert "name" in _object_keys
            assert "xmin" in _object_keys
            assert "ymin" in _object_keys
            assert "xmax" in _object_keys
            assert "ymax" in _object_keys
        assert isinstance(bbs_aug, ia.BoundingBoxesOnImage)
        # Replace the original objects in place with the augmented coordinates.
        objects = box_annotation_dict["object"]
        objects.clear()
        for i in range(len(bbs_aug.bounding_boxes)):
            augmented_box = bbs_aug.bounding_boxes[i]
            objects.append(
                {
                    "name": augmented_box.label,
                    "xmin": augmented_box.x1,
                    "ymin": augmented_box.y1,
                    "xmax": augmented_box.x2,
                    "ymax": augmented_box.y2
                }
            )
        return box_annotation_dict

    @staticmethod
    def transform_imgaug_style_boxes(box_annotation_dict):
        """Convert the VOC-style dict into an imgaug BoundingBoxesOnImage."""
        assert isinstance(box_annotation_dict, dict)
        box_annotation_keys = box_annotation_dict.keys()
        assert "size" in box_annotation_keys
        assert "object" in box_annotation_keys
        size_tag_keys = box_annotation_dict["size"].keys()
        assert "width" in size_tag_keys
        assert "height" in size_tag_keys
        assert "depth" in size_tag_keys
        assert isinstance(box_annotation_dict["object"], list)
        for _object in box_annotation_dict["object"]:
            _object_keys = _object.keys()
            assert "name" in _object_keys
            assert "xmin" in _object_keys
            assert "ymin" in _object_keys
            assert "xmax" in _object_keys
            assert "ymax" in _object_keys
        image_width = int(box_annotation_dict["size"]["width"])
        image_height = int(box_annotation_dict["size"]["height"])
        bbs = ia.BoundingBoxesOnImage([], shape=(image_height, image_width))
        for _object in box_annotation_dict["object"]:
            name = _object["name"]
            xmin = int(_object["xmin"])
            ymin = int(_object["ymin"])
            xmax = int(_object["xmax"])
            ymax = int(_object["ymax"])
            bbs.bounding_boxes.append(ia.BoundingBox(x1=xmin,
                                                     x2=xmax,
                                                     y1=ymin,
                                                     y2=ymax,
                                                     label=name))
        return bbs
# Recognised image / annotation file extensions (lower-case, with leading dot).
# BUG FIX: 'webp' lacked its leading dot, unlike every other entry, so any
# filename merely *ending* in "webp" (e.g. "foo_webp") matched as an image.
IMG_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff', '.webp']
LABEL_EXTENSIONS = ['.xml']
def has_file_allowed_extension(filename, extensions):
    """Checks if a file is an allowed extension.

    Args:
        filename (string): path to a file
        extensions (iterable of strings): extensions to consider (lowercase)

    Returns:
        bool: True if the filename ends with one of given extensions
    """
    # Case-insensitive match: lower-case once, then test each suffix.
    lowered = filename.lower()
    return any(map(lowered.endswith, extensions))
class DefectDataset(torch.utils.data.Dataset):
    """Siamese-pair dataset of (normal, defect) image pairs with VOC annotations.

    Each subfolder of ``root`` is one sample and is expected to contain two
    ``.tif`` images named ``normal`` / ``defect`` plus one ``.xml`` VOC
    annotation file. ``__getitem__`` returns a pair of grayscale tensors and a
    binary label: 0.0 = same image twice, 1.0 = normal vs defect pair.
    """

    def __init__(self, root, transform=None):
        # transform: optional Augmenter-style callable taking
        # [normal_img, defect_img, annotation] and returning the augmented triple.
        self.folder = self._find_each_folder(root)
        self.root = root
        self.transform = transform
        self.samples = self.load_data()
        self.classes = ["defect"]

    def load_data(self):
        """Scan ``root`` and collect, per subfolder, its image paths and xml path."""
        datas = list()
        directory = os.path.expanduser(self.root)
        for target in sorted(self.folder.keys()):
            d = os.path.join(directory, target)
            imgs = dict()
            label = None
            for a in os.scandir(d):
                name = a.name.split(".")[0]
                ext = a.name.split(".")[-1]
                if ext == "tif":
                    # Keyed by basename; "normal" and "defect" are expected.
                    imgs[name] = os.path.join(d, a.name)
                elif ext == "xml":
                    label = os.path.join(d, a.name)
            datas.append([imgs, label])
        return datas

    def __getitem__(self, index):
        """Return (image1, image2, label) with label 0.0 (same) or 1.0 (different)."""
        imgs, label = self.samples[index]
        label = self._parse_voc(label)
        normal_img = self.pil_loader(imgs["normal"])
        defect_img = self.pil_loader(imgs["defect"])
        # NOTE(review): ``Config`` is not defined or imported in this module —
        # presumably a module-level settings object elsewhere; TODO confirm.
        if self.transform != None:
            # NOTE(review): aug_label (and defect_aug_img in the "same" branch)
            # are computed but unused below — verify this is intentional.
            normal_aug_img, defect_aug_img, aug_label = self.transform([normal_img, defect_img, label])
            if random.choice([True, False]):
                # same image: original vs its augmented version, label 0
                image1 = normal_img.resize((Config.RESIZE[0], Config.RESIZE[1]), Image.ANTIALIAS)
                image2 = normal_aug_img
                label = np.array([0.], dtype=np.float)
            else:
                # difference image: normal vs defect, label 1
                image1 = normal_img.resize((Config.RESIZE[0], Config.RESIZE[1]), Image.ANTIALIAS)
                image2 = defect_img.resize((Config.RESIZE[0], Config.RESIZE[1]), Image.ANTIALIAS)
                label = np.array([1.], dtype=np.float)
        elif self.transform == None:
            if random.choice([True, False]):
                # same image: normal paired with itself, label 0
                image1 = normal_img.resize((Config.RESIZE[0], Config.RESIZE[1]), Image.ANTIALIAS)
                image2 = normal_img.resize((Config.RESIZE[0], Config.RESIZE[1]), Image.ANTIALIAS)
                label = np.array([0.], dtype=np.float)
            else:
                # difference image: normal vs defect, label 1
                image1 = normal_img.resize((Config.RESIZE[0], Config.RESIZE[1]), Image.ANTIALIAS)
                image2 = defect_img.resize((Config.RESIZE[0], Config.RESIZE[1]), Image.ANTIALIAS)
                label = np.array([1.], dtype=np.float)
        # Convert to single-channel grayscale tensors.
        image1 = image1.convert('L')
        image2 = image2.convert('L')
        image1 = torchvision.transforms.ToTensor()(image1)
        image2 = torchvision.transforms.ToTensor()(image2)
        label = torch.from_numpy(label)
        return image1, image2, label

    def __len__(self):
        return len(self.samples)

    def pil_loader(self, path):
        # open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)
        with open(path, 'rb') as f:
            img = Image.open(f)
            return img.convert('RGB')

    def _find_each_folder(self, dir):
        """Map each immediate subfolder name of ``dir`` to an integer index."""
        if sys.version_info >= (3, 5):
            # Faster and available in Python 3.5 and above
            classes = [d.name for d in os.scandir(dir) if d.is_dir()]
        else:
            classes = [d for d in os.listdir(dir) if os.path.isdir(os.path.join(dir, d))]
        classes.sort()
        class_to_idx = {classes[i]: i for i in range(len(classes))}
        return class_to_idx

    def _convert_box_label_to_yolo_label(self, label, classes_list):
        """Convert a parsed VOC dict into YOLO rows [cls, cx, cy, w, h] (relative)."""
        assert isinstance(label, dict)
        assert isinstance(classes_list, list)
        for cls in classes_list:
            assert isinstance(cls, str)
        root_keys = label.keys()
        size_keys = label["size"].keys()
        number_of_objects = len(label["object"])
        assert "size" in root_keys
        assert "object" in root_keys
        assert "width" in size_keys
        assert "height" in size_keys
        if number_of_objects == 0:
            # NOTE(review): leftover debug print.
            print("here")
            return []
        yolo_label = list()
        image_size = {
            "width": float(label["size"]["width"]),
            "height": float(label["size"]["height"]),
        }
        for _object in label["object"]:
            _object_keys = _object.keys()
            assert "name" in _object_keys
            assert "xmin" in _object_keys
            assert "ymin" in _object_keys
            assert "xmax" in _object_keys
            assert "ymax" in _object_keys
            name = _object["name"]
            cls = float(classes_list.index(name))
            box_coordinate = {
                "xmin": float(_object["xmin"]),
                "ymin": float(_object["ymin"]),
                "xmax": float(_object["xmax"]),
                "ymax": float(_object["ymax"]),
            }
            yolo_coordinate = self._convert_coordinate(image_size, box_coordinate)
            yolo_coordinate.insert(0, cls)
            yolo_label.append(yolo_coordinate)
        return yolo_label

    @staticmethod
    def _parse_voc(annotation_path):
        """Parse a VOC XML annotation file into a dict of size + object boxes."""
        import xml.etree.ElementTree as Et
        assert isinstance(annotation_path, str)
        xml_file = open(annotation_path, "r")
        tree = Et.parse(xml_file)
        element_list = list()
        for elem in tree.iter():
            element_list.append(elem.tag)
        assert "size" in element_list
        assert "width" in element_list
        assert "height" in element_list
        assert "object" in element_list
        assert "name" in element_list
        assert "bndbox" in element_list
        assert "xmin" in element_list
        assert "ymin" in element_list
        assert "xmax" in element_list
        assert "ymax" in element_list
        result = dict()
        root = tree.getroot()
        size_tag = root.find("size")
        result["size"] = {
            "width": size_tag.find("width").text,
            "height": size_tag.find("height").text,
            "depth": size_tag.find("depth").text
        }
        result["object"] = list()
        objects = root.findall("object")
        assert objects
        for _object in objects:
            result["object"].append({
                "name": _object.find("name").text,
                "xmin": _object.find("bndbox").find("xmin").text,
                "ymin": _object.find("bndbox").find("ymin").text,
                "xmax": _object.find("bndbox").find("xmax").text,
                "ymax": _object.find("bndbox").find("ymax").text
            })
        return result

    @staticmethod
    def _convert_coordinate(image_size, box_coordinate):
        """Convert absolute corner coords to relative YOLO center/size coords."""
        image_size_keys = image_size.keys()
        box_coordinate_keys = box_coordinate.keys()
        assert "width" in image_size_keys
        assert "height" in image_size_keys
        assert "xmin" in box_coordinate_keys
        assert "ymin" in box_coordinate_keys
        assert "xmax" in box_coordinate_keys
        assert "ymax" in box_coordinate_keys
        assert isinstance(image_size, dict)
        assert isinstance(box_coordinate, dict)
        assert isinstance(image_size["width"], float)
        assert isinstance(image_size["height"], float)
        assert isinstance(box_coordinate["xmin"], float)
        assert isinstance(box_coordinate["ymin"], float)
        assert isinstance(box_coordinate["xmax"], float)
        assert isinstance(box_coordinate["ymax"], float)
        x_of_box = (box_coordinate["xmin"] + box_coordinate["xmax"]) / 2.0
        y_of_box = (box_coordinate["ymin"] + box_coordinate["ymax"]) / 2.0
        width_of_box = box_coordinate["xmax"] - box_coordinate["xmin"]
        height_of_box = box_coordinate["ymax"] - box_coordinate["ymin"]
        relative_x_of_center = x_of_box / image_size["width"]
        relative_y_of_center = y_of_box / image_size["height"]
        relative_box_width = width_of_box / image_size["width"]
        relative_box_height = height_of_box / image_size["height"]
        return [relative_x_of_center, relative_y_of_center,
                relative_box_width, relative_box_height]
class SiameseNetwork(nn.Module):
    """Siamese embedding network: two inputs share one CNN + FC tower.

    Each branch maps a 1-channel image of spatial size ``size`` to a
    5-dimensional embedding; ``forward`` runs both inputs through the
    same weights and returns both embeddings.
    """
    def __init__(self, size):
        # (height, width) of the input images.  The FC layer below assumes
        # the conv stack preserves spatial size (ReflectionPad2d(1) + 3x3 conv).
        self.size = size
        super(SiameseNetwork, self).__init__()
        self.device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
        # Three conv stages: 1 -> 4 -> 8 -> 8 channels, each ReLU + BatchNorm.
        self.cnn1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(1, 4, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(4))
        self.cnn2 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(4, 8, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(8)
        )
        self.cnn3 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(8, 8, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(8),
        )
        # Embedding head: flattened 8*h*w features -> 500 -> 500 -> 5.
        self.fc1 = nn.Sequential(
            nn.Linear(8 * self.size[0] * self.size[1], 500),
            nn.ReLU(inplace=True),
            nn.Linear(500, 500),
            nn.ReLU(inplace=True),
            nn.Linear(500, 5))
    def forward_once(self, x):
        """Embed a single batch of images (one branch of the twin)."""
        output = self.cnn1(x)
        output = self.cnn2(output)
        output = self.cnn3(output)
        output = output.view(output.size()[0], -1)  # flatten per sample
        output = self.fc1(output)
        return output
    def forward(self, input1, input2):
        """Return the embeddings of both inputs, computed with shared weights."""
        output1 = self.forward_once(input1)
        output2 = self.forward_once(input2)
        return output1, output2
    def summary(self):
        # `summary` is an external helper imported elsewhere in this file
        # (presumably a torchsummary-style printer -- TODO confirm).
        summary(self, torch.zeros((1, 1, self.size[0], self.size[1])), input2=torch.zeros((1, 1, self.size[0], self.size[1])))
class ContrastiveLoss(torch.nn.Module):
    """Margin-based contrastive loss over pairs of embeddings.

    For ``label == 0`` (similar pair) the loss is the squared distance;
    for ``label == 1`` (dissimilar pair) it is the squared hinge
    ``max(margin - distance, 0) ** 2``, averaged over the batch.
    """

    def __init__(self, margin=2.0):
        super(ContrastiveLoss, self).__init__()
        self.margin = margin  # minimum desired distance for dissimilar pairs

    def forward(self, output1, output2, label):
        """Return the mean contrastive loss for a batch of embedding pairs."""
        distance = F.pairwise_distance(output1, output2)
        similar_term = (1 - label) * distance.pow(2)
        dissimilar_term = label * torch.clamp(self.margin - distance, min=0.0).pow(2)
        return torch.mean(similar_term + dissimilar_term)
class Config():
    """Static configuration holder for the siamese-network training script."""
    training_dir = "./dataset/training/"  # root folder of the training data
    testing_dir = "./dataset/testing/"  # root folder of the test data
    train_batch_size = 64
    train_number_epochs = 100
    # (height, width) images are resized to by the augmentation pipeline
    # (index 0 is passed as "height" to iaa.Resize below).
    RESIZE = (250, 250)
if __name__ == "__main__":
    # Augmentation Demo
    # Build the imgaug pipeline: resize to the configured shape, then apply
    # two randomly chosen ops from the list (brightness, translation, blur,
    # small rotation).
    seq = iaa.Sequential([
        iaa.Resize({"height": Config.RESIZE[0], "width": Config.RESIZE[1]}),
        iaa.SomeOf(2, [iaa.Multiply((1, 1.1)), # change brightness, doesn't affect BBs
                       iaa.Affine(
                           translate_px={"x": 5, "y": 5},
                           scale=(1, 1)
                       ), # translate by 40/60px on x/y axis, and scale to 50-70%, affects BBs
                       iaa.GaussianBlur(sigma=(0.0, 0.1)),
                       iaa.Affine(rotate=(-10, 10)),
                       ])
        #iaa.Sharpen(alpha=(0, 0.0001)),
        #iaa.Fliplr(0.5)
    ])
    #seq = iaa.Sometimes(0.5, iaa.Crop(percent=(0.4)))
    #seq = iaa.Sequential([iaa.Crop(percent=(0.3))])
    # Augmenter and DefectDataset are project helpers defined elsewhere in
    # this file/package -- TODO confirm their contracts.
    composed = transforms.Compose([Augmenter(seq)])
    siamese_dataset = DefectDataset(root=Config.training_dir, transform=composed)
    # Small loader used only to visualise one example batch.
    vis_dataloader = DataLoader(siamese_dataset,
                                shuffle=True,
                                num_workers=0,
                                batch_size=8)
    dataiter = iter(vis_dataloader)
    example_batch = next(dataiter)
    concatenated = torch.cat((example_batch[0],example_batch[1]),0)
    imshow(torchvision.utils.make_grid(concatenated))
    print(example_batch[2].numpy())
    print(example_batch[0].shape)
    train_dataloader = DataLoader(siamese_dataset,
                                  shuffle=True,
                                  num_workers=0,
                                  batch_size=Config.train_batch_size)
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    net = SiameseNetwork(size=(250, 250))
    if device.type == 'cpu':
        model = torch.nn.DataParallel(net)
    else:
        model = torch.nn.DataParallel(net, device_ids=[0, 1]).cuda()
    model.to(device)
    criterion = ContrastiveLoss()
    # NOTE(review): the optimizer is built from `net.parameters()` while the
    # DataParallel wrapper `model` is what gets called -- the wrapper shares
    # the wrapped module's parameters, but confirm this is intentional.
    optimizer = optim.Adam(net.parameters(),lr = 0.0005)
    counter = []
    loss_history = []
    iteration_number= 0
    # Standard training loop over the contrastive objective.
    for epoch in range(0, Config.train_number_epochs):
        for i, data in enumerate(train_dataloader, 0):
            img0, img1, label = data
            img0, img1, label = img0.to(device), img1.to(device), label.to(device)
            optimizer.zero_grad()
            output1, output2 = model(img0, img1)
            label = label.double()
            output1 = output1.double()
            output2 = output2.double()
            loss_contrastive = criterion(output1, output2, label)
            loss_contrastive.backward()
            optimizer.step()
            # Record the loss every 10 batches for the plot below.
            if i % 10 == 0:
                print("Epoch number {}\n Current loss {}\n".format(epoch, loss_contrastive.item()))
                iteration_number += 10
                counter.append(iteration_number)
                loss_history.append(loss_contrastive.item())
    show_plot(counter, loss_history)
    # save_checkpoint is a project helper -- persists the final state dicts.
    save_checkpoint({
        'epoch': epoch + 1,
        'arch': "YOLOv1",
        'state_dict': model.state_dict(),
        'optimizer': optimizer.state_dict(),
    }, False, filename=os.path.join("./", 'result.pth.tar'))
    # TEST
    """
    siamese_dataset = DefectDataset(root=Config.testing_dir, transform=None)
    test_dataloader = DataLoader(siamese_dataset, num_workers=6, batch_size=1, shuffle=True)
    dataiter = iter(test_dataloader)
    x0, _, _ = next(dataiter)
    for i in range(10):
        _, x1, label2 = next(dataiter)
        concatenated = torch.cat((x0, x1), 0)
        output1, output2 = net(Variable(x0).cuda(), Variable(x1).cuda())
        euclidean_distance = F.pairwise_distance(output1, output2)
        imshow(torchvision.utils.make_grid(concatenated), 'Dissimilarity: {:.2f}'.format(euclidean_distance.item()))
    """
|
nilq/baby-python
|
python
|
from django.http import Http404
from django.test.testcases import TestCase
from corehq.apps.app_manager.models import (
AdvancedModule,
Application,
BuildProfile,
GlobalAppConfig,
LatestEnabledBuildProfiles,
Module,
)
from corehq.apps.app_manager.views.utils import get_default_followup_form_xml
from corehq.apps.domain.models import Domain
class TestGetDefaultFollowupForm(TestCase):
    """The generated default follow-up form should attach with the given name."""

    def test_default_followup_form(self):
        app = Application.new_app('domain', "Untitled Application")
        module = app.add_module(AdvancedModule.new_module('parent', None))
        module.case_type = 'parent'
        module.unique_id = 'id_parent_module'
        form_xml = get_default_followup_form_xml(context={
            'lang': None,
            'default_label': "Default label message"
        })
        form = app.new_form(0, "Followup Form", None, attachment=form_xml)
        # The name is visible both on the returned form and via the app tree.
        self.assertEqual(form.name['en'], "Followup Form")
        self.assertEqual(app.modules[0].forms[0].name['en'], "Followup Form")
        questions = app.modules[0].forms[0].get_questions([], include_triggers=True, include_groups=True)
        self.assertEqual(questions[0]['label'], " Default label message ")
class TestGlobalAppConfig(TestCase):
    """Tests for GlobalAppConfig prompting and build-profile-specific releases.

    setUpClass builds an app with three saved versions (v2/v3 released) and a
    LatestEnabledBuildProfiles row pinning the 'english' profile at v2.
    """
    domain = 'test-latest-app'

    @classmethod
    def setUpClass(cls):
        super(TestGlobalAppConfig, cls).setUpClass()
        cls.project = Domain(name=cls.domain)
        cls.project.save()
        cls.build_profile_id = 'english'
        app = Application(
            domain=cls.domain,
            name='foo',
            langs=["en"],
            version=1,
            modules=[Module()],
            build_profiles={
                cls.build_profile_id: BuildProfile(langs=['en'], name='English only'),
            }
        )  # app is v1
        app.save()  # app is now v2
        cls.v2_build = app.make_build()
        cls.v2_build.is_released = True
        cls.v2_build.save()  # v2 is starred
        app.save()  # app is now v3
        cls.v3_build = app.make_build()
        cls.v3_build.is_released = True
        cls.v3_build.save()  # v3 is starred
        app.save()  # app is v4
        # Add a build-profile-specific release at v2
        cls.latest_profile = LatestEnabledBuildProfiles(
            domain=cls.domain,
            app_id=app.get_id,
            build_profile_id=cls.build_profile_id,
            version=cls.v2_build.version,
            build_id=cls.v2_build.get_id,
            active=True,
        )
        cls.latest_profile.save()
        cls.app = app

    @classmethod
    def tearDownClass(cls):
        cls.project.delete()
        super(TestGlobalAppConfig, cls).tearDownClass()

    def test_apk_prompt(self):
        from corehq.apps.builds.utils import get_default_build_spec
        latest_apk = get_default_build_spec().version
        # (apk_prompt setting, expected get_latest_apk_version() payload)
        test_cases = [
            ('off', {}),
            ('on', {'value': latest_apk, 'force': False}),
            ('forced', {'value': latest_apk, 'force': True}),
        ]
        for config, response in test_cases:
            app_config = self.app.global_app_config
            app_config.apk_prompt = config
            app_config.save()
            config = GlobalAppConfig.by_app_id(self.domain, self.app.master_id)
            self.assertEqual(
                config.get_latest_apk_version(),
                response
            )

    def test_apk_prompt_preset(self):
        preset_apk = '2.20.0/latest'  # some random version
        test_cases = [
            ('off', {}),
            ('on', {'value': '2.20.0', 'force': False}),
            ('forced', {'value': '2.20.0', 'force': True}),
        ]
        app_config = self.app.global_app_config
        app_config.apk_version = preset_apk
        app_config.save()
        for config, response in test_cases:
            app_config = self.app.global_app_config
            app_config.apk_prompt = config
            app_config.save()
            config = GlobalAppConfig.by_app_id(self.domain, self.app.master_id)
            self.assertEqual(
                config.get_latest_apk_version(),
                response
            )

    def test_app_prompt(self):
        app_config = self.app.global_app_config
        app_config.save()
        # Without a build profile the latest released build (v3) applies;
        # the 'english' profile is pinned at v2 via LatestEnabledBuildProfiles.
        test_cases = [
            ('off', '', {}),
            ('on', '', {'value': self.v3_build.version, 'force': False}),
            ('forced', '', {'value': self.v3_build.version, 'force': True}),
            ('off', self.build_profile_id, {}),
            ('on', self.build_profile_id, {'value': self.v2_build.version, 'force': False}),
            ('forced', self.build_profile_id, {'value': self.v2_build.version, 'force': True}),
        ]
        for config, build_profile_id, response in test_cases:
            app_config = self.app.global_app_config
            app_config.app_prompt = config
            app_config.save()
            config = GlobalAppConfig.by_app_id(self.domain, self.app.master_id)
            self.assertEqual(
                config.get_latest_app_version(build_profile_id),
                response
            )

    def test_app_prompt_preset(self):
        preset_app = 21  # some random version
        test_cases = [
            ('off', {}),
            ('on', {'value': preset_app, 'force': False}),
            ('forced', {'value': preset_app, 'force': True}),
        ]
        app_config = self.app.global_app_config
        app_config.app_version = preset_app
        app_config.save()
        for config, response in test_cases:
            app_config = self.app.global_app_config
            app_config.app_prompt = config
            app_config.save()
            config = GlobalAppConfig.by_app_id(self.domain, self.app.master_id)
            self.assertEqual(
                config.get_latest_app_version(build_profile_id=''),
                response
            )

    def test_load_from_build(self):
        # Asking for the config of a build (not a master app) must fail.
        config = self._fresh_config(self.v3_build.id)
        with self.assertRaises(AssertionError):
            config.get_latest_app_version(build_profile_id='')

    def test_missing_app(self):
        config = self._fresh_config('missing_id')
        with self.assertRaises(Http404):
            config.get_latest_app_version(build_profile_id='')

    def test_latest_profile_serialize(self):
        self.assertEqual(
            self.latest_profile.to_json({self.app.get_id: self.app.name}),
            {
                'id': self.latest_profile.id,
                'app_id': self.app.get_id,
                'active': True,
                'version': self.v2_build.version,
                'build_profile_id': self.build_profile_id,
                'app_name': 'foo',
                'profile_name': 'English only'
            }
        )

    def _fresh_config(self, app_id):
        # Helper: load the config for app_id with prompting enabled.
        config = GlobalAppConfig.by_app_id(self.domain, app_id)
        config.app_prompt = 'on'
        return config
|
nilq/baby-python
|
python
|
# Gradient Norm Scaling/Clipping
from keras import optimizers
# configure sgd with gradient norm scaling
# i.e. changing the derivatives of the loss function to have a given vector norm when
# the L2 vector norm (sum of the squared values) of the gradient vector exceeds
# a threshold value.
opt = optimizers.SGD(lr=0.01, momentum=0.9, clipnorm=1.0)
# configure sgd with gradient norm clipping
# clipping the derivatives of the loss function to have a given value if a gradient value is less
# than a negative threshold or more than the positive threshold.
# NOTE: this rebinds `opt`, so only the clipvalue optimizer survives here.
opt = optimizers.SGD(lr=0.01, momentum=0.9, clipvalue=1.0)
#######################################################################
# regression predictive modeling problem
from sklearn.datasets import make_regression
from matplotlib import pyplot
# generate regression dataset
X, y = make_regression(n_samples=1000, n_features=20, noise=0.1, random_state=1)
# histogram of target variable
pyplot.subplot(131)
pyplot.hist(y)
# boxplot of target variable
pyplot.subplot(132)
pyplot.boxplot(y)
# scatter plot of the first input feature against the target
pyplot.subplot(133)
# Bug fix: the original called pyplot.show(X, y), which is an invalid call
# (show() takes no data arguments) -- scatter() was clearly intended.  The
# single show() now comes last so all three subplots render in one figure.
pyplot.scatter(X[:, 0], y)
pyplot.show()
####################################################################
# mlp with unscaled data for the regression problem
from sklearn.datasets import make_regression
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import SGD
from matplotlib import pyplot
# generate regression dataset
X, y = make_regression(n_samples=1000, n_features=20, noise=0.1, random_state=1)
# split into train and test
n_train = 500
trainX, testX = X[:n_train, :], X[n_train:, :]
trainy, testy = y[:n_train], y[n_train:]
# define model
model = Sequential() # the model with a linear stack of layers
model.add(Dense(25, input_dim=20, activation='relu', kernel_initializer='he_uniform'))
model.add(Dense(1, activation='linear'))
# compile model
# model.compile(loss='mean_squared_error', optimizer=SGD(lr=0.01, momentum=0.9))
# NOTE(review): the name says "scaling" but clipvalue performs *clipping*.
opt_scaling = SGD(lr=0.01, momentum=0.9, clipvalue=5.0)
model.compile(loss='mean_squared_error', optimizer=opt_scaling)
# fit model
history = model.fit(trainX, trainy, validation_data=(testX, testy), epochs=100, verbose=0)
# evaluate the model
train_mse = model.evaluate(trainX, trainy, verbose=0)
test_mse = model.evaluate(testX, testy, verbose=0)
print('Train: %.3f, Test: %.3f' % (train_mse, test_mse))
# plot loss during training
pyplot.title('Mean Squared Error')
pyplot.plot(history.history['loss'], label='train')
pyplot.plot(history.history['val_loss'], label='test')
pyplot.legend()
pyplot.show()
# The model above is NOT able to learn for the problem, resulting in nans.
# Solutions:
# 1. The traditional solution is to rescale the target variable using either standardization or normalization.
# 2. using Gradient Norm Scaling: replace the optimizer with:
# (`optimizers` comes from the `from keras import optimizers` earlier in this file)
opt_scaling = optimizers.SGD(lr=0.01, momentum=0.9, clipnorm=1.0)
# 3. using Gradient Norm Clipping: replace the optimizer with:
opt_clipping = SGD(lr=0.01, momentum=0.9, clipvalue=5.0)
|
nilq/baby-python
|
python
|
class DEQue:
    """Double-ended queue backed by a Python list.

    ``enqueue_first``/``dequeue_first`` operate on the front (index 0),
    ``enqueue_last``/``dequeue_last`` on the back.  Peeks and dequeues on an
    empty deque print a message and return None (original behavior kept).
    """
    __slots__ = '_length', '_data'

    def __init__(self):
        self._length = 0  # element count, kept in sync with _data
        self._data = []

    def __len__(self):
        return self._length

    def is_empty(self):
        return len(self) == 0

    def first(self):
        """Return (without removing) the front element, or None if empty."""
        if self.is_empty():
            print('DEQue is empty')
            return None
        return self._data[0]

    def last(self):
        """Return (without removing) the back element, or None if empty."""
        if self.is_empty():
            print('DEQue is empty')
            return None
        return self._data[-1]

    def enqueue_first(self, val):
        """Insert val at the front."""
        self._data.insert(0, val)
        self._length += 1

    def enqueue_last(self, val):
        """Append val at the back."""
        self._data.append(val)
        self._length += 1

    def dequeue_first(self):
        """Remove and return the front element, or None if empty."""
        if self.is_empty():
            print('DEQue is empty')
            return None
        # pop(0) removes by position (the original used remove(value), which
        # deletes the first *equal* element).
        value = self._data.pop(0)
        # Bug fix: _length was never decremented, so len()/is_empty() drifted.
        self._length -= 1
        return value

    def dequeue_last(self):
        """Remove and return the back element, or None if empty."""
        if self.is_empty():
            print('DEQue is empty')
            return None
        value = self._data.pop()
        self._length -= 1  # bug fix: keep the length in sync
        return value
# Quick manual exercise of the DEQue API: push one item at each end,
# peek both ends, pop both ends, then report emptiness.
deque = DEQue()
deque.enqueue_first(23)
deque.enqueue_last(24)
print(deque.first())
print(deque.last())
deque.dequeue_first()
deque.dequeue_last()
print(deque.is_empty())
|
nilq/baby-python
|
python
|
from tcprecord import TCPRecord, TCPRecordStream
from httprecord import HTTPRecordStream
from tcpsession import TCPSession, tcp_flags, SeqException
from httpsession import parse_http_streams, HTTPParsingError, HTTPResponse, HTTPRequest
from errors import *
import sys
import printing
from printing import print_tcp_session, print_results
# ========================= NEW CODE =============================== #
def make_tcp_sessions_ng(session):
    """Generator: group a capture's (ip, tcp) packets into TCPSession pairs.

    Yields ``(connection, reverse_connection)`` tuples.  A new pair is
    started lazily; when ``TCPSession.packet`` rejects a packet (falsy
    return, i.e. the packet belongs to a new session), the completed pair is
    yielded and the packet is retried against a fresh pair (the inner
    ``while`` re-dispatches the same packet).  The final, possibly
    unfinished, pair is yielded at the end -- it may be ``(None, None)``.
    """
    connection = None # key == directed_key
    reverse_connection = None
    for ip,tcp in session:
        directed_key = TCPSession.directed_key(ip.src,ip.dst,tcp.sport,tcp.dport)
        not_repeat = None
        while not not_repeat:
            if not connection:
                # Lazily create both directions of the session and link them.
                connection=TCPSession(directed_key)
                reversed_key = TCPSession.directed_key(ip.dst,ip.src,tcp.dport,tcp.sport)
                reverse_connection=TCPSession(reversed_key)
                connection.pair = reverse_connection
                reverse_connection.pair = connection
            tcp.string_flags = tcp_flags(tcp.flags)
            #tcp.partof=set()
            # Dispatch the packet to whichever direction it belongs to.
            if directed_key == connection.directed_key:
                not_repeat=connection.packet(tcp)
            elif directed_key == reverse_connection.directed_key:
                not_repeat=reverse_connection.packet(tcp)
            else:
                assert False
            if not not_repeat:
                # Packet rejected: the current pair is complete; emit it and
                # retry this packet against a brand-new pair.
                yield (connection,reverse_connection)
                connection=None
                reverse_connection=None
    yield (connection,reverse_connection)
def handle_lite_tcp_session_ng(lite_tcp_session):
    """Reassemble each TCP session pair and print its TCP/HTTP record streams.

    Python 2 code (`print` statements / `print >> sys.stderr`).  Per-pair
    stream errors are reported and skipped; connection-level errors abort the
    whole session; fatal errors are reported and re-raised.
    """
    unpacked_content=list(lite_tcp_session.packets())
    try:
        for connection, reverse_connection in make_tcp_sessions_ng(unpacked_content):
            try:
                #these calls create side effects on packets
                #TODO: refactor it
                stream = connection.stream()
                rstream = reverse_connection.stream()
                tcp_record_stream = TCPRecordStream(connection.content, reverse_connection.content)
                http_record_stream = HTTPRecordStream(tcp_record_stream)
                print str(tcp_record_stream)
                print str(http_record_stream)
            except(StreamClassError) as err:
                print >> sys.stderr, err
    except(ConnectionClassError) as err:
        print >> sys.stderr, err
    except(FatalClassError) as err:
        print >> sys.stderr, err
        raise
|
nilq/baby-python
|
python
|
from .fid import FIDScore
|
nilq/baby-python
|
python
|
# Copyright 2016 Ifwe Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
MCollective-based deploy strategy class.
"""
import json
import re
import tds.utils
import tds.utils.processes as processes
from .base import DeployStrategy
import logging
log = logging.getLogger('tds')
class TDSMCODeployStrategy(DeployStrategy):
    """MCO deploy strategy class."""

    def __init__(self, bin, **_kwargs):
        """Initialize object.

        :param bin: path to the MCollective 'mco' executable.
            NOTE(review): the parameter name shadows the builtin ``bin``;
            kept unchanged for interface compatibility.
        """
        self.mco_bin = bin

    @tds.utils.debug
    def _process_mco_command(self, mco_cmd, retry):
        """Run a given MCollective 'mco' command.

        :param mco_cmd: full argv list for the mco invocation.
        :param retry: remaining retries after a discovery failure (0 hosts).
        :return: ``(success_bool, message)`` tuple.
        """
        log.debug('Running MCollective command')
        log.debug(5, 'Command is: %s' % ' '.join(mco_cmd))
        proc = processes.run(mco_cmd, expect_return_code=None)
        stdout, stderr = proc.stdout, proc.stderr
        if proc.returncode:
            return (False, 'The mco process failed to run successfully.\n'
                    'return code is %r.\n'
                    'Stdout: %r\n'
                    'Stderr: %r' % (proc.returncode, stdout, stderr))
        mc_output = None
        summary = None
        # Extract the JSON output and summary line
        for line in stdout.split('\n'):
            if not line:
                continue
            if line.startswith('{'):
                mc_output = json.loads(line)
            if line.startswith('Finished'):
                summary = line.strip()
        # Ensure valid response and extract information
        if mc_output is None or summary is None:
            return (False, 'No output or summary information returned '
                    'from mco process')
        log.debug(summary)
        # Summary looks like "Finished processing <done> / <discovered> ...".
        match = re.search(r'processing (\d+) / (\d+) ', summary)
        if match is None:
            return (False, 'Error parsing summary line.')
        # Virtual hosts in dev tend to time out unpredictably, probably
        # because vmware is slow to respond when the hosts are not
        # active. Subsequent retries after a timeout work better.
        if match.group(2) == '0' and retry > 0:
            log.debug('Discovery failure, trying again.')
            return self._process_mco_command(mco_cmd, retry-1)
        # NOTE(review): Python 2 iteration (iteritems).  Because of the
        # for/else construct, the final 'Unknown/unparseable' return below is
        # unreachable: the loop either returns a failure or falls through to
        # the else-branch success return (also when mc_output is empty).
        for _host, hostinfo in mc_output.iteritems():
            if hostinfo['exitcode'] != 0:
                return (False, hostinfo['stderr'].strip())
        else:
            return (True, 'Deploy successful')
        return (False, 'Unknown/unparseable mcollective output: %s' %
                stdout)

    @tds.utils.debug
    def restart_host(self, dep_host, app, retry=4):
        """Restart application on a given host"""
        log.debug('Restarting application on host %r', dep_host)
        mco_cmd = [self.mco_bin, 'tds', '--discovery-timeout', '4',
                   '--timeout', '60', '-W', 'hostname=%s' % dep_host,
                   app, 'restart']
        return self._process_mco_command(mco_cmd, retry)

    @tds.utils.debug
    def deploy_to_host(self, dep_host, app, version, retry=4):
        """Deploy a given version of an application to a host via mco."""
        log.debug('Deploying to host %r', dep_host)
        mco_cmd = [self.mco_bin, 'tds', '--discovery-timeout', '4',
                   '--timeout', '60', '-W', 'hostname=%s' % dep_host,
                   app, version]
        return self._process_mco_command(mco_cmd, retry)
|
nilq/baby-python
|
python
|
"""
A file just to hold the version number, allows automated version increasing.
"""
SEMANTIC = '0.1.4-SNAPSHOT'
BUILD_TIME = 'UNKNOWN'
try:
with open('build-time.txt') as f:
CONTENTS = f.readline().rstrip()
if CONTENTS:
BUILD_TIME = CONTENTS
except IOError:
pass
|
nilq/baby-python
|
python
|
import unittest
from iterable_collections import collect
class TestMap(unittest.TestCase):
    """``collect(...).map`` should agree with the builtin ``map`` for every input kind."""

    def test_list(self):
        expected = list(map(lambda x: x + 1, list(range(10))))
        mapped = collect(list(range(10))).map(lambda x: x + 1)
        self.assertEqual(mapped.list(), expected)

    def test_lists(self):
        expected = list(map(lambda x: x + 1, list(range(10))))
        mapped = collect(list(range(10))).map(lambda x: x + 1)
        self.assertEqual(mapped.list(), expected)

    def test_set(self):
        expected = set(map(lambda x: x + 1, list(range(10))))
        mapped = collect(set(range(10))).map(lambda x: x + 1)
        self.assertEqual(mapped.set(), expected)

    def test_tuple(self):
        expected = tuple(map(lambda x: x + 1, list(range(10))))
        mapped = collect(tuple(range(10))).map(lambda x: x + 1)
        self.assertEqual(mapped.tuple(), expected)

    def test_iterator(self):
        expected = list(map(lambda x: x + 1, list(range(10))))
        mapped = collect(iter(range(10))).map(lambda x: x + 1)
        self.assertEqual(mapped.list(), expected)

    def test_dict(self):
        # Iterating a dict yields its keys, so map sees 'a' and 'b'.
        expected = list(map(lambda x: x + 'b', {'a': 1, 'b': 2}))
        mapped = collect({'a': 1, 'b': 2}).map(lambda x: x + 'b')
        self.assertEqual(mapped.list(), expected)

    def test_dict_items(self):
        expected = list(map(lambda x: x[1] + 1, {'a': 1, 'b': 2}.items()))
        mapped = collect({'a': 1, 'b': 2}.items()).map(lambda x: x[1] + 1)
        self.assertEqual(mapped.list(), expected)
|
nilq/baby-python
|
python
|
# Copyright 2016 Joel Dunham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains some multithreading worker and queue logic plus the functionality -- related
to foma compilation and LM estimation -- that the worker thread initiates.
The foma worker compiles foma FST phonology, morphology and morphophonology scripts
and estimates morpheme language models. Having a worker perform these tasks in a separate
thread from that processing the HTTP request allows us to immediately respond to the user.
The foma worker can only run a callable that is a global in
:mod:`onlinelinguisticdatabase.lib.foma_worker` and which takes keyword arguments.
Example usage::
from onlinelinguisticdatabase.lib.foma_worker import foma_worker_q
foma_worker_q.put({
'id': h.generate_salt(),
'func': 'compile_foma_script',
'args': {'model_name': u'Phonology', 'model_id': phonology.id,
'script_dir_path': phonology_dir_path, 'user_id': session['user'].id,
'verification_string': u'defined phonology: ', 'timeout': h.phonology_compile_timeout}
})
Cf. http://www.chrismoos.com/2009/03/04/pylons-worker-threads.
For an introduction to Python threading, see
http://www.ibm.com/developerworks/aix/library/au-threadingpython/.
"""
import Queue
import threading
import logging
from uuid import uuid4
import onlinelinguisticdatabase.lib.helpers as h
from onlinelinguisticdatabase.model.meta import Session
import onlinelinguisticdatabase.model as model
log = logging.getLogger(__name__)
################################################################################
# WORKER THREAD & QUEUE
################################################################################
# Task queue feeding the foma worker threads.  maxsize=1, so a producer blocks
# until a worker has taken the previous message off the queue.
foma_worker_q = Queue.Queue(1)
class FomaWorkerThread(threading.Thread):
    """Worker that pulls task messages off ``foma_worker_q`` and runs them.

    A message is a dict whose 'func' key names a module-level callable and
    whose 'args' dict holds keyword arguments for it.  (Python 2 code.)
    """
    def run(self):
        while True:
            msg = foma_worker_q.get()
            try:
                # Look the callable up by name in this module's globals.
                globals()[msg.get('func')](**msg.get('args'))
            except Exception, e:
                log.warn('Unable to process in worker thread: %s' % e)
            foma_worker_q.task_done()
def start_foma_worker():
    """Spawn two daemon foma worker threads consuming ``foma_worker_q``.

    Called in :mod:`onlinelinguisticdatabase.config.environment.py`.
    """
    for _ in range(2):
        worker = FomaWorkerThread()
        worker.setDaemon(True)
        worker.start()
################################################################################
# PHONOLOGY
################################################################################
def compile_phonology(**kwargs):
    """Compile the foma script of a phonology and save it to the db with values that indicate compilation success.

    :param kwargs['phonology_id']: id of the phonology to compile.
    :param kwargs['timeout']: seconds allowed for the foma compile.
    :param kwargs['user_id']: id of the user recorded as the modifier.
    """
    phonology = Session.query(model.Phonology).get(kwargs['phonology_id'])
    phonology.compile(kwargs['timeout'])
    phonology.datetime_modified = h.now()
    phonology.modifier_id = kwargs['user_id']
    Session.commit()
################################################################################
# MORPHOLOGY
################################################################################
def generate_and_compile_morphology(**kwargs):
    """Generate a foma script for a morphology and (optionally) compile it.
    :param int kwargs['morphology_id']: id of a morphology.
    :param bool kwargs['compile']: if True, the script will be generated *and* compiled.
    :param int kwargs['user_id']: id of the user model performing the generation/compilation.
    :param float kwargs['timeout']: how many seconds to wait before killing the foma compile process.
    """
    morphology = Session.query(model.Morphology).get(kwargs['morphology_id'])
    unknown_category = h.unknown_category
    # Best-effort: failures are logged but the metadata update below still runs.
    try:
        morphology.write(unknown_category)
    except Exception, e:
        log.warn(e)
        pass
    if kwargs.get('compile', True):
        try:
            morphology.compile(kwargs['timeout'])
        except Exception, e:
            log.warn(e)
            pass
    morphology.generate_attempt = unicode(uuid4())
    morphology.modifier_id = kwargs['user_id']
    morphology.datetime_modified = h.now()
    Session.commit()
################################################################################
# MORPHEME LANGUAGE MODEL
################################################################################
def generate_language_model(**kwargs):
    """Write the requisite files (corpus, vocab, ARPA, LMTrie) of a morpheme LM to disk.
    :param str kwargs['morpheme_language_model_id']: ``id`` value of a morpheme LM.
    :param int/float kwargs['timeout']: seconds to allow for ARPA file creation.
    :param str kwargs['user_id']: ``id`` value of an OLD user.
    :returns: ``None``; side-effect is to change relevant attributes of LM object.
    """
    lm = Session.query(model.MorphemeLanguageModel).get(kwargs['morpheme_language_model_id'])
    trie_path = lm.get_file_path('trie')
    # Remember the trie's modification time so we can tell below whether
    # generate_trie() actually produced a new file.
    trie_mod_time = lm.get_modification_time(trie_path)
    lm.generate_succeeded = False
    # Each step records its own failure message; later steps still run
    # (original best-effort behavior, so only the last message survives).
    try:
        lm.write_corpus()
    except Exception, e:
        lm.generate_message = u'Error writing the corpus file. %s' % e
    try:
        lm.write_vocabulary()
    except Exception, e:
        lm.generate_message = u'Error writing the vocabulary file. %s' % e
    try:
        lm.write_arpa(kwargs['timeout'])
    except Exception, e:
        lm.generate_message = u'Error writing the ARPA file. %s' % e
    try:
        lm.generate_trie()
    except Exception, e:
        lm.generate_message = u'Error generating the LMTrie instance. %s' % e
    else:
        # Success is judged by the trie file's mtime having changed.
        if lm.get_modification_time(trie_path) != trie_mod_time:
            lm.generate_succeeded = True
            lm.generate_message = u'Language model successfully generated.'
        else:
            lm.generate_message = u'Error generating the LMTrie instance.'
    lm.generate_attempt = unicode(uuid4())
    lm.modifier_id = kwargs['user_id']
    lm.datetime_modified = h.now()
    Session.commit()
def compute_perplexity(**kwargs):
    """Try to compute a morpheme LM's perplexity and record the outcome on the model.

    :param kwargs['morpheme_language_model_id']: id of the morpheme LM.
    :param kwargs['timeout']: seconds allowed for the computation.
    :param kwargs['user_id']: id of the user recorded as the modifier.
    """
    lm = Session.query(model.MorphemeLanguageModel).get(kwargs['morpheme_language_model_id'])
    iterations = 5
    try:
        lm.perplexity = lm.compute_perplexity(kwargs['timeout'], iterations)
    except Exception:
        lm.perplexity = None
    # Success flag simply mirrors whether a perplexity value was obtained.
    lm.perplexity_computed = lm.perplexity is not None
    lm.perplexity_attempt = unicode(uuid4())
    lm.modifier_id = kwargs['user_id']
    lm.datetime_modified = h.now()
    Session.commit()
################################################################################
# MORPHOLOGICAL PARSER (MORPHOPHONOLOGY)
################################################################################
def generate_and_compile_parser(**kwargs):
    """Write the parser's morphophonology FST script to file and compile it if ``compile_`` is True.
    Generate the language model and pickle it.
    """
    parser = Session.query(model.MorphologicalParser).get(kwargs['morphological_parser_id'])
    parser.changed = False
    parser.write()
    if kwargs.get('compile', True):
        parser.compile(kwargs['timeout'])
    parser.modifier_id = kwargs['user_id']
    parser.datetime_modified = h.now()
    # `changed` is presumably flipped by write()/compile() -- TODO confirm;
    # when it is, stale cache entries are dropped.
    if parser.changed:
        parser.cache.clear(persist=True)
    Session.commit()
|
nilq/baby-python
|
python
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2018, Fraunhofer FKIE/CMS, Alexander Tiderko
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Fraunhofer nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import division, absolute_import, print_function, unicode_literals
import fkie_multimaster_msgs.grpc.launch_pb2 as lmsg
from .common import utf8
from .host import get_hostname
from .url import nmduri, nmdport
# Shorthand aliases for the lmsg Argument value-type enum members.
STRING = lmsg.Argument.ValueType.Value('STRING')
INT32 = lmsg.Argument.ValueType.Value('INT32')
DOUBLE = lmsg.Argument.ValueType.Value('DOUBLE')
BOOL = lmsg.Argument.ValueType.Value('BOOL')
LIST = lmsg.Argument.ValueType.Value('LIST')
class StartConfig():
    def __init__(self, package, binary):
        '''
        :param str package: name of the package containing the binary to launch.
        :param str binary: name of the executable to launch.
        '''
        self.package = package
        self.binary = binary
        self.config_path = ''
        self.binary_path = ''
        self.name = ''
        self.namespace = ''
        self.fullname = ''
        self.prefix = ''
        self.cwd = ''
        self.env = {}  # environment variables: name -> value (see fill_msg)
        self.remaps = {}  # remappings: from-name -> to-name (see fill_msg)
        self.params = {}
        self.clear_params = []
        self.args = []
        # masteruri is used for cases where the NMD uri is needed (see nmduri).
        self.masteruri = None
        self.host = None  # host where to run the node; None = unspecified
        self.loglevel = ''
        self.logformat = ''
        self.respawn = False
        self.respawn_delay = 30
        self.respawn_max = 0
        self.respawn_min_runtime = 0
def __repr__(self):
params = "name=%s" % self.name
params += ", ns=%s" % self.namespace
params += ", package=%s" % self.package
params += ", binary=%s" % self.binary
params += ", prefix=%s" % self.prefix
params += ", cwd=%s" % self.cwd
params += ", masteruri=%s" % self.masteruri
params += ", host=%s" % self.host
params += ", loglevel=%s" % self.loglevel
params += ", respawn=%s" % self.respawn
return "<StartConfig %s/>" % params
@property
def hostname(self):
'''
:return: host name from host_masteruri if it is not None.
'''
if self.host:
return get_hostname(self.host)
return None
@property
def nmduri(self):
'''
:return: the nmd uri where to launch the node from host_masteruri if it is not None.
'''
if self.host:
try:
return nmduri(self.host, prefix='')
except ValueError:
return '%s:%d' % (self.host, nmdport(self.masteruri))
return None
def _msg_type(self, value):
valtype = type(value)
if valtype == int:
return INT32
if valtype == float:
return DOUBLE
if valtype == bool:
return BOOL
if valtype == list:
return LIST
return STRING
@classmethod
def _from_msg_type(cls, value, value_type):
if value_type == INT32:
return int(value)
if value_type == DOUBLE:
return float(value)
if value_type == BOOL:
return value.lower() in ("yes", "true", "t", "1")
if value_type == LIST:
try:
return eval(value)
except Exception:
return []
return value
def to_msg(self):
msg = lmsg.StartConfig(package=self.package, binary=self.binary)
self.fill_msg(msg)
return msg
def fill_msg(self, msg):
msg.package = self.package
msg.binary = self.binary
if self.binary_path:
msg.binary_path = self.binary_path
if self.name:
msg.name = self.name
if self.namespace:
msg.namespace = self.namespace
if self.fullname:
msg.fullname = self.fullname
if self.prefix:
msg.prefix = self.prefix
if self.cwd:
msg.cwd = self.cwd
if self.env:
msg.env.extend([lmsg.Argument(name=name, value=value) for name, value in self.env.items()])
if self.remaps:
msg.remaps.extend([lmsg.Remapping(from_name=name, to_name=value) for name, value in self.remaps.items()])
if self.params:
msg.params.extend([lmsg.Argument(name=name, value=utf8(value), value_type=self._msg_type(value)) for name, value in self.params.items()])
if self.clear_params:
msg.clear_params.extend(self.clear_params)
if self.args:
msg.args.extend(self.args)
if self.masteruri:
msg.masteruri = self.masteruri
if self.host:
msg.host = self.host
msg.loglevel = self.loglevel
msg.respawn = self.respawn
msg.respawn_delay = self.respawn_delay
msg.respawn_max = self.respawn_max
msg.respawn_min_runtime = self.respawn_min_runtime
@classmethod
def from_msg(cls, msg):
startcfg = StartConfig(msg.package, msg.binary)
startcfg.binary_path = msg.binary_path
startcfg.name = msg.name
startcfg.namespace = msg.namespace
startcfg.fullname = msg.fullname
startcfg.prefix = msg.prefix
startcfg.cwd = msg.cwd
startcfg.env = {env.name: env.value for env in msg.env}
startcfg.remaps = {remap.from_name: remap.to_name for remap in msg.remaps}
startcfg.params = {param.name: cls._from_msg_type(param.value, param.value_type) for param in msg.params}
startcfg.clear_params = list(msg.clear_params)
startcfg.args = list(msg.args)
startcfg.masteruri = msg.masteruri
startcfg.host = msg.host
startcfg.loglevel = msg.loglevel
startcfg.respawn = msg.respawn
startcfg.respawn_delay = msg.respawn_delay
startcfg.respawn_max = msg.respawn_max
startcfg.respawn_min_runtime = msg.respawn_min_runtime
return startcfg
|
nilq/baby-python
|
python
|
# Generated by Django 3.0.2 on 2021-05-11 11:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the DRFDisbursement model (auto-generated by Django 3.0.2)."""
    # Depends on the latest 'banks' and 'loans' migrations because of the two
    # ForeignKey columns created below.
    dependencies = [
        ('banks', '0002_bankcode_otp_enabled'),
        ('loans', '0021_loanrequests'),
    ]
    operations = [
        migrations.CreateModel(
            name='DRFDisbursement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('has_data', models.BooleanField(default=False)),
                ('status', models.BooleanField(default=False)),
                ('response_id', models.CharField(blank=True, max_length=299, null=True)),
                ('request_date', models.CharField(blank=True, max_length=299, null=True)),
                ('response_date', models.CharField(blank=True, max_length=299, null=True)),
                ('response_code', models.CharField(blank=True, max_length=299, null=True)),
                ('customer_id', models.CharField(blank=True, max_length=200, null=True)),
                ('authorisation_code', models.CharField(blank=True, max_length=200, null=True)),
                ('account_number', models.CharField(blank=True, max_length=200, null=True)),
                ('amount', models.CharField(blank=True, max_length=200, null=True)),
                ('mandate_reference', models.CharField(blank=True, max_length=200, null=True)),
                ('bank', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='banks.BankCode')),
                ('loan', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='loans.Loan')),
            ],
            options={
                'verbose_name': 'DRF Disbursement',
                'verbose_name_plural': 'DRF Disbursements',
                'db_table': 'DRF Disbursement',
            },
        ),
    ]
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
from argparse import ArgumentParser, FileType
from collections import OrderedDict
from datetime import datetime
import logging
from json import dumps
from sys import stdout
from time import sleep
from coloredlogs import install as coloredlogs_install
from ssnapshot.ssnapshot import (
create_account_cpu_usage_summary,
create_account_cputime_remaining_summary,
create_fairshare_summaries,
create_node_summaries,
create_partition_cpu_count_summary,
create_partition_cpu_load_summary,
create_partition_memory_summary,
create_partition_node_state_summary,
create_reservation_summaries,
create_top_users_summaries,
sinfo_ttl_cache,
squeue_ttl_cache,
sstat_ttl_cache,
)
def create_arg_parser() -> ArgumentParser:
    """Build the command-line interface for ssnapshot.

    The parser exposes verbosity/daemon options, an output destination,
    flags selecting which summary tables to produce, and a mutually
    exclusive group of output-format switches.
    """
    parser = ArgumentParser(
        description='ssnapshot returns a brief summary of the status of slurm',
    )
    parser.add_argument(
        '--verbose', '-v',
        default=0,
        action='count',
        help='0×v = ERRORs, 1×v = WARNINGs, 2×v = INFOs and 3×v = DEBUGs',
    )
    parser.add_argument(
        '--daemonize', '-d',
        default=False,
        action='store_true',
        help='run in daemon mode',
    )
    parser.add_argument(
        '--sleep', '-s',
        default=300,
        type=int,
        help='Number of seconds to sleep between runs in daemon mode',
    )
    parser.add_argument(
        '--outfile', '-o',
        default=stdout,
        type=FileType('w'),
        help='Where to write output. Default is stdout',
    )
    # Every table-selection flag appends its constant to the same 'tables' list.
    table_flags = (
        ('--accounts', '-a', 'accounts', 'Show account summary information. (Default: False)'),
        ('--fairshare', '-f', 'fairshare', 'Show fairshare summary information. (Default: False)'),
        ('--nodes', '-n', 'nodes', 'Show node summary information. (Default: False)'),
        ('--partitions', '-p', 'partitions', 'Show partition summary information. (Default: False)'),
        ('--reservations', '-r', 'reservations', 'Display Reservation information. (Default: False)'),
        ('--top-users', '-t', 'topusers', 'Display Top Users. (Default: False)'),
    )
    for long_opt, short_opt, const_value, help_text in table_flags:
        parser.add_argument(
            long_opt, short_opt,
            dest='tables',
            action='append_const',
            const=const_value,
            help=help_text,
        )
    # Output formats are mutually exclusive; each stores its own name in 'output'.
    format_help = (
        ('json', 'Output is JSON'),
        ('html', 'Output is HTML'),
        ('markdown', 'Output is markdown'),
        ('prometheus', 'Output is for prometheus exporter'),
    )
    output_group = parser.add_mutually_exclusive_group()
    for format_name, help_text in format_help:
        output_group.add_argument(
            f'--{format_name}',
            dest='output',
            action='store_const',
            const=format_name,
            help=help_text,
        )
    parser.set_defaults(
        output='markdown',
        tables=[],
        human_readable=True,
    )
    return parser
def generate_markdown(output: dict) -> str:
    """Render the snapshot data as a markdown document.

    :param output: mapping of section name to either the special 'header'
        entry ({'value': ..., 'time': ...}) or a table entry
        ({'type': 'dataframe', 'dataframe': ...}).
    :return: markdown text with one H1 title and one H2 section per table.
    """
    lines = []
    header = output.get('header')
    if header:
        title = f'{header.get("value")}'
        time = header.get('time')
        # Only render the timestamp when present; previously a missing time
        # was rendered literally as 'None' after the title.
        suffix = f' ({time})' if time else ''
        lines.append(f'# {title}{suffix}')
    for name, value in output.items():
        output_type = value.get('type')
        if output_type == 'dataframe':
            table_md = value.get('dataframe').reset_index().to_markdown(index=False, floatfmt="0.4f")
            lines.append(f'## {name}\n{table_md}\n\n')
    return '\n'.join(lines)
def generate_html(output: dict) -> str:
    """Render the snapshot data as an HTML fragment.

    :param output: mapping of section name to either the special 'header'
        entry ({'value': ..., 'time': ...}) or a table entry
        ({'type': 'dataframe', 'dataframe': ...}).
    :return: HTML with one <h1> title and one <h2> + table per section.
    """
    lines = []
    header = output.get('header')
    if header:
        title = f'{header.get("value")}'
        time = header.get('time')
        # Only render the timestamp when present; previously a missing time
        # was rendered literally as 'None' after the title.
        suffix = f' ({time})' if time else ''
        lines.append(f'<h1>{title}{suffix}</h1>')
    for name, value in output.items():
        output_type = value.get('type')
        if output_type == 'dataframe':
            table_html = value.get('dataframe').reset_index().to_html(index=False)
            lines.append(f'<h2>{name}</h2>\n{table_html}\n')
    return '\n'.join(lines)
def generate_json(output: dict) -> str:
    """Serialize the snapshot data to a JSON string.

    Mutates *output* in place: the header timestamp is stringified and each
    dataframe entry is replaced by its plain-dict representation before
    dumping the whole structure.
    """
    for section, entry in output.items():
        if section == 'header':
            stamp = entry.get('time')
            if stamp:
                entry['time'] = str(stamp)
        if entry.get('type') == 'dataframe':
            entry['dataframe'] = entry.get('dataframe').reset_index().to_dict()
    return dumps(output, indent=2)
def generate_prometheus(output: dict) -> str:
    """Render the snapshot data in Prometheus exposition format.

    Each dataframe row yields one sample per column, named
    ``ssnapshot_<table>`` with the index level(s) and the column name as
    labels. Non-dataframe entries (e.g. the header) are skipped.
    """
    lines = []
    for key, value in output.items():
        if value.get('type') != 'dataframe':
            continue
        table_name = key.lower().replace(' ', '_')
        dataframe = value.get('dataframe')
        index_names = [name.lower().replace(' ', '_') for name in dataframe.index.names]
        for row_index, row in dataframe.iterrows():
            # A single-level index yields a scalar; normalize to a tuple so
            # the label formatting below can always enumerate it.
            if not isinstance(row_index, tuple):
                row_index = (row_index, )
            logging.debug(row_index)
            label_string = ", ".join([
                f'{index_name}="{row_index[counter]}"' for counter, index_name in enumerate(index_names)
            ])
            logging.debug(label_string)
            for column_number, column in enumerate(dataframe.columns):
                column_name = column.lower().replace(' ', '_').replace('/', 'per')
                # .iloc makes the positional access explicit; row[column_number]
                # relied on the deprecated integer fallback of label indexing.
                lines.append(
                    f'ssnapshot_{table_name}{{{label_string}, label="{column_name}"}} '
                    f'{row.iloc[column_number]:.6f}')
    return '\n'.join(lines) + '\n'
def main():
    """Entry point: parse CLI arguments, collect the requested slurm
    summaries and write them out once, or repeatedly in daemon mode."""
    arg_parser = create_arg_parser()
    args = arg_parser.parse_args()
    # map the -v count to a log level (more v's -> more verbose)
    if args.verbose == 0:
        coloredlogs_install(level='ERROR')
    if args.verbose == 1:
        coloredlogs_install(level='WARNING')
    if args.verbose == 2:
        coloredlogs_install(level='INFO')
    if args.verbose >= 3:
        coloredlogs_install(level='DEBUG')
    # pick the renderer matching the requested output format
    output_method = {
        'html': generate_html,
        'json': generate_json,
        'markdown': generate_markdown,
        'prometheus': generate_prometheus,
    }.get(args.output)
    if args.output == 'prometheus':
        args.human_readable = False
    # summary builders to run for each selectable table name
    summary_functions = {
        'accounts': [create_account_cpu_usage_summary, create_account_cputime_remaining_summary],
        'fairshare': [create_fairshare_summaries],
        'nodes': [create_node_summaries],
        'partitions': [
            create_partition_memory_summary,
            create_partition_cpu_count_summary,
            create_partition_cpu_load_summary,
            create_partition_node_state_summary,
        ],
        'reservations': [create_reservation_summaries],
        'topusers': [create_top_users_summaries],
    }
    while True:
        # drop cached slurm query results so each iteration sees fresh data
        for cache in sinfo_ttl_cache, squeue_ttl_cache, sstat_ttl_cache:
            cache.clear()
        output = OrderedDict([('header', {'value': 'Slurm Snapshot', 'time': datetime.now()})])
        summaries = []
        for table in args.tables:
            for summary_function in summary_functions.get(table, []):
                summaries.append(summary_function())
        for summary in summaries:
            for table_name, data in summary.items():
                output[table_name] = {'type': 'dataframe', 'dataframe': data}
        output_string = ''
        if output_method:
            output_string = output_method(output)
        if output_string:
            try:
                # rewind and truncate so daemon mode rewrites the file instead of appending
                args.outfile.truncate(0)
                args.outfile.seek(0, 0)
            except OSError:  # expected for stdout
                pass
            args.outfile.write(output_string)
            args.outfile.flush()
        if args.daemonize:
            sleep(args.sleep)
        else:
            break
# Script entry point.
if __name__ == '__main__':
    main()
|
nilq/baby-python
|
python
|
import itk
import numpy as np
from segmantic.prepro import core
from segmantic.prepro.core import make_image
def test_extract_slices(labelfield: core.Image3) -> None:
    """Slices extracted along z keep the in-plane spacing and each contains its slice index."""
    slices_xy = core.extract_slices(labelfield, axis=2)
    assert slices_xy[0].GetSpacing()[0] == labelfield.GetSpacing()[0]
    assert slices_xy[0].GetSpacing()[1] == labelfield.GetSpacing()[1]
    # 'image_slice' instead of 'slice': don't shadow the builtin
    for k, image_slice in enumerate(slices_xy):
        print(type(image_slice))
        slice_view = itk.array_view_from_image(image_slice)
        assert np.all(slice_view == k)
def test_pad_crop_center(labelfield: core.Image3) -> None:
    """Padding then center-cropping back to the original size is a lossless round trip."""
    padded = core.pad(labelfield, target_size=(9, 9, 9))
    cropped = core.crop_center(padded, target_size=(5, 5, 5))
    assert labelfield.GetSpacing() == cropped.GetSpacing()
    assert labelfield.GetOrigin() == cropped.GetOrigin()
    assert np.all(core.as_array(cropped) == core.as_array(labelfield))
    # 'center_slice' instead of 'slice': don't shadow the builtin
    center_slice = core.crop_center(labelfield, target_size=(5, 5, 1))
    size = itk.size(center_slice)
    assert size[2] == 1
def test_resample() -> None:
    """Halving the spacing doubles the image extent along every axis."""
    source = make_image(shape=(3, 3), spacing=(2.0, 2.0), value=1.0, pixel_type=itk.F)
    source[1, 1] = 0.0
    # go from (2.0, 2.0) to (1.0, 1.0) spacing, i.e. double the resolution
    resampled = core.resample(source, target_spacing=(1.0, 1.0))
    assert list(resampled.shape) == [2 * extent for extent in source.shape]
|
nilq/baby-python
|
python
|
"""
Fetch dependencies and build a Windows wheel
============================================
This script depends on pycairo being installed to provide cairo.dll; cairo.dll
must have been built with FreeType support.
The cairo headers (and their dependencies) are fetched from the Arch Linux
repositories (the official cairo release tarball contains unbuilt headers (e.g.
missing cairo-features.h) and is huge due to the presence of test baseline
images). The FreeType headers and binary are fetched from the "official"
build__ listed on FreeType's website.
__ https://github.com/ubawurinna/freetype-windows-binaries
"""
from ctypes import (
c_bool, c_char_p, c_ulong, c_void_p, c_wchar_p, POINTER,
byref, create_unicode_buffer, sizeof, windll)
import os
from pathlib import Path
import shutil
import subprocess
import sys
import urllib.request
import cairo # Needed to load the cairo dll.
import setuptools
def enum_process_modules(func_name=None):
    """Yield the file paths of all modules loaded in the current process.

    :param func_name: optional exported symbol name (bytes); when given,
        only modules that export that symbol are yielded.
    """
    k32 = windll.kernel32
    psapi = windll.psapi
    k32.GetCurrentProcess.restype = c_void_p
    k32.GetModuleFileNameW.argtypes = [c_void_p, c_wchar_p, c_ulong]
    k32.GetModuleFileNameW.restype = c_ulong
    k32.GetProcAddress.argtypes = [c_void_p, c_char_p]
    # Fixed: this was ``restypes`` (a typo, a no-op attribute). With the
    # default c_int restype a 64-bit export address could be truncated and
    # a valid export could test as zero below.
    k32.GetProcAddress.restype = c_void_p
    psapi.EnumProcessModules.argtypes = [
        c_void_p, POINTER(c_void_p), c_ulong, POINTER(c_ulong)]
    psapi.EnumProcessModules.restype = c_bool
    process = k32.GetCurrentProcess()
    needed = c_ulong()
    # first call with a NULL buffer just reports the required size
    psapi.EnumProcessModules(process, None, 0, byref(needed))
    modules = (c_void_p * (needed.value // sizeof(c_void_p)))()
    if not psapi.EnumProcessModules(
            process, modules, sizeof(modules), byref(needed)):
        raise OSError("Failed to enumerate process modules")
    path = create_unicode_buffer(1024)
    for module in modules:
        if func_name is None or k32.GetProcAddress(module, func_name):
            k32.GetModuleFileNameW(module, path, len(path))
            yield path.value
# Prepare the directories.
os.chdir(Path(__file__).resolve().parents[1])
Path("build").mkdir(exist_ok=True)
# Download the cairo headers from Arch Linux (<1Mb, vs >40Mb for the official
# tarball, which contains baseline images) from before Arch switched to zstd,
# and the "official" FreeType build.
os.chdir("build")
urls = {
    Path("cairo.txz"):
        "https://archive.org/download/archlinux_pkg_cairo/"
        "cairo-1.17.2%2B17%2Bg52a7c79fd-2-x86_64.pkg.tar.xz",
    Path("fontconfig.txz"):
        "https://archive.org/download/archlinux_pkg_fontconfig/"
        "fontconfig-2%3A2.13.91%2B24%2Bg75eadca-1-x86_64.pkg.tar.xz",
    Path("freetype.zip"):
        "https://github.com/ubawurinna/freetype-windows-binaries/"
        "releases/download/v2.9.1/freetype-2.9.1.zip",
}
# Fetch each archive once (skipped when already present) and unpack it into a
# fresh directory named after the archive's stem.
for archive_path, url in urls.items():
    if not archive_path.exists():
        with urllib.request.urlopen(url) as request:
            archive_path.write_bytes(request.read())
    dest = archive_path.stem
    shutil.rmtree(dest, ignore_errors=True)
    shutil.unpack_archive(archive_path, dest)
# Get cairo.dll (normally loaded by pycairo), checking that it includes
# FreeType support.
Path("cairo/win64").mkdir(parents=True)
cairo_dll, = enum_process_modules(b"cairo_ft_font_face_create_for_ft_face")
shutil.copyfile(cairo_dll, "cairo/win64/cairo.dll")
# Get hold of a CCompiler object, by creating a dummy Distribution with a list
# of extension modules that claims to be truthy (but is actually empty) and
# running its build_ext command. Prior to the deprecation of distutils, this
# was just ``cc = distutils.ccompiler.new_compiler(); cc.initialize()``.
class L(list): __bool__ = lambda self: True
be = setuptools.Distribution({"ext_modules": L()}).get_command_obj("build_ext")
be.finalize_options()
be.run()
cc = be.compiler
cc.initialize()
# Build the import library.
cc.spawn(
    ["dumpbin", "/EXPORTS", "/OUT:cairo/win64/cairo.exports",
     "cairo/win64/cairo.dll"])
with open("cairo/win64/cairo.exports") as raw_exports, \
     open("cairo/win64/cairo.def", "x") as def_file:
    def_file.write("EXPORTS\n")
    # Keep only the dumpbin table rows (ordinal / hint / RVA / name); any
    # other line fails one of the numeric conversions and is skipped.
    for line in raw_exports:
        try:
            ordinal, hint, rva, name = line.split()
            int(ordinal)
            int(hint, 16)
            int(rva, 16)
        except ValueError:
            continue
        def_file.write(name + "\n")
cc.spawn(
    ["lib", f"/DEF:{def_file.name}", "/MACHINE:x64",
     "/OUT:cairo/win64/cairo.lib"])
# Build the wheel.
os.chdir("..")
subprocess.run(
    [sys.executable, "-mpip", "install", "--upgrade", "pip", "wheel"],
    check=True)
# Point the MSVC compiler and linker at the downloaded headers/libraries via
# the CL and LINK environment variables honored by cl.exe and link.exe.
os.environ.update(
    CL=(f"{os.environ.get('CL', '')} "
        f"/I{Path()}/build/cairo/usr/include/cairo "
        f"/I{Path()}/build/fontconfig/usr/include "
        f"/I{Path()}/build/freetype/include "),
    LINK=(f"{os.environ.get('LINK', '')} "
          f"/LIBPATH:{Path()}/build/cairo/win64 "
          f"/LIBPATH:{Path()}/build/freetype/win64 "),
)
subprocess.run(
    [sys.executable, "setup.py", "bdist_wheel"],
    check=True)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# encoding: utf-8
"""
This module contains unit tests for the arc.main module
"""
import os
import shutil
import unittest
from arc.common import ARC_PATH
from arc.exceptions import InputError
from arc.imports import settings
from arc.main import ARC, StatmechEnum, process_adaptive_levels
from arc.species.species import ARCSpecies
servers = settings['servers']
class TestEnumerationClasses(unittest.TestCase):
    """Unit tests for the enumeration classes defined in arc.main."""

    def test_statmech_enum(self):
        """A valid member round-trips its value; an unknown value raises ValueError."""
        statmech = StatmechEnum('arkane')
        self.assertEqual(statmech.value, 'arkane')
        self.assertRaises(ValueError, StatmechEnum, 'wrong')
class TestARC(unittest.TestCase):
"""
Contains unit tests for the ARC class
"""
    @classmethod
    def setUpClass(cls):
        """
        A method that is run before all unit tests in this class.
        Sets up shared fixtures: server names and a job-type specification.
        """
        cls.maxDiff = None  # always show the full diff for failed comparisons
        cls.servers = servers.keys()
        # representative job-type switches reused by several tests below
        cls.job_types1 = {'conformers': True,
                          'opt': True,
                          'fine_grid': False,
                          'freq': True,
                          'sp': True,
                          'rotors': False,
                          'orbitals': False,
                          'lennard_jones': False,
                          'bde': True,
                          }
    def test_as_dict(self):
        """Test the as_dict() method of ARC: the returned dictionary must
        contain all restart-relevant attributes with their expected values."""
        spc1 = ARCSpecies(label='spc1',
                          smiles='CC',
                          compute_thermo=False,
                          )
        arc0 = ARC(project='arc_test',
                   job_types=self.job_types1,
                   species=[spc1],
                   level_of_theory='ccsd(t)-f12/cc-pvdz-f12//b3lyp/6-311+g(3df,2p)',
                   three_params=False,
                   )
        # tweak one level to verify user modifications survive serialization
        arc0.freq_level.args['keyword']['general'] = 'scf=(NDump=30)'
        restart_dict = arc0.as_dict()
        long_thermo_description = restart_dict['species'][0]['long_thermo_description']
        self.assertIn('Bond corrections:', long_thermo_description)
        self.assertIn("'C-C': 1", long_thermo_description)
        self.assertIn("'C-H': 6", long_thermo_description)
        # mol.atoms are not tested since all id's (including connectivity) changes depending on how the test is run.
        expected_dict = {'T_count': 50,
                         'T_max': None,
                         'T_min': None,
                         'allow_nonisomorphic_2d': False,
                         'arkane_level_of_theory': {'basis': 'cc-pvdz-f12',
                                                    'method': 'ccsd(t)-f12',
                                                    'method_type': 'wavefunction',
                                                    'software': 'molpro'},
                         'calc_freq_factor': True,
                         'compute_transport': False,
                         'conformer_level': {'basis': 'def2svp',
                                             'compatible_ess': ['gaussian', 'terachem'],
                                             'method': 'wb97xd',
                                             'method_type': 'dft',
                                             'software': 'gaussian'},
                         'e_confs': 5.0,
                         'ess_settings': {'gaussian': ['local', 'server2'],
                                          'molpro': ['local', 'server2'],
                                          'onedmin': ['server1'],
                                          'orca': ['local'],
                                          'qchem': ['server1'],
                                          'terachem': ['server1']},
                         'freq_level': {'basis': '6-311+g(3df,2p)',
                                        'method': 'b3lyp',
                                        'method_type': 'dft',
                                        'software': 'gaussian'},
                         'freq_scale_factor': 0.967,
                         'irc_level': {'basis': 'def2tzvp',
                                       'compatible_ess': ['gaussian', 'terachem'],
                                       'method': 'wb97xd',
                                       'method_type': 'dft',
                                       'software': 'gaussian'},
                         'job_memory': 14,
                         'job_types': {'bde': True,
                                       'conformers': True,
                                       'fine': False,
                                       'freq': True,
                                       'irc': True,
                                       'onedmin': False,
                                       'opt': True,
                                       'orbitals': False,
                                       'rotors': False,
                                       'sp': True},
                         'kinetics_adapter': 'arkane',
                         'max_job_time': 120,
                         'n_confs': 10,
                         'opt_level': {'basis': '6-311+g(3df,2p)',
                                       'method': 'b3lyp',
                                       'method_type': 'dft',
                                       'software': 'gaussian'},
                         'output': {},
                         'project': 'arc_test',
                         'reactions': [],
                         'running_jobs': {},
                         'sp_level': {'basis': 'cc-pvdz-f12',
                                      'method': 'ccsd(t)-f12',
                                      'method_type': 'wavefunction',
                                      'software': 'molpro'},
                         'species': [{'arkane_file': None,
                                      'bond_corrections': {'C-C': 1, 'C-H': 6},
                                      'charge': 0,
                                      'compute_thermo': False,
                                      'consider_all_diastereomers': True,
                                      'force_field': 'MMFF94s',
                                      'is_ts': False,
                                      'label': 'spc1',
                                      'long_thermo_description': long_thermo_description,
                                      'mol': {'atoms': restart_dict['species'][0]['mol']['atoms'],
                                              'multiplicity': 1,
                                              'props': {}},
                                      'multiplicity': 1,
                                      'number_of_rotors': 0}],
                         'thermo_adapter': 'arkane',
                         'three_params': False}
        # import pprint  # left intentionally for debugging
        # print(pprint.pprint(restart_dict))
        self.assertEqual(restart_dict, expected_dict)
    def test_from_dict(self):
        """Test the from_dict() method of ARC: restoring an instance from a
        restart dictionary must override defaults and direct user input."""
        restart_dict = {'composite_method': '',
                        'conformer_level': 'b97-d3/6-311+g(d,p)',
                        'freq_level': 'wb97x-d3/6-311+g(d,p)',
                        'freq_scale_factor': 0.96,
                        'opt_level': 'wb97x-d3/6-311+g(d,p)',
                        'output': {},
                        'project': 'testing_from_dict',
                        'reactions': [],
                        'scan_level': '',
                        'sp_level': 'ccsd(t)-f12/cc-pvqz-f12',
                        'species': [{'bond_corrections': {'C-C': 1, 'C-H': 6},
                                     'charge': 1,
                                     'conformer_energies': [],
                                     'conformers': [],
                                     'external_symmetry': 1,
                                     'compute_thermo': False,
                                     'is_ts': False,
                                     'label': 'testing_spc1',
                                     'mol': '1 C u0 p0 c0 {2,S} {3,S} {4,S} {5,S}\n2 C u0 p0 c0 {1,S} {6,S} {7,S} {8,S}'
                                            '\n3 H u0 p0 c0 {1,S}\n4 H u0 p0 c0 {1,S}\n5 H u0 p0 c0 {1,S}\n6 H u0 p0 '
                                            'c0 {2,S}\n7 H u0 p0 c0 {2,S}\n8 H u0 p0 c0 {2,S}\n',
                                     'multiplicity': 1,
                                     'neg_freqs_trshed': [],
                                     'number_of_rotors': 0,
                                     'opt_level': '',
                                     'optical_isomers': 1,
                                     'rotors_dict': {},
                                     'xyzs': []}],
                        'three_params': False,
                        'project_directory': os.path.join(ARC_PATH, 'Projects',
                                                          'arc_project_for_testing_delete_after_usage_test_from_dict'),
                        }
        arc1 = ARC(project='wrong', freq_scale_factor=0.95)
        self.assertEqual(arc1.freq_scale_factor, 0.95)  # user input
        arc2 = ARC(**restart_dict)
        self.assertEqual(arc2.freq_scale_factor, 0.96)  # loaded from the restart dict
        self.assertEqual(arc2.project, 'testing_from_dict')
        self.assertIn('arc_project_for_testing_delete_after_usage', arc2.project_directory)
        # job types not present in the restart dict fall back to their defaults
        self.assertTrue(arc2.job_types['fine'])
        self.assertTrue(arc2.job_types['rotors'])
        self.assertEqual(arc2.sp_level.simple(), 'ccsd(t)-f12/cc-pvqz-f12')
        self.assertEqual(arc2.level_of_theory, '')
        self.assertEqual(arc2.species[0].label, 'testing_spc1')
        self.assertFalse(arc2.species[0].is_ts)
        self.assertEqual(arc2.species[0].charge, 1)
        self.assertFalse(arc2.three_params)
def test_from_dict_specific_job(self):
"""Test the from_dict() method of ARC"""
restart_dict = {'specific_job_type': 'bde',
'project': 'unit_test_specific_job',
'project_directory': os.path.join(ARC_PATH, 'Projects', 'unit_test_specific_job'),
}
arc1 = ARC(**restart_dict)
job_type_expected = {'conformers': False, 'opt': True, 'freq': True, 'sp': True, 'rotors': False,
'orbitals': False, 'bde': True, 'onedmin': False, 'fine': True, 'irc': False}
self.assertEqual(arc1.job_types, job_type_expected)
def test_check_project_name(self):
"""Test project name invalidity"""
with self.assertRaises(InputError):
ARC(project='ar c')
with self.assertRaises(InputError):
ARC(project='ar:c')
with self.assertRaises(InputError):
ARC(project='ar<c')
with self.assertRaises(InputError):
ARC(project='ar%c')
    def test_determine_model_chemistry_and_freq_scale_factor(self):
        """Test determining the model chemistry and the frequency scaling factor"""
        # composite method: Arkane level is the composite itself
        arc0 = ARC(project='arc_model_chemistry_test', level_of_theory='CBS-QB3')
        self.assertEqual(str(arc0.arkane_level_of_theory), "cbs-qb3, software: gaussian (composite)")
        self.assertEqual(arc0.freq_scale_factor, 1.00386)  # 0.99 * 1.014 = 1.00386
        # composite method with Paraskevas BACs
        arc1 = ARC(project='arc_model_chemistry_test',
                   level_of_theory='cbs-qb3-paraskevas')
        self.assertEqual(str(arc1.arkane_level_of_theory), 'cbs-qb3-paraskevas, software: gaussian (composite)')
        self.assertEqual(arc1.freq_scale_factor, 1.00386)  # 0.99 * 1.014 = 1.00386
        self.assertEqual(arc1.bac_type, 'p')
        # sp//opt specification: Arkane level is the sp part
        arc2 = ARC(project='arc_model_chemistry_test',
                   level_of_theory='ccsd(t)-f12/cc-pvtz-f12//m06-2x/cc-pvtz')
        self.assertEqual(str(arc2.arkane_level_of_theory), 'ccsd(t)-f12/cc-pvtz-f12, software: molpro (wavefunction)')
        self.assertEqual(arc2.freq_scale_factor, 0.955)
        # separate sp_level/opt_level arguments; scale factor follows the opt level
        arc3 = ARC(project='arc_model_chemistry_test',
                   sp_level='ccsd(t)-f12/cc-pvtz-f12', opt_level='wb97xd/def2tzvp')
        self.assertEqual(str(arc3.arkane_level_of_theory), 'ccsd(t)-f12/cc-pvtz-f12, software: molpro (wavefunction)')
        self.assertEqual(arc3.freq_scale_factor, 0.988)
    def test_determine_model_chemistry_for_job_types(self):
        """Test determining the model chemistry specification dictionary for job types.
        Covers conflicting inputs, illegal specifications, level deduction from
        defaults / composite methods / level-of-theory strings, dictionary-format
        levels, and semi-empirical / force-field methods."""
        # Test conflicted inputs: specify both level_of_theory and composite_method
        with self.assertRaises(InputError):
            ARC(project='test', level_of_theory='ccsd(t)-f12/cc-pvtz-f12//wb97x-d/aug-cc-pvtz',
                composite_method='cbs-qb3')
        # Test illegal level of theory specification (method contains multiple slashes)
        with self.assertRaises(ValueError):
            ARC(project='test', level_of_theory='dlpno-mp2-f12/D/cc-pVDZ(fi/sf/fw)//b3lyp/G/def2svp')
        # Test illegal job level specification (method contains multiple slashes)
        with self.assertRaises(ValueError):
            ARC(project='test', opt_level='b3lyp/d/def2tzvp/def2tzvp/c')
        # Test illegal job level specification (method contains empty space)
        with self.assertRaises(ValueError):
            ARC(project='test', opt_level='b3lyp/def2tzvp def2tzvp/c')
        # Test direct job level specification conflicts with level of theory specification
        with self.assertRaises(InputError):
            ARC(project='test', level_of_theory='b3lyp/sto-3g', opt_level='wb97xd/def2tzvp')
        # Test deduce levels from default method from settings.py
        arc1 = ARC(project='test')
        self.assertEqual(arc1.opt_level.simple(), 'wb97xd/def2tzvp')
        self.assertEqual(arc1.freq_level.simple(), 'wb97xd/def2tzvp')
        self.assertEqual(arc1.sp_level.simple(), 'ccsd(t)-f12/cc-pvtz-f12')
        # Test deduce levels from composite method specification
        arc2 = ARC(project='test', composite_method='cbs-qb3')
        self.assertIsNone(arc2.opt_level)
        self.assertIsNone(arc2.sp_level)
        self.assertIsNone(arc2.orbitals_level)
        self.assertEqual(arc2.freq_level.simple(), 'b3lyp/cbsb7')
        self.assertEqual(arc2.scan_level.simple(), 'b3lyp/cbsb7')
        self.assertEqual(arc2.composite_method.simple(), 'cbs-qb3')
        # Test deduce levels from level of theory specification
        arc3 = ARC(project='test', level_of_theory='ccsd(t)-f12/cc-pvtz-f12//wb97m-v/def2tzvpd')
        self.assertEqual(arc3.opt_level.simple(), 'wb97m-v/def2tzvpd')
        self.assertEqual(arc3.freq_level.simple(), 'wb97m-v/def2tzvpd')
        self.assertEqual(arc3.sp_level.simple(), 'ccsd(t)-f12/cc-pvtz-f12')
        self.assertEqual(arc3.scan_level.simple(), 'wb97m-v/def2tzvpd')
        self.assertIsNone(arc3.orbitals_level)
        # explicit per-job levels are normalized to lowercase
        arc4 = ARC(project='test', opt_level='wb97x-d3/6-311++G(3df,3pd)', freq_level='m062x/def2-tzvpp',
                   sp_level='ccsd(t)f12/aug-cc-pvqz', calc_freq_factor=False)
        self.assertEqual(arc4.opt_level.simple(), 'wb97x-d3/6-311++g(3df,3pd)')
        self.assertEqual(arc4.freq_level.simple(), 'm062x/def2-tzvpp')
        self.assertEqual(arc4.sp_level.simple(), 'ccsd(t)f12/aug-cc-pvqz')
        # Test deduce freq level from opt level
        arc7 = ARC(project='test', opt_level='wb97xd/aug-cc-pvtz', calc_freq_factor=False)
        self.assertEqual(arc7.opt_level.simple(), 'wb97xd/aug-cc-pvtz')
        self.assertEqual(arc7.freq_level.simple(), 'wb97xd/aug-cc-pvtz')
        # Test a level not supported by Arkane does not raise error if compute_thermo is False
        arc8 = ARC(project='test', sp_level='method/unsupported', calc_freq_factor=False, compute_thermo=False)
        self.assertEqual(arc8.sp_level.simple(), 'method/unsupported')
        self.assertEqual(arc8.freq_level.simple(), 'wb97xd/def2tzvp')
        # Test that a level not supported by Arkane does raise an error if compute_thermo is True (default)
        with self.assertRaises(ValueError):
            ARC(project='test', sp_level='method/unsupported', calc_freq_factor=False)
        # Test dictionary format specification with auxiliary basis and DFT dispersion
        arc9 = ARC(project='test', opt_level={},
                   freq_level={'method': 'B3LYP/G', 'basis': 'cc-pVDZ(fi/sf/fw)', 'auxiliary_basis': 'def2-svp/C',
                               'dispersion': 'DEF2-tzvp/c'},
                   sp_level={'method': 'DLPNO-CCSD(T)-F12', 'basis': 'cc-pVTZ-F12',
                             'auxiliary_basis': 'aug-cc-pVTZ/C cc-pVTZ-F12-CABS'},
                   calc_freq_factor=False, compute_thermo=False)
        self.assertEqual(arc9.opt_level.simple(), 'wb97xd/def2tzvp')
        self.assertEqual(str(arc9.freq_level), 'b3lyp/g/cc-pvdz(fi/sf/fw), auxiliary_basis: def2-svp/c, '
                                               'dispersion: def2-tzvp/c, software: gaussian (dft)')
        self.assertEqual(str(arc9.sp_level),
                         'dlpno-ccsd(t)-f12/cc-pvtz-f12, auxiliary_basis: aug-cc-pvtz/c cc-pvtz-f12-cabs, '
                         'software: orca (wavefunction)')
        # Test using default frequency and orbital level for composite job, also forbid rotors job
        arc10 = ARC(project='test', composite_method='cbs-qb3', calc_freq_factor=False,
                    job_types={'rotors': False, 'orbitals': True})
        self.assertEqual(arc10.freq_level.simple(), 'b3lyp/cbsb7')
        self.assertIsNone(arc10.scan_level)
        self.assertEqual(arc10.orbitals_level.simple(), 'b3lyp/cbsb7')
        # Test using specified frequency, scan, and orbital for composite job
        arc11 = ARC(project='test', composite_method='cbs-qb3', freq_level='wb97xd/6-311g', scan_level='apfd/def2svp',
                    orbitals_level='hf/sto-3g', job_types={'orbitals': True}, calc_freq_factor=False)
        self.assertEqual(arc11.scan_level.simple(), 'apfd/def2svp')
        self.assertEqual(arc11.freq_level.simple(), 'wb97xd/6-311g')
        self.assertEqual(arc11.orbitals_level.simple(), 'hf/sto-3g')
        # Test using default frequency and orbital level for job specified from level of theory, also forbid rotors job
        arc12 = ARC(project='test', level_of_theory='b3lyp/sto-3g', calc_freq_factor=False,
                    job_types={'rotors': False, 'orbitals': True}, compute_thermo=False)
        self.assertIsNone(arc12.scan_level)
        self.assertEqual(arc12.orbitals_level.simple(), 'wb97x-d3/def2tzvp')
        # Test using specified scan level
        arc13 = ARC(project='test', level_of_theory='b3lyp/sto-3g', calc_freq_factor=False, scan_level='apfd/def2svp',
                    job_types={'rotors': True}, compute_thermo=False)
        self.assertEqual(arc13.scan_level.simple(), 'apfd/def2svp')
        # Test specifying semi-empirical and force-field methods using dictionary
        arc14 = ARC(project='test', opt_level={'method': 'AM1'}, freq_level={'method': 'PM6'},
                    sp_level={'method': 'AMBER'}, calc_freq_factor=False, compute_thermo=False)
        self.assertEqual(arc14.opt_level.simple(), 'am1')
        self.assertEqual(arc14.freq_level.simple(), 'pm6')
        self.assertEqual(arc14.sp_level.simple(), 'amber')
def test_determine_unique_species_labels(self):
"""Test the determine_unique_species_labels method"""
spc0 = ARCSpecies(label='spc0', smiles='CC', compute_thermo=False)
spc1 = ARCSpecies(label='spc1', smiles='CC', compute_thermo=False)
spc2 = ARCSpecies(label='spc2', smiles='CC', compute_thermo=False)
arc0 = ARC(project='arc_test', job_types=self.job_types1, species=[spc0, spc1, spc2],
level_of_theory='ccsd(t)-f12/cc-pvdz-f12//b3lyp/6-311+g(3df,2p)')
self.assertEqual(arc0.unique_species_labels, ['spc0', 'spc1', 'spc2'])
spc3 = ARCSpecies(label='spc0', smiles='CC', compute_thermo=False)
arc0.species.append(spc3)
with self.assertRaises(ValueError):
arc0.determine_unique_species_labels()
def test_add_hydrogen_for_bde(self):
"""Test the add_hydrogen_for_bde method"""
spc0 = ARCSpecies(label='spc0', smiles='CC', compute_thermo=False)
arc0 = ARC(project='arc_test', job_types=self.job_types1, species=[spc0],
level_of_theory='ccsd(t)-f12/cc-pvdz-f12//b3lyp/6-311+g(3df,2p)')
arc0.add_hydrogen_for_bde()
self.assertEqual(len(arc0.species), 1)
spc1 = ARCSpecies(label='spc1', smiles='CC', compute_thermo=False, bdes=['all_h'])
arc1 = ARC(project='arc_test', job_types=self.job_types1, species=[spc1],
level_of_theory='ccsd(t)-f12/cc-pvdz-f12//b3lyp/6-311+g(3df,2p)')
arc1.add_hydrogen_for_bde()
self.assertEqual(len(arc1.species), 2)
self.assertIn('H', [spc.label for spc in arc1.species])
    def test_process_adaptive_levels(self):
        """Test processing the adaptive levels"""
        # Levels may be given as strings or as {'method': ..., 'basis': ...} dicts,
        # and job types as tuples or bare strings -- all forms get normalized.
        adaptive_levels_1 = {(1, 5): {('opt', 'freq'): 'wb97xd/6-311+g(2d,2p)',
                                      ('sp',): 'ccsd(t)-f12/aug-cc-pvtz-f12'},
                             (6, 15): {('opt', 'freq'): 'b3lyp/cbsb7',
                                       'sp': 'dlpno-ccsd(t)/def2-tzvp'},
                             (16, 30): {('opt', 'freq'): 'b3lyp/6-31g(d,p)',
                                        'sp': {'method': 'wb97xd', 'basis': '6-311+g(2d,2p)'}},
                             (31, 'inf'): {('opt', 'freq'): 'b3lyp/6-31g(d,p)',
                                           'sp': 'b3lyp/6-311+g(d,p)'}}
        processed_1 = process_adaptive_levels(adaptive_levels_1)
        self.assertEqual(processed_1[(6, 15)][('sp',)].simple(), 'dlpno-ccsd(t)/def2-tzvp')
        self.assertEqual(processed_1[(16, 30)][('sp',)].simple(), 'wb97xd/6-311+g(2d,2p)')
        # test non dict input
        with self.assertRaises(InputError):
            process_adaptive_levels(4)
        # wrong atom range: keys must be 2-tuples, not bare integers
        with self.assertRaises(InputError):
            process_adaptive_levels({5: {('opt', 'freq'): 'wb97xd/6-311+g(2d,2p)',
                                         ('sp',): 'ccsd(t)-f12/aug-cc-pvtz-f12'},
                                     (6, 'inf'): {('opt', 'freq'): 'b3lyp/6-31g(d,p)',
                                                  'sp': 'b3lyp/6-311+g(d,p)'}})
        # no 'inf': the last atom range must be open-ended
        with self.assertRaises(InputError):
            process_adaptive_levels({(1, 5): {('opt', 'freq'): 'wb97xd/6-311+g(2d,2p)',
                                              ('sp',): 'ccsd(t)-f12/aug-cc-pvtz-f12'},
                                     (6, 75): {('opt', 'freq'): 'b3lyp/6-31g(d,p)',
                                               'sp': 'b3lyp/6-311+g(d,p)'}})
        # adaptive level not a dict
        with self.assertRaises(InputError):
            process_adaptive_levels({(1, 5): {('opt', 'freq'): 'wb97xd/6-311+g(2d,2p)',
                                              ('sp',): 'ccsd(t)-f12/aug-cc-pvtz-f12'},
                                     (6, 'inf'): 'b3lyp/6-31g(d,p)'})
        # non-consecutive atom ranges (5 -> 15 leaves a gap)
        with self.assertRaises(InputError):
            process_adaptive_levels({(1, 5): {('opt', 'freq'): 'wb97xd/6-311+g(2d,2p)',
                                              ('sp',): 'ccsd(t)-f12/aug-cc-pvtz-f12'},
                                     (15, 'inf'): {('opt', 'freq'): 'b3lyp/6-31g(d,p)',
                                                   'sp': 'b3lyp/6-311+g(d,p)'}})
@classmethod
def tearDownClass(cls):
"""
A function that is run ONCE after all unit tests in this class.
Delete all project directories created during these unit tests
"""
projects = ['arc_project_for_testing_delete_after_usage_test_from_dict',
'arc_model_chemistry_test', 'arc_test', 'test', 'unit_test_specific_job', 'wrong']
for project in projects:
project_directory = os.path.join(ARC_PATH, 'Projects', project)
shutil.rmtree(project_directory, ignore_errors=True)
# Allow running this test module directly; verbosity=2 prints one line per test.
if __name__ == '__main__':
    unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
|
nilq/baby-python
|
python
|
def running_product(iterations=100000):
    """Return the value of ``b`` after ``iterations`` rounds of ``b += i * b``.

    Since ``b += i * b`` is ``b *= (1 + i)``, the result equals
    ``iterations!`` (with ``running_product(0) == 1``).

    :param iterations: number of loop rounds (default matches the original script)
    :return: the accumulated big integer
    """
    b = 1
    for i in range(iterations):
        b += i * b
    return b


if __name__ == "__main__":
    # Guarded so that importing this module does not trigger the (very
    # expensive) 100000-round big-integer computation and huge print.
    print(running_product())
|
nilq/baby-python
|
python
|
import sys, os, math, random, time, zlib, secrets, threading, time, asyncio
async def say_after(delay, what):
    """Sleep for *delay* seconds, then hand back *what* unchanged."""
    payload = what
    await asyncio.sleep(delay)
    return payload
async def main():
    """Schedule ten say_after tasks concurrently and print their results in order."""
    tasks = [asyncio.create_task(say_after(i, str(i))) for i in range(10)]
    print(f"started at {time.strftime('%X')}")
    for task in tasks:
        print(await task)
    print(f"finished at {time.strftime('%X')}")
# Entry point: run the demo event loop (blocks ~9s for the longest task).
asyncio.run(main())
|
nilq/baby-python
|
python
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
DEMNet, WithLossCell and TrainOneStepCell
"""
import mindspore.nn as nn
import mindspore.ops as ops
import mindspore.context as context
from mindspore.common.initializer import Normal
from mindspore.ops import operations as P
from mindspore.ops import composite as C
from mindspore.ops import functional as F
from mindspore.parallel._utils import _get_gradients_mean, _get_parallel_mode, _get_device_num
from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
class MyTanh(nn.Cell):
    """Scaled tanh activation: computes 1.7159 * tanh(2 * x / 3)."""

    def __init__(self):
        super(MyTanh, self).__init__()
        self.tanh = P.Tanh()

    def construct(self, x):
        squashed = self.tanh(2 * x / 3)
        return 1.7159 * squashed
class DEMNet1(nn.Cell):
    """cub+att: 312 -> 700 -> 1024 fully-connected net with ReLU activations."""

    def __init__(self):
        super(DEMNet1, self).__init__()
        self.relu = nn.ReLU()
        self.fc1 = nn.Dense(312, 700, weight_init=Normal(0.0008))
        self.fc2 = nn.Dense(700, 1024, weight_init=Normal(0.0012))

    def construct(self, x):
        hidden = self.relu(self.fc1(x))
        return self.relu(self.fc2(hidden))
class DEMNet2(nn.Cell):
    """awa+att: 85 -> 700 -> 1024 fully-connected net with ReLU activations."""

    def __init__(self):
        super(DEMNet2, self).__init__()
        self.relu = nn.ReLU()
        self.fc1 = nn.Dense(85, 700, weight_init=Normal(0.0005))
        self.fc2 = nn.Dense(700, 1024, weight_init=Normal(0.0005))

    def construct(self, x):
        hidden = self.relu(self.fc1(x))
        return self.relu(self.fc2(hidden))
class DEMNet3(nn.Cell):
    """awa+word: single 1000 -> 1024 projection followed by ReLU."""

    def __init__(self):
        super(DEMNet3, self).__init__()
        self.relu = nn.ReLU()
        self.fc1 = nn.Dense(1000, 1024, weight_init=Normal(0.0005))

    def construct(self, x):
        return self.relu(self.fc1(x))
class DEMNet4(nn.Cell):
    """awa+fusion: project word (1000-d) and attribute (85-d) inputs to 900-d,
    fuse as word + 3 * att, then map to the 1024-d output space."""

    def __init__(self):
        super(DEMNet4, self).__init__()
        self.relu = nn.ReLU()
        self.tanh = MyTanh()
        self.fc1 = nn.Dense(1000, 900, weight_init=Normal(0.0008))
        self.fc2 = nn.Dense(85, 900, weight_init=Normal(0.0012))
        self.fc3 = nn.Dense(900, 1024, weight_init=Normal(0.0012))

    def construct(self, att, word):
        word_feat = self.tanh(self.fc1(word))
        att_feat = self.tanh(self.fc2(att))
        fused = word_feat + 3 * att_feat
        return self.relu(self.fc3(fused))
class MyWithLossCell(nn.Cell):
    """Bundle a two-input backbone with a loss function into one cell that
    returns the loss for (data1, data2, label)."""

    def __init__(self, backbone, loss_fn):
        super(MyWithLossCell, self).__init__(auto_prefix=False)
        self._backbone = backbone
        self._loss_fn = loss_fn

    def construct(self, data1, data2, label):
        prediction = self._backbone(data1, data2)
        return self._loss_fn(prediction, label)
class MyTrainOneStepCell(nn.Cell):
    """custom TrainOneStepCell

    Wraps a network-with-loss cell and an optimizer into a single cell that
    performs one forward/backward/update step per call, with global-norm
    gradient clipping and (in data/hybrid parallel mode) distributed
    gradient reduction.
    """
    def __init__(self, network, optimizer, sens=1.0):
        super(MyTrainOneStepCell, self).__init__(auto_prefix=False)
        self.network = network
        self.network.set_grad()
        self.network.add_flags(defer_inline=True)
        self.weights = optimizer.parameters
        self.optimizer = optimizer
        # get_by_list: differentiate w.r.t. the weight list; sens_param: the
        # backward-pass seed (loss scale) is supplied as an explicit input.
        self.grad = C.GradOperation(get_by_list=True, sens_param=True)
        self.sens = sens
        self.reducer_flag = False
        # Defaults to identity; replaced by a DistributedGradReducer below
        # when running data- or hybrid-parallel.
        self.grad_reducer = F.identity
        self.parallel_mode = _get_parallel_mode()
        if self.parallel_mode in (context.ParallelMode.DATA_PARALLEL, context.ParallelMode.HYBRID_PARALLEL):
            self.reducer_flag = True
        if self.reducer_flag:
            mean = _get_gradients_mean()
            degree = _get_device_num()
            self.grad_reducer = DistributedGradReducer(self.weights, mean, degree)
    def construct(self, *inputs):
        """Run one training step and return the (pre-update) loss."""
        weights = self.weights
        loss = self.network(*inputs)
        # Seed the backward pass with a tensor filled with `sens`, matching
        # the loss's dtype and shape.
        sens = P.Fill()(P.DType()(loss), P.Shape()(loss), self.sens)
        grads = self.grad(self.network, weights)(*inputs, sens)
        grads = self.grad_reducer(grads)
        # Clip by global norm (0.2) before applying the update.
        grads = ops.clip_by_global_norm(grads, 0.2)
        self.optimizer(grads)
        return loss
|
nilq/baby-python
|
python
|
from django.urls import reverse
from rest_framework import status
from django.test import TestCase
from .models import CustomUser
from .serializers import UserDetailsSerializer
from rest_framework.test import APIClient
# Resolve the named auth REST endpoints once at import time.
REGISTRATION_URL = reverse('rest_register')
LOGIN_URL = reverse('rest_login')
PASSWORD_CHANGE_URL = reverse('rest_password_change')
USER_DETAIL_URL = reverse('rest_user_details')
LOGOUT_URL = reverse('rest_logout')
class UsersApiTest(TestCase):
    """End-to-end tests for the auth REST endpoints: registration,
    password change, user details and logout."""
    def setUp(self):
        # Register a fresh user before every test and authenticate the client
        # with the bearer token returned by the registration endpoint.
        self.client = APIClient()
        response = self.client.post(REGISTRATION_URL, {
            'email': 'test@test.com',
            'password1': 'hakunamatata',
            'password2': 'hakunamatata',
            'first_name' : 'john',
            'last_name': 'doe'
        }, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + response.data['token'])
    def test_password_change(self):
        # An authenticated user can set a new password.
        response = self.client.post(PASSWORD_CHANGE_URL, {
            'new_password1': 'hdgstgehst01',
            'new_password2': 'hdgstgehst01',
        }, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_get_user_details(self):
        # The details endpoint returns the serialized current user.
        response = self.client.get(USER_DETAIL_URL)
        user_details = CustomUser.objects.get(email='test@test.com')
        serializer = UserDetailsSerializer(user_details)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, serializer.data)
    def test_logout(self):
        # Logout succeeds; afterwards the details endpoint rejects the request.
        response = self.client.post(LOGOUT_URL)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.client.logout()
        response = self.client.get(USER_DETAIL_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
|
nilq/baby-python
|
python
|
# Successive quadratic estimation for 1-D minimization: bracket three points,
# fit a quadratic through them, jump to its stationary point (xbar), and
# iterate until both the f- and x-improvements fall below the termination
# parameter `e`, or `j` iterations have been performed.
# NOTE(review): the objective is evaluated with eval() on a raw user-supplied
# expression in `x` -- this executes arbitrary code; never run on untrusted input.
print("Please give termination parameter as zero to proceed with number of iterations.")
x = list()  # immediately overwritten below; `x` only ever holds the current scalar
x1, d, e = map(float, input(
    "Enter initial point, delta and termination parameter: ").split())
expr = input("Enter expression for x: ")
print("Please give no. of iterations as considerably a high number to check with termination parameter.")
j = int(input("Enter number of iterations to be performed: "))
# Initial three-point bracket: x2 = x1 + d, and x3 on the descending side.
x2 = x1+d
x = x1
f1 = eval(expr)
x = x2
f2 = eval(expr)
if(f1 >= f2):
    x3 = x1+2*d
else:
    x3 = x1-d
x = x3
f3 = eval(expr)
c = 0  # iteration counter
while(True):
    Fmin = min(f1, f2, f3)
    if(Fmin == f1):
        xmin = x1
    elif(Fmin == f2):
        xmin = x2
    else:
        xmin = x3
    # Quadratic through (x1,f1), (x2,f2), (x3,f3); xbar is its stationary point.
    a0 = f1  # NOTE(review): assigned but never read afterwards
    a1 = (f2-f1)/(x2-x1)
    a2 = (1/(x3-x2))*(((f3-f1)/(x3-x1))-a1)
    xbar = (x1+x2)/2-(a1/(2*a2))
    x = xbar
    fxbar = eval(expr)
    # Keep the three points with the smallest f-values, re-ordered by x.
    xlist = [x1, x2, x3, xbar]
    flist = [f1, f2, f3, fxbar]
    sortlist = sorted(flist)
    newx = list()
    newf = list()
    for i in range(3):
        # flist.index(sortlist[i]) returns index of corresponding f element in original list
        newx.append(xlist[flist.index(sortlist[i])])
    newx = sorted(newx)
    for i in range(3):
        # xlist.index(newx[i]) returns index of corresponding x element in original list
        newf.append(flist[xlist.index(newx[i])])
    x1, x2, x3 = newx
    f1, f2, f3 = newf
    #print("x values are",x1," ",x2," ",x3)
    newmin = xlist[flist.index(sortlist[0])]
    #print("new min is ",newmin)
    c += 1
    # Stop when both improvements drop below `e`, or the iteration cap is hit.
    if((abs(Fmin-fxbar) < e and abs(xmin-xbar) < e)or c >= j):
        break
print("Point corresponding to x=", round(
    newmin, 5), " is the minimum of the function.")
|
nilq/baby-python
|
python
|
from bs4 import BeautifulSoup
from faker import Faker
import requests
class faceFarm():
    """HTTP-session wrapper that submits an e-mail address to Facebook's
    account-recovery ("identify") form and records addresses for which no
    error box is returned."""
    def __init__(self) -> None:
        super(faceFarm, self).__init__()
        # One shared session so cookies persist across requests.
        self.requests = requests.Session()
        pass
    def request(self, method, url, **kwargs):
        """Issue an HTTP request with fixed (connect=10s, read=30s) timeouts.

        Returns the Response, or the exception instance itself on
        connection/read-timeout errors (callers do not currently distinguish).
        """
        try:
            return self.requests.request(method, url, timeout=(10, 30), **kwargs)
        except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout) as e:
            return e
    def identifyEmail(self, email):
        """Submit `email` to the identify form; when the response contains no
        error box, log the address to vuln.txt, otherwise print the error."""
        url = "https://m.facebook.com/login/identify/"
        # First GET fetches the form so the lsd/jazoest hidden tokens can be scraped.
        page = self.request("GET", url, params={
            "ctx": "recover",
            "c": "/login/",
            "search_attempts": "1",
            "ars": "facebook_login",
            "alternate_search": "0",
            "show_friend_search_filtered_list": "0",
            "birth_month_search": "0",
            "city_search": "0"
        })
        soup = BeautifulSoup(page.text, "html.parser")
        lsd = soup.find("input", {"name": "lsd"})["value"]
        jazoest = soup.find("input", {"name": "jazoest"})["value"]
        # POST the search with the scraped tokens and the target address.
        page = self.request("POST", url, params={
            "ctx": "recover",
            "c": "/login/",
            "search_attempts": "1",
            "ars": "facebook_login",
            "alternate_search": "0",
            "show_friend_search_filtered_list": "0",
            "birth_month_search": "0",
            "city_search": "0"
        }, data={
            "lsd": lsd,
            "jazoest": jazoest,
            "email": email,
            "did_submit": "Cari"
        })
        soup = BeautifulSoup(page.text, "html.parser")
        login_identify_search_error_msg = soup.find(
            "div", {"id": "login_identify_search_error_msg"})
        # No error box -> the address matched an account.
        if not login_identify_search_error_msg:
            status = soup.find("title").get_text()
            print(
                "[*] Email Address : {}\n[*] Status : {}\n[+] Saved to 'vuln.txt'.\n".format(email, status))
            with open("vuln.txt", "a", encoding="utf-8") as fp:
                fp.write(email + "\n")
        else:
            status = soup.find("title").get_text()
            detail_status = login_identify_search_error_msg.get_text()
            print("[*] Email Address : {}\n[*] Status : {}\n[*] Detail Status : {}\n".format(
                email, status, detail_status))
        pass
# Script entry point: print the banner, then loop forever generating random
# addresses and probing them via identifyEmail.
if __name__ == "__main__":
    faceFarmASCII = """ __ ___
/ _|__ _ __ ___| __|_ _ _ _ _ __
| _/ _` / _/ -_) _/ _` | '_| ' \
|_| \__,_\__\___|_|\__,_|_| |_|_|_|
faceFarm - Email Detector for Facebook
"""
    print(faceFarmASCII)
    # NOTE: this rebinds the name `faceFarm` from the class to an instance.
    faceFarm = faceFarm()
    while True:
        fake = Faker()
        # Take the local part of a fake address and force the yahoo.com domain.
        emailAddr = fake.email().split("@")[0] + "@yahoo.com"
        faceFarm.identifyEmail(emailAddr)
|
nilq/baby-python
|
python
|
# Read an integer and report its predecessor and successor.
numero = int(input('Digite um número: '))
print(f'Analizando o valor {numero}, seu antecessor é o {numero - 1} e o seu sucessor é {numero + 1}')
|
nilq/baby-python
|
python
|
import pathlib
import aiosql
# Load every query from the adjacent "sql" directory, bound to the asyncpg driver.
queries = aiosql.from_path(pathlib.Path(__file__).parent / "sql", "asyncpg")
|
nilq/baby-python
|
python
|
def convert_dollars_to_euros(dollars):
    """Convert a dollar amount to euros at a fixed 0.8007 rate.

    :param dollars: amount in US dollars
    :return: equivalent amount in euros, rounded to 2 decimals
    """
    return round(dollars * 0.8007, 2)


def main():
    # float() instead of the original eval(): eval on raw user input can
    # execute arbitrary code.
    dollars = float(input("Enter in a value of Dollars:"))
    euros = convert_dollars_to_euros(dollars)
    print("That is exactly", euros, "euros.")


if __name__ == "__main__":
    main()
|
nilq/baby-python
|
python
|
import logging
from functools import partial
from typing import TYPE_CHECKING, Optional
from magicgui.widgets import create_widget
from napari.qt.threading import thread_worker
from napari_plugin_engine import napari_hook_implementation
from qtpy.QtCore import QEvent, Qt
from qtpy.QtWidgets import (
QCheckBox,
QFormLayout,
QPushButton,
QSlider,
QSpinBox,
QVBoxLayout,
QWidget,
)
from napari_basicpy._mock_basic import MockBaSiC as BaSiC
if TYPE_CHECKING:
import napari # pragma: no cover
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
class BasicWidget(QWidget):
    """Example widget class.

    Dock widget that runs BaSiC on a selected napari layer in a background
    thread and streams intermediate results into a 'result' image layer.
    """
    def __init__(self, viewer: "napari.viewer.Viewer"):
        """Init example widget."""
        super().__init__()
        self.viewer = viewer
        self.setLayout(QVBoxLayout())
        # magicgui-generated dropdown of the viewer's layers; `.native` is the
        # wrapped Qt widget to embed in this layout.
        self.layer_select = create_widget(
            annotation="napari.layers.Layer", label="image_layer"
        )
        self.layout().addWidget(self.layer_select.native)
        # Settings form. NOTE(review): none of these controls are read by
        # _run yet -- placeholders to be wired to BaSiC parameters.
        settings_layout = QFormLayout()
        settings_layout.setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow)
        settings_layout.addRow("Setting 1", QSpinBox())
        settings_layout.addRow("Setting 2", QSlider(Qt.Horizontal))
        settings_layout.addRow("Setting 3", QCheckBox())
        settings_layout.addRow("Setting 4", QCheckBox())
        self.settings_container = QWidget()
        self.settings_container.setLayout(settings_layout)
        self.run_btn = QPushButton("Run")
        self.run_btn.clicked.connect(self._run)
        self.cancel_btn = QPushButton("Cancel")
        self.layout().addWidget(self.settings_container)
        self.layout().addWidget(self.run_btn)
        self.layout().addWidget(self.cancel_btn)
    def _run(self):
        """Start a worker thread that fits BaSiC on the selected layer's data."""
        def update_layer(image):
            # Update the 'result' layer in place, creating it on first use.
            try:
                self.viewer.layers["result"].data = image
            except KeyError:
                self.viewer.add_image(image, name="result")
        @thread_worker(
            start_thread=False,
            connect={"yielded": update_layer, "returned": update_layer},
        )
        def call_basic(image):
            # Drive the fit generator manually so each intermediate update is
            # yielded; StopIteration carries the final fit value.
            basic = BaSiC()
            fit = basic.fit(image, updates=True)
            while True:
                try:
                    yield next(fit)
                except StopIteration as final:
                    return final.value
        logger.info("Starting BaSiC")
        data = self.layer_select.value.data
        worker = call_basic(data)
        # Enable cancellation for this worker; disconnect when it finishes so
        # the Cancel button does not target a dead worker.
        self.cancel_btn.clicked.connect(partial(self._cancel, worker=worker))
        worker.finished.connect(self.cancel_btn.clicked.disconnect)
        worker.start()
    def _cancel(self, worker):
        # Ask the napari worker to stop.
        logger.info("Canceling BasiC")
        worker.quit()
    def showEvent(self, event: QEvent) -> None:  # noqa: D102
        super().showEvent(event)
        # Refresh the layer dropdown whenever the widget becomes visible.
        self.reset_choices()
    def reset_choices(self, event: Optional[QEvent] = None) -> None:
        """Repopulate image list."""
        self.layer_select.reset_choices(event)
@napari_hook_implementation
def napari_experimental_provide_dock_widget():  # noqa
    """Expose BasicWidget to napari as a dock widget."""
    return [BasicWidget]
|
nilq/baby-python
|
python
|
"""Methods for projecting a feature space to lower dimensionality."""
from .factory import create_projector, IDENTIFIERS, DEFAULT_IDENTIFIER # noqa: F401
from .projector import Projector # noqa: F401
|
nilq/baby-python
|
python
|
import numpy as np
from sklearn.metrics import pairwise_distances
from sklearn.metrics.pairwise import cosine_similarity, euclidean_distances, haversine_distances, chi2_kernel, \
manhattan_distances
class Similarity(object):
    """
    Simple kNN class

    Scores every (user, item) pair by comparing user-profile rows against
    item-attribute rows with a configurable similarity/distance measure.
    """
    def __init__(self, data, user_profile_matrix, item_attribute_matrix, similarity):
        self._data = data
        self._ratings = data.train_dict
        self._user_profile_matrix = user_profile_matrix
        self._item_attribute_matrix = item_attribute_matrix
        self._similarity = similarity
        self._users = self._data.users
        self._items = self._data.items
        # private <-> public id mappings (internal row indices vs. dataset ids)
        self._private_users = self._data.private_users
        self._public_users = self._data.public_users
        self._private_items = self._data.private_items
        self._public_items = self._data.public_items
    def initialize(self):
        """
        This function initialize the data model
        """
        supported_similarities = ["cosine", "dot", ]
        supported_dissimilarities = ["euclidean", "manhattan", "haversine", "chi2", 'cityblock', 'l1', 'l2', 'braycurtis', 'canberra', 'chebyshev', 'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis', 'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule']
        print(f"\nSupported Similarities: {supported_similarities}")
        print(f"Supported Distances/Dissimilarities: {supported_dissimilarities}\n")
        self._transactions = self._data.transactions
        # Allocated as (num_users, num_items); overwritten by process_similarity.
        self._similarity_matrix = np.empty((len(self._users), len(self._items)))
        self.process_similarity(self._similarity)
    def process_similarity(self, similarity):
        # Dissimilarity metrics are converted to similarities via 1 / (1 + d).
        if similarity == "cosine":
            self._similarity_matrix = cosine_similarity(self._user_profile_matrix, self._item_attribute_matrix)
        elif similarity == "dot":
            # NOTE(review): this branch multiplies the ratings matrix by its
            # own transpose and ignores the profile/attribute matrices, so its
            # shape may not be (num_users, num_items) like the other branches
            # -- confirm intended.
            self._similarity_matrix = (self._data.sp_i_train_ratings @ self._data.sp_i_train_ratings.T).toarray()
        elif similarity == "euclidean":
            self._similarity_matrix = (1 / (1 + euclidean_distances(self._user_profile_matrix, self._item_attribute_matrix)))
        elif similarity == "manhattan":
            self._similarity_matrix = (1 / (1 + manhattan_distances(self._user_profile_matrix, self._item_attribute_matrix)))
        elif similarity == "haversine":
            self._similarity_matrix = (1 / (1 + haversine_distances(self._user_profile_matrix, self._item_attribute_matrix)))
        elif similarity == "chi2":
            self._similarity_matrix = (1 / (1 + chi2_kernel(self._user_profile_matrix, self._item_attribute_matrix)))
        elif similarity in ['cityblock', 'l1', 'l2']:
            self._similarity_matrix = (1 / (1 + pairwise_distances(self._user_profile_matrix, self._item_attribute_matrix, metric=similarity)))
        elif similarity in ['braycurtis', 'canberra', 'chebyshev', 'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis', 'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule']:
            # These scipy metrics need dense arrays.
            self._similarity_matrix = (1 / (1 + pairwise_distances(self._user_profile_matrix.toarray(), self._item_attribute_matrix.toarray(), metric=similarity)))
        else:
            raise Exception("Not implemented similarity")
    def get_transactions(self):
        # Number of training interactions (set in initialize()).
        return self._transactions
    def get_user_recs(self, u, k):
        """Return the top-k (item, score) pairs for user `u`, excluding items
        the user already interacted with in training."""
        user_items = self._ratings[u].keys()
        indexed_user_items = [self._public_items[i] for i in user_items]
        predictions = {self._private_items[i]: v for i, v in enumerate(self._similarity_matrix[self._public_users[u]]) if i not in indexed_user_items}
        indices, values = zip(*predictions.items())
        indices = np.array(indices)
        values = np.array(values)
        # argpartition gives the k best in O(n); they are then fully sorted.
        partially_ordered_preds_indices = np.argpartition(values, -k)[-k:]
        real_values = values[partially_ordered_preds_indices]
        real_indices = indices[partially_ordered_preds_indices]
        local_top_k = real_values.argsort()[::-1]
        return [(real_indices[item], real_values[item]) for item in local_top_k]
    def get_model_state(self):
        # NOTE(review): _neighbors and _num_neighbors are never assigned in
        # this class, so this raises AttributeError unless set_model_state was
        # called first -- confirm intended (likely copied from a kNN model).
        saving_dict = {}
        saving_dict['_neighbors'] = self._neighbors
        saving_dict['_similarity'] = self._similarity
        saving_dict['_num_neighbors'] = self._num_neighbors
        return saving_dict
    def set_model_state(self, saving_dict):
        # Restore the attributes saved by get_model_state.
        self._neighbors = saving_dict['_neighbors']
        self._similarity = saving_dict['_similarity']
        self._num_neighbors = saving_dict['_num_neighbors']
|
nilq/baby-python
|
python
|
from enum import Enum
from typing import Optional, Sequence
from PyQt5 import QtCore, QtWidgets
from electroncash.address import Address, AddressError
from electroncash.consolidate import (
MAX_STANDARD_TX_SIZE,
MAX_TX_SIZE,
AddressConsolidator,
)
from electroncash.constants import PROJECT_NAME, XEC
from electroncash.transaction import Transaction
from electroncash.wallet import Abstract_Wallet
from electroncash_gui.qt.multi_transactions_dialog import MultiTransactionsWidget
class TransactionsStatus(Enum):
    """Lifecycle states of the transaction-building worker; the `value`
    string is shown verbatim in the transactions page status label."""
    INTERRUPTED = "cancelled"
    NOT_STARTED = "not started"
    SELECTING = "selecting coins..."
    BUILDING = "building transactions..."
    FINISHED = "finished building transactions"
    NO_RESULT = "finished without generating any transactions"
class ConsolidateWorker(QtCore.QObject):
    """Builds consolidation transactions; intended to run on a worker QThread."""
    finished = QtCore.pyqtSignal()
    status_changed = QtCore.pyqtSignal(TransactionsStatus)
    transactions_ready = QtCore.pyqtSignal(list)
    """Emits the list of :class:`Transaction` after the last transaction is
    generated."""
    progress = QtCore.pyqtSignal(int)
    """Emits the number of generated transactions after each new transaction."""
    def __init__(
        self,
        address: Address,
        wallet: Abstract_Wallet,
        include_coinbase: bool,
        include_non_coinbase: bool,
        include_frozen: bool,
        include_slp: bool,
        minimum_value: Optional[int],
        maximum_value: Optional[int],
        minimum_height: Optional[int],
        maximum_height: Optional[int],
        output_address: Address,
        max_tx_size: int,
    ):
        super().__init__()
        # NOTE: coin selection happens here in __init__, i.e. in the thread
        # that constructs the worker, not in the worker thread.
        self.status_changed.emit(TransactionsStatus.SELECTING)
        self.consolidator = AddressConsolidator(
            address,
            wallet,
            include_coinbase,
            include_non_coinbase,
            include_frozen,
            include_slp,
            minimum_value,
            maximum_value,
            minimum_height,
            maximum_height,
            output_address,
            max_tx_size,
        )
        # Mutex guards `interrupt`: written from the GUI thread
        # (request_interruption), read from the worker thread.
        self.interrupt_mutex = QtCore.QMutex()
        self.interrupt: bool = False
    def was_interruption_requested(self) -> bool:
        # Read the interruption flag under the mutex.
        self.interrupt_mutex.lock()
        do_interrupt = self.interrupt
        self.interrupt_mutex.unlock()
        return do_interrupt
    def request_interruption(self):
        """Stop the worker as soon as possible (i.e. in-between two
        transactions).
        This causes the :attr:`status_changed` and :attr:`finished` signals to be
        emitted. The :attr:`transactions_ready` signal is not emitted if the worker
        is interrupted before it has generated the last transaction.
        """
        self.interrupt_mutex.lock()
        self.interrupt = True
        self.interrupt_mutex.unlock()
    def build_transactions(self):
        """Generate all transactions, emitting progress after each one."""
        self.status_changed.emit(TransactionsStatus.BUILDING)
        transactions = []
        for i, tx in enumerate(self.consolidator.iter_transactions()):
            # Check for cancellation between transactions.
            if self.was_interruption_requested():
                self.status_changed.emit(TransactionsStatus.INTERRUPTED)
                self.finished.emit()
                return
            transactions.append(tx)
            self.progress.emit(i + 1)
        if transactions:
            self.status_changed.emit(TransactionsStatus.FINISHED)
        # else the transaction page will set the status to NO_RESULT upon receiving
        # an empty list of transactions
        self.transactions_ready.emit(transactions)
        self.finished.emit()
class ConsolidateCoinsWizard(QtWidgets.QWizard):
    """Three-page wizard: filter coins, pick the destination/params, then
    build and review the unsigned consolidation transactions."""
    def __init__(
        self,
        address: Address,
        wallet: Abstract_Wallet,
        main_window,
        parent: Optional[QtWidgets.QWidget] = None,
    ):
        super().__init__(parent)
        self.setWindowTitle(f"Consolidate coins for address {address.to_ui_string()}")
        # Worker thread; created lazily when the transactions page is reached.
        self.tx_thread: Optional[QtCore.QThread] = None
        self.address: Address = address
        self.wallet: Abstract_Wallet = wallet
        self.transactions: Sequence[Transaction] = []
        self.coins_page = CoinSelectionPage()
        self.addPage(self.coins_page)
        self.output_page = OutputsPage(address)
        self.addPage(self.output_page)
        self.tx_page = TransactionsPage(wallet, main_window)
        self.addPage(self.tx_page)
        self.currentIdChanged.connect(self.on_page_changed)
    def on_page_changed(self, page_id: int):
        # The thread is only supposed to be started after reaching the tx_page,
        # and must be stopped if the user decides to go back to a previous page
        # or close the dialog.
        self.stop_thread_if_running()
        if self.currentPage() is self.tx_page:
            self.tx_page.update_status(TransactionsStatus.NOT_STARTED)
            self.tx_thread = QtCore.QThread()
            # The worker's __init__ runs coin selection with the settings
            # gathered on the two previous pages.
            # NOTE(review): unlike the amount filters (checkbox-gated getters),
            # the height spin boxes are read unconditionally -- their enable
            # checkboxes do not gate the filter here; confirm intended.
            self.worker = ConsolidateWorker(
                self.address,
                self.wallet,
                self.coins_page.include_coinbase_cb.isChecked(),
                self.coins_page.include_non_coinbase_cb.isChecked(),
                self.coins_page.include_frozen_cb.isChecked(),
                self.coins_page.include_slp_cb.isChecked(),
                self.coins_page.get_minimum_value(),
                self.coins_page.get_maximum_value(),
                self.coins_page.minimum_height_sb.value(),
                self.coins_page.maximum_height_sb.value(),
                self.output_page.get_output_address(),
                self.output_page.tx_size_sb.value(),
            )
            # Connections
            self.worker.moveToThread(self.tx_thread)
            self.tx_thread.started.connect(self.worker.build_transactions)
            self.worker.status_changed.connect(self.tx_page.update_status)
            self.worker.progress.connect(self.tx_page.update_progress)
            self.worker.transactions_ready.connect(self.on_build_transactions_finished)
            self.worker.finished.connect(self.tx_thread.quit)
            self.tx_thread.start()
    def stop_thread_if_running(self):
        # Ask the worker to stop between transactions, then quit its thread.
        if self.tx_thread is not None and self.tx_thread.isRunning():
            self.worker.request_interruption()
            self.tx_thread.quit()
    def on_build_transactions_finished(self, transactions: Sequence[Transaction]):
        # Keep the unsigned transactions and hand them to the review page.
        self.transactions = transactions
        self.tx_page.set_unsigned_transactions(self.transactions)
class AmountSpinBox(QtWidgets.QDoubleSpinBox):
    """Spin box for entering an amount in XEC (2 decimals = 1 satoshi)."""
    def __init__(self):
        super().__init__()
        self.setToolTip(f"Amount in {XEC}")
        # 0.01 XEC is 1 satoshi
        self.setDecimals(2)
        self.setStepType(QtWidgets.QAbstractSpinBox.AdaptiveDecimalStepType)
        # Upper bound on representable amounts (21 trillion XEC).
        self.setMaximum(21_000_000_000_000)
        self.setGroupSeparatorShown(True)
        # Enough width to display "21 000 000 000,00":
        self.setMinimumWidth(170)
class BlockHeightSpinBox(QtWidgets.QSpinBox):
    """Spin box for entering a block height."""
    def __init__(self):
        super().__init__()
        self.setToolTip("Block height")
        # This maximum should give us a useful range of ~20,000 years
        self.setMaximum(1_000_000_000)
        self.setGroupSeparatorShown(True)
class CoinSelectionPage(QtWidgets.QWizardPage):
    """Wizard page with the filters that decide which coins get consolidated."""
    def __init__(self, parent=None):
        super().__init__(parent)
        self.setTitle("Filter coins")
        layout = QtWidgets.QVBoxLayout()
        self.setLayout(layout)
        self.include_coinbase_cb = QtWidgets.QCheckBox("Include coinbase coins")
        self.include_coinbase_cb.setChecked(True)
        layout.addWidget(self.include_coinbase_cb)
        self.include_non_coinbase_cb = QtWidgets.QCheckBox("Include non-coinbase coins")
        self.include_non_coinbase_cb.setChecked(True)
        layout.addWidget(self.include_non_coinbase_cb)
        self.include_frozen_cb = QtWidgets.QCheckBox("Include frozen coins")
        self.include_frozen_cb.setChecked(False)
        layout.addWidget(self.include_frozen_cb)
        self.include_slp_cb = QtWidgets.QCheckBox("Include coins with SLP tokens")
        self.include_slp_cb.setChecked(False)
        # Including SLP coins burns the tokens -- warn before enabling.
        self.include_slp_cb.toggled.connect(self.warn_burn_tokens)
        layout.addWidget(self.include_slp_cb)
        # Default 5.46 XEC (546 sat) -- presumably the dust threshold; confirm.
        self.minimum_amount_sb = AmountSpinBox()
        self.minimum_amount_sb.setValue(5.46)
        self.minimum_amount_sb.valueChanged.connect(self.on_min_or_max_amount_changed)
        self.filter_by_min_value_cb = self.add_filter_by_value_line(
            "Minimum amount (XEC)", self.minimum_amount_sb
        )
        self.maximum_amount_sb = AmountSpinBox()
        self.maximum_amount_sb.setValue(21_000_000_000_000)
        self.maximum_amount_sb.valueChanged.connect(self.on_min_or_max_amount_changed)
        self.filter_by_max_value_cb = self.add_filter_by_value_line(
            "Maximum amount (XEC)", self.maximum_amount_sb
        )
        self.minimum_height_sb = BlockHeightSpinBox()
        self.minimum_height_sb.setValue(0)
        self.minimum_height_sb.valueChanged.connect(self.on_min_or_max_height_changed)
        self.filter_by_min_height_cb = self.add_filter_by_value_line(
            "Minimum block height", self.minimum_height_sb
        )
        self.maximum_height_sb = BlockHeightSpinBox()
        self.maximum_height_sb.setValue(1_000_000)
        self.maximum_height_sb.valueChanged.connect(self.on_min_or_max_height_changed)
        self.filter_by_max_height_cb = self.add_filter_by_value_line(
            "Maximum block height", self.maximum_height_sb
        )
    def add_filter_by_value_line(
        self, label_text: str, value_widget: QtWidgets.QWidget
    ) -> QtWidgets.QCheckBox:
        """Add a line with a checkbox and a widget to specify a value.
        The value widget is enabled when the checkbox is checked.
        Return the created QCheckBox instance."""
        sublayout = QtWidgets.QHBoxLayout()
        self.layout().addLayout(sublayout)
        checkbox = QtWidgets.QCheckBox(label_text)
        sublayout.addWidget(checkbox)
        # Filters start disabled; the checkbox enables its value widget.
        checkbox.setChecked(False)
        value_widget.setEnabled(False)
        checkbox.toggled.connect(value_widget.setEnabled)
        sublayout.addWidget(value_widget)
        return checkbox
    def warn_burn_tokens(self, include_slp_is_checked: bool):
        # Confirmation dialog; Cancel reverts the checkbox.
        if include_slp_is_checked:
            button = QtWidgets.QMessageBox.warning(
                self,
                "SLP tokens may be lost",
                f"{PROJECT_NAME} does not support transferring SLP tokens. If you "
                "include them in the consolidation transaction, they will be burned.",
                buttons=QtWidgets.QMessageBox.Cancel | QtWidgets.QMessageBox.Ok,
            )
            if button == QtWidgets.QMessageBox.Cancel:
                self.include_slp_cb.setChecked(False)
    def get_minimum_value(self) -> Optional[int]:
        """Return minimum value in satoshis, or None"""
        # XEC amount * 100 = satoshis.
        return (
            None
            if not self.filter_by_min_value_cb.isChecked()
            else int(100 * self.minimum_amount_sb.value())
        )
    def get_maximum_value(self) -> Optional[int]:
        """Return maximum value in satoshis, or None"""
        return (
            None
            if not self.filter_by_max_value_cb.isChecked()
            else int(100 * self.maximum_amount_sb.value())
        )
    def on_min_or_max_amount_changed(self, *args):
        """Warn if the min-max range is empty"""
        # Paint both amount fields red while min > max.
        if self.minimum_amount_sb.value() > self.maximum_amount_sb.value():
            self.minimum_amount_sb.setStyleSheet("color: red;")
            self.maximum_amount_sb.setStyleSheet("color: red;")
        else:
            self.minimum_amount_sb.setStyleSheet("")
            self.maximum_amount_sb.setStyleSheet("")
    def on_min_or_max_height_changed(self, *args):
        """Warn if the min-max range is empty"""
        # Paint both height fields red while min > max.
        if self.minimum_height_sb.value() > self.maximum_height_sb.value():
            self.minimum_height_sb.setStyleSheet("color: red;")
            self.maximum_height_sb.setStyleSheet("color: red;")
        else:
            self.minimum_height_sb.setStyleSheet("")
            self.maximum_height_sb.setStyleSheet("")
class OutputsPage(QtWidgets.QWizardPage):
    """Wizard page selecting the destination address and max transaction size."""
    def __init__(self, input_address: Address, parent=None):
        super().__init__(parent)
        self.inputs_address: Address = input_address
        # Parsed destination when "Single address" is selected; None while
        # the text field does not hold a valid address.
        self.output_address: Optional[Address] = None
        self.setTitle("Outputs")
        layout = QtWidgets.QVBoxLayout()
        self.setLayout(layout)
        layout.addWidget(QtWidgets.QLabel("<h2>Destination address</h2>"))
        self.same_address_rb = QtWidgets.QRadioButton("Same address as inputs")
        self.same_address_rb.setChecked(True)
        layout.addWidget(self.same_address_rb)
        single_address_sublayout = QtWidgets.QHBoxLayout()
        layout.addLayout(single_address_sublayout)
        self.single_address_rb = QtWidgets.QRadioButton("Single address")
        single_address_sublayout.addWidget(self.single_address_rb)
        self.output_address_edit = QtWidgets.QLineEdit()
        self.output_address_edit.setPlaceholderText("enter a valid destination address")
        self.output_address_edit.setEnabled(False)
        single_address_sublayout.addWidget(self.output_address_edit)
        layout.addSpacing(20)
        layout.addWidget(QtWidgets.QLabel("<h2>Transaction parameters</h2>"))
        tx_size_layout = QtWidgets.QHBoxLayout()
        layout.addLayout(tx_size_layout)
        tx_size_layout.addWidget(QtWidgets.QLabel("Maximum transaction size (bytes)"))
        self.tx_size_sb = QtWidgets.QSpinBox()
        self.tx_size_sb.setMinimum(192)
        self.tx_size_sb.setMaximum(MAX_TX_SIZE)
        self.tx_size_sb.setValue(MAX_STANDARD_TX_SIZE)
        tx_size_layout.addWidget(self.tx_size_sb)
        # The address field is only active in "Single address" mode, and
        # switching modes re-evaluates page completeness.
        self.single_address_rb.toggled.connect(self.output_address_edit.setEnabled)
        self.single_address_rb.toggled.connect(self.completeChanged.emit)
        self.output_address_edit.textChanged.connect(self.validate_address)
    def validate_address(self, address_text: str):
        # Re-parse on every keystroke; notify the wizard only when validity
        # actually changed.
        previous_address = self.output_address
        try:
            self.output_address = Address.from_string(address_text)
        except AddressError:
            self.output_address = None
        if self.output_address != previous_address:
            self.completeChanged.emit()
    def isComplete(self):
        # "Next" is allowed unless single-address mode lacks a valid address.
        return not self.single_address_rb.isChecked() or self.output_address is not None
    def get_output_address(self) -> Address:
        """Return the selected destination: the inputs' own address, or the
        user-supplied single address."""
        return (
            self.inputs_address
            if self.same_address_rb.isChecked()
            else self.output_address
        )
class TransactionsPage(QtWidgets.QWizardPage):
    """Wizard page showing the batch of generated (unsigned) transactions."""

    def __init__(self, wallet, main_window, parent=None):
        super().__init__(parent)
        # Drives isComplete(); updated by the transaction builder via update_status.
        self.status: TransactionsStatus = TransactionsStatus.NOT_STARTED
        self.setTitle("Transactions")
        layout = QtWidgets.QVBoxLayout()
        self.setLayout(layout)
        self.status_label = QtWidgets.QLabel()
        layout.addWidget(self.status_label)
        self.multi_tx_display = MultiTransactionsWidget(wallet, main_window)
        layout.addWidget(self.multi_tx_display)

    def display_work_in_progress(self):
        """Disable buttons, inform the user about the ongoing computation"""
        self.multi_tx_display.reset_labels()
        self.multi_tx_display.disable_buttons()
        self.setCursor(QtCore.Qt.WaitCursor)

    def update_status(self, status: TransactionsStatus):
        """Reflect the builder's status in the UI and page completeness."""
        if status == TransactionsStatus.BUILDING:
            self.display_work_in_progress()
        self.status_label.setText(f"Status: <b>{status.value}</b>")
        previous_status, self.status = self.status, status
        # Only transitions into or out of FINISHED can change isComplete().
        if previous_status != status and TransactionsStatus.FINISHED in [
            previous_status,
            status,
        ]:
            self.completeChanged.emit()

    def update_progress(self, num_tx: int):
        # Progress callback: number of transactions built so far.
        self.multi_tx_display.set_displayed_number_of_transactions(num_tx)

    def set_unsigned_transactions(self, transactions: Sequence[Transaction]):
        """Show the finished batch, or report that nothing was produced."""
        self.unsetCursor()
        if not transactions:
            self.update_status(TransactionsStatus.NO_RESULT)
            return
        self.multi_tx_display.set_transactions(transactions)

    def isComplete(self) -> bool:
        return self.status == TransactionsStatus.FINISHED
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# データセットの交差検証
import pandas as pd
import numpy as np
import dataclasses
from collections import defaultdict
from .utils.folder import folder_create
from tqdm import tqdm
@dataclasses.dataclass
class Stratified_group_k_fold:
    """
    Holds the parameters for stratified group K-fold splitting of a dataset.
    """
    csv_config: dict  # info from the csv describing the training data
    split_info_folder : str  # folder where the breakdown of the split files is saved
    n_splits: int = 5  # number of folds
    shuffle: bool = False  # whether to shuffle the groups before assignment
    random_state: int = None  # random seed used when shuffle is True

    def __post_init__(self):
        """Resolve the csv column names from the config dict."""
        self.filename_column = self.csv_config["image_filename_column"]  # filename column
        self.label_column = self.csv_config["label_column"]  # label column
        self.group_column = self.csv_config["ID_column"]  # group (ID) column

    def split(self, X, y, groups=None):
        """
        Perform stratified group K-fold splitting.

        Parameters
        ----------
        X : array-like, shape(n_files,)
            File names to split.
        y : array-like, shape(n_files,)
            Labels of the files to split.
        groups : None or array-like, shape(n_files,)
            Group names of the files to split.
            If None this degenerates to plain stratified K-fold.

        Yields
        -------
        train_index : array-like, shape(n_splits, n_files)
            Indices of X assigned to training for the i-th fold.
        test_index : array-like, shape(n_splits, n_files)
            Indices of X assigned to testing for the i-th fold.
        """
        # Initialization
        ## If no groups are given, each file name forms its own group
        ## Collect the unique group names
        if groups is None:
            groups = X
        unique_group_list = list(set(groups))
        ## Get the number and kinds of labels, and per-label totals
        labels_list = list(set(y))
        labels_num = len(labels_list)
        y_count = np.zeros(labels_num)
        for _y in y:
            y_count[labels_list.index(_y)] += 1
        ## Build the group->files dict, the file->label dict,
        ## and the group->per-label-count dict
        group_X_dict = defaultdict(list)
        X_y_dict = defaultdict(list)
        group_y_count_dict = defaultdict(lambda: np.zeros(labels_num))
        for _X, _y, _groups in zip(X, y, groups):
            group_X_dict[_groups].append(_X)
            idx = labels_list.index(_y)
            X_y_dict[_X] = idx
            group_y_count_dict[_groups][idx] += 1
        ## Initialize the per-fold accumulators
        group_X_fold = [[] for i in range(self.n_splits)]
        group_y_count_fold = [np.zeros(labels_num)
                              for i in range(self.n_splits)]
        # Shuffle, treating each group as one unit
        if self.shuffle is True:
            np.random.seed(seed=self.random_state)
            np.random.shuffle(unique_group_list)
        # Stratified group K-fold:
        # tentatively add each group to every fold and keep the assignment
        # that minimizes the std-dev of the per-label ratios across folds
        for unique_group in tqdm(unique_group_list, desc='k-fold_split'):
            best_fold = None
            min_value = None
            for i in range(self.n_splits):
                group_y_count_fold[i] += group_y_count_dict[unique_group]
                std_per_label = []
                for label in range(labels_num):
                    # NOTE: the comprehension's `i` shadows the outer `i`
                    # only inside the comprehension scope (Python 3).
                    label_std = np.std([group_y_count_fold[i][label]
                                        / y_count[label]
                                        for i in range(self.n_splits)])
                    std_per_label.append(label_std)
                group_y_count_fold[i] -= group_y_count_dict[unique_group]
                value = np.mean(std_per_label)
                if min_value is None or value < min_value:
                    min_value = value
                    best_fold = i
            group_y_count_fold[best_fold] += group_y_count_dict[unique_group]
            group_X_fold[best_fold] += group_X_dict[unique_group]
        # Use the i-th fold as test data and the rest as training data
        X_set = set(X)
        for i in range(self.n_splits):
            X_train = X_set - set(group_X_fold[i])
            X_test = set(group_X_fold[i])
            train_index = [i for i, _X in enumerate(X) if _X in X_train]
            test_index = [i for i, _X in enumerate(X) if _X in X_test]
            yield train_index, test_index

    def k_fold_classifier(self, df):
        """
        For classification: perform stratified group K-fold splitting and
        save the breakdown of each fold as csv files.

        Parameters
        ----------
        df : DataFrame(pandas)
            Information about the data used for training.

        Returns
        -------
        df_train_list : array-like[DataFrame(pandas)], shape(n_splits,)
            Data assigned to training for each fold.
        df_test_list : array-like, shape(n_splits, n_files)
            Data assigned to testing for each fold.
        """
        # Stratified group K-fold
        folder_create(self.split_info_folder)
        X = df[self.filename_column].values
        y = list(map(str, df[self.label_column].values))
        if self.group_column == 'None':
            groups = None
        else:
            groups = df[self.group_column].values
        df_train_list = []
        df_test_list = []
        for i, (train_index, test_index) in enumerate(self.split(X, y, groups)):
            df_train = df.iloc[train_index]
            df_test = df.iloc[test_index]
            ## Write out the breakdown of the split data
            df_train.to_csv(f'{self.split_info_folder}/train_{i}.csv',
                            index=False, encoding='utf-8')
            df_test.to_csv(f'{self.split_info_folder}/test_{i}.csv',
                           index=False, encoding='utf-8')
            df_train_list.append(df_train)
            df_test_list.append(df_test)
        return df_train_list, df_test_list

    def k_fold_regressor(self, df, bins_num=None):
        """
        For regression: perform stratified group K-fold splitting and save
        the breakdown as csv. Numeric labels are binned into pseudo-labels
        so each fold gets a similar value distribution.

        Parameters
        ----------
        df : DataFrame(pandas)
            Information about the data used for training.
        bins_num : int or None
            Number of pseudo-label bins; if None, sqrt(n_files) is used.

        Returns
        -------
        df_train_list : array-like[DataFrame(pandas)], shape(n_splits,)
            Data assigned to training for each fold.
        df_test_list : array-like, shape(n_splits, n_files)
            Data assigned to testing for each fold.
        """
        # Stratified group K-fold
        folder_create(self.split_info_folder)
        X = df[self.filename_column].values
        y = df[self.label_column].values
        ## Create pseudo labels so the numeric distribution is split evenly
        if bins_num is None:
            bins_num = int(len(X) ** 0.5) + 1
        bins = np.linspace(min(y), max(y), bins_num)
        y_pseudo = np.digitize(y, bins) - 1
        # Pull the max value back into the last bin (digitize puts it one past)
        y_pseudo[np.argmax(y)] -= 1
        if self.group_column == 'None':
            groups = None
        else:
            groups = df[self.group_column].values
        df_train_list = []
        df_test_list = []
        for i, (train_index, test_index) in enumerate(self.split(X, y_pseudo, groups)):
            df_train = df.iloc[train_index]
            df_test = df.iloc[test_index]
            ## Write out the breakdown of the split data
            df_train.to_csv(f'{self.split_info_folder}/train_{i}.csv',
                            index=False, encoding='utf-8')
            df_test.to_csv(f'{self.split_info_folder}/test_{i}.csv',
                           index=False, encoding='utf-8')
            df_train_list.append(df_train)
            df_test_list.append(df_test)
        return df_train_list, df_test_list
|
nilq/baby-python
|
python
|
import numpy as np
from tqdm import tqdm
class PaddingInputExample(object):
    """Sentinel example used to pad a batch to a fixed size.

    TPU eval/predict requires the number of examples to be a multiple of
    the batch size, because the TPU needs a fixed batch size; dropping the
    last partial batch would lose output data. A dedicated sentinel class
    is used instead of `None` so that padding batches cannot be silently
    mistaken for real data.
    """
class InputExample(object):
    """A single training/test example for simple sequence classification."""

    def __init__(self, guid, text_a, text_b=None, label=None):
        """Construct an InputExample.

        Args:
            guid: Unique id for the example.
            text_a: string, untokenized text of the first sequence; for
                single-sequence tasks only this one must be given.
            text_b: optional string, untokenized text of the second
                sequence (sequence-pair tasks only).
            label: optional string label; set for train/dev examples,
                left None for test examples.
        """
        self.guid, self.label = guid, label
        self.text_a, self.text_b = text_a, text_b
def convert_single_example(tokenizer, example, max_seq_length=256):
    """Convert one `InputExample` into (input_ids, input_mask, segment_ids, label).

    Padding sentinels yield all-zero features with label 0.
    """
    if isinstance(example, PaddingInputExample):
        return [0] * max_seq_length, [0] * max_seq_length, [0] * max_seq_length, 0

    # Truncate to leave room for the [CLS] and [SEP] markers.
    wordpieces = tokenizer.tokenize(example.text_a)[: max_seq_length - 2]
    tokens = ["[CLS]"] + wordpieces + ["[SEP]"]
    # Single-sequence task: every position belongs to segment 0.
    segment_ids = [0] * len(tokens)
    input_ids = tokenizer.convert_tokens_to_ids(tokens)
    # The mask has 1 for real tokens; padding positions get 0 below.
    input_mask = [1] * len(input_ids)
    # Zero-pad all three sequences up to max_seq_length.
    pad_len = max_seq_length - len(input_ids)
    input_ids.extend([0] * pad_len)
    input_mask.extend([0] * pad_len)
    segment_ids.extend([0] * pad_len)
    assert len(input_ids) == len(input_mask) == len(segment_ids) == max_seq_length
    return input_ids, input_mask, segment_ids, example.label
def convert_examples_to_features(tokenizer, examples, max_seq_length=256):
    """Convert `InputExample`s into stacked numpy feature arrays.

    Returns (input_ids, input_masks, segment_ids, labels) where labels has
    shape (n, 1).
    """
    id_rows, mask_rows, segment_rows, label_values = [], [], [], []
    for ex in tqdm(examples, desc="Converting examples to features"):
        ids, mask, segments, label = convert_single_example(
            tokenizer, ex, max_seq_length
        )
        id_rows.append(ids)
        mask_rows.append(mask)
        segment_rows.append(segments)
        label_values.append(label)
    return (
        np.array(id_rows),
        np.array(mask_rows),
        np.array(segment_rows),
        np.array(label_values).reshape(-1, 1),
    )
def convert_text_to_examples(texts, labels):
    """Wrap parallel (token-list, label) pairs into `InputExample` objects."""
    return [
        InputExample(guid=None, text_a=" ".join(text), text_b=None, label=label)
        for text, label in zip(texts, labels)
    ]
|
nilq/baby-python
|
python
|
from typing import List
import metagrad.module as nn
from examples.feedforward import load_dataset
from metagrad.dataloader import DataLoader
from metagrad.dataset import TensorDataset
from metagrad.functions import sigmoid
from metagrad.loss import BCELoss
from metagrad.optim import SGD
from metagrad.paramater import Parameter
from metagrad.tensor import no_grad, Tensor
from metagrad.utils import Animator, run_epoch, regression_classification_metric
class DynamicFFN(nn.Module):
    """Feed-forward classifier with a configurable number of hidden layers.

    The first hidden layer has `hidden_size` units; each additional hidden
    layer halves the width. ReLU activations are used throughout.
    """

    def __init__(self, num_layers, input_size, hidden_size, output_size):
        '''
        :param num_layers: number of hidden layers
        :param input_size: input dimension
        :param hidden_size: size of the first hidden layer
        :param output_size: number of classes
        '''
        # FIX: the base Module was never initialized; nn.Module subclasses
        # must call super().__init__() before registering sub-modules.
        super().__init__()
        layers = []
        layers.append(nn.Linear(input_size, hidden_size))  # input -> hidden vector
        layers.append(nn.ReLU())  # activation
        for i in range(num_layers - 1):
            layers.append(nn.Linear(hidden_size, hidden_size // 2))
            hidden_size = hidden_size // 2  # widths shrink layer by layer
            layers.append(nn.ReLU())
        layers.append(nn.Linear(hidden_size, output_size))  # hidden vector -> output
        self.net = nn.Sequential(*layers)

    def forward(self, x: Tensor) -> Tensor:
        """Run the whole stack on input x."""
        return self.net(x)

    def weights(self) -> List[Parameter]:
        """Return the weight parameter of every Linear layer."""
        parameters = []
        for layer in self.net.layers:
            if isinstance(layer, nn.Linear):
                parameters.append(layer.weight)
        return parameters

    def bias(self) -> List[Parameter]:
        """Return the bias parameter of every Linear layer."""
        parameters = []
        for layer in self.net.layers:
            if isinstance(layer, nn.Linear):
                parameters.append(layer.bias)
        return parameters
def train_model(model, opt, train_dl, val_dl, num_epochs=20):
    """Train for `num_epochs` epochs and return the per-epoch validation losses."""
    criterion = BCELoss(reduction=None)
    val_history = []
    for epoch in range(num_epochs):
        train_loss, _ = run_epoch(model, train_dl, criterion, opt, activate_func=sigmoid,
                                  evaluate_func=regression_classification_metric)
        # Validation pass: no optimizer, no gradient tracking.
        with no_grad():
            val_loss, _ = run_epoch(model, val_dl, criterion, opt=None, activate_func=sigmoid,
                                    evaluate_func=regression_classification_metric)
        val_history.append(val_loss)
        print(f'epoch:{epoch + 1}, train loss:{train_loss:.4f}, validation loss:{val_loss:.4f}')
    return val_history
def compare_model(train_dl, val_dl, original_model, new_model, original_opt, new_opt,
                  original_label='Simple model', new_label='Complex model', ):
    """Train both models under identical conditions and plot their
    validation-loss curves on a log scale."""
    num_epochs = 20
    print(f'Training {original_label}:')
    baseline_losses = train_model(original_model, original_opt, train_dl, val_dl, num_epochs)
    print(f'Training {new_label}:')
    challenger_losses = train_model(new_model, new_opt, train_dl, val_dl, num_epochs)
    animator = Animator(xlabel='epoch', ylabel='validation loss', yscale='log',
                        xlim=[1, num_epochs], ylim=[1e-3, 1e2],
                        legend=[original_label, new_label], saved_file='animator')
    for epoch, loss_pair in enumerate(zip(baseline_losses, challenger_losses), start=1):
        animator.add(epoch, loss_pair)
    animator.show()
def simple_and_complex(input_size, output_size, train_dl, val_dl):
    '''
    Compare a small one-hidden-layer model with a deeper, wider one.

    :param input_size: input dimension
    :param output_size: number of outputs
    :param train_dl: training DataLoader
    :param val_dl: validation DataLoader
    :return:
    '''
    # Construction order preserved: model-then-optimizer, small before big.
    small_model = DynamicFFN(1, input_size, 4, output_size)
    small_opt = SGD(small_model.parameters(), lr=0.1)
    big_model = DynamicFFN(4, input_size, 128, output_size)
    big_opt = SGD(big_model.parameters(), lr=0.1)
    compare_model(train_dl, val_dl, small_model, big_model, small_opt, big_opt)
def complex_with_l2_or_not(input_size, output_size, train_dl, val_dl):
    '''
    Compare a complex model trained with L2 regularization against an
    identical one trained without it.

    :param input_size: input dimension
    :param output_size: number of outputs
    :param train_dl: training DataLoader
    :param val_dl: validation DataLoader
    :return:
    '''
    plain_model = DynamicFFN(1, input_size, 256, output_size)
    plain_opt = SGD(plain_model.parameters(), lr=0.1)
    l2_model = DynamicFFN(1, input_size, 256, output_size)
    # Apply the L2 penalty to the weights only, never to the biases.
    l2_opt = SGD(
        [
            {"params": l2_model.weights(), 'weight_decay': 0.01},
            {"params": l2_model.bias()},
        ],
        lr=0.1,
    )
    compare_model(train_dl, val_dl, plain_model, l2_model, plain_opt, l2_opt, "Complex model",
                  "Complex Model(L2)")
if __name__ == '__main__':
    # Load the train/test/validation splits.
    X_train, X_test, y_train, y_test, X_val, y_val = load_dataset()
    batch_size = 512
    train_ds = TensorDataset(X_train, y_train)
    train_dl = DataLoader(train_ds, batch_size=batch_size)
    val_ds = TensorDataset(X_val, y_val)
    val_dl = DataLoader(val_ds, batch_size=batch_size)
    input_size = 10000
    output_size = 1
    # Run the L2-regularization comparison experiment.
    complex_with_l2_or_not(input_size, output_size, train_dl, val_dl)
|
nilq/baby-python
|
python
|
from functools import wraps
from ..exceptions import BeeSQLError
def primary_keyword(func):
    """Turn a statement method into a primary-keyword generator.

    The wrapped method must run against a selected table; its result is
    recorded via set_statement and returned.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if not self.table:
            raise BeeSQLError('No table selected. Use Query.on to select a table first')
        stmt = func(self, *args, **kwargs)
        self.set_statement(stmt)
        return stmt
    return wrapper
def secondary_keyword(func):
    """ Convert a statement method into a secondary keyword generator. """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        # Register the generated keyword, then chain on self.
        self.add_secondary_keyword(func(self, *args, **kwargs))
        return self
    return wrapper
def logical_operator(func):
    """Guard a logical-operator method: a condition must already be set."""
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.is_condition_set():
            return func(self, *args, **kwargs)
        raise BeeSQLError('No condition set.')
    return wrapper
def aggregation(func):
    """Convert a method into an aggregation generator that chains on self."""
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        self.add_aggregation(func(self, *args, **kwargs))
        return self
    return wrapper
def complete_condition(query_part_name):
    """ Works with ColumnSelector class.

    NOTE(review): the decorated function's body is never invoked — the
    wrapper builds the operator purely from query_part_name and the value;
    presumably intentional, the decorated method exists for its name/doc.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(self, value, **kwargs):
            op = self.get_operator(query_part_name, value)
            return self.complete(op)
        return wrapper
    return decorator
|
nilq/baby-python
|
python
|
"""
Relationship pseudo-model.
"""
class Relationship:
    """A relationship (edge) in a property-graph view of data."""

    def __init__(self, start_id, end_id, type, properties):
        """Create a relationship.

        :param {str} start_id: unique id of the 'from' node of this relationship
        :param {str} end_id: unique id of the 'to' node of this relationship
        :param {list} type: qualified relationship 'type', typically from some enumeration
        :param {dict} properties: scalar attributes ("properties") of the relationship
        """
        self.start_id, self.end_id = start_id, end_id
        self.type = type
        self.properties = properties
|
nilq/baby-python
|
python
|
# Number of Islands
class Solution(object):
    def numIslands(self, grid):
        """Count connected groups of '1' cells (4-directional) in grid.

        :type grid: List[List[str]]
        :rtype: int
        """
        if not any(grid):
            return 0
        rows, cols = len(grid), len(grid[0])
        islands = 0
        for r in range(rows):
            for c in range(cols):
                if grid[r][c] == '1':
                    islands += 1
                    self.dfs(r, c, grid)
        return islands

    def dfs(self, i, j, grid):
        """Flood-fill: sink the whole island containing (i, j) to '0'."""
        rows, cols = len(grid), len(grid[0])
        pending = [(i, j)]
        grid[i][j] = '0'
        while pending:
            r, c = pending.pop()
            for nr, nc in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
                if 0 <= nr < rows and 0 <= nc < cols and grid[nr][nc] == '1':
                    grid[nr][nc] = '0'
                    pending.append((nr, nc))
# O(mn) time, O(max(m, n)) space for recursive stacks
# follow up: how to find the number of lakes?
# a lake is an area of water surrounded horizontally and vertically
# by the same island
# solution:
# 1. use num_islands() to mark islands with different ids
# 2. iterate through the grid, if it's water then dfs to see if
# it's surrounded by lands of the same id
|
nilq/baby-python
|
python
|
from parameterized import parameterized
from combinatrix.testintegration import load_parameter_sets
from doajtest.helpers import DoajTestCase
from doajtest.fixtures import JournalFixtureFactory, ArticleFixtureFactory
from doajtest.mocks.store import StoreMockFactory
from doajtest.mocks.model_Cache import ModelCacheMockFactory
from portality.lib.paths import rel2abs
from portality.lib import dates
from portality.background import BackgroundApi
from portality.tasks.public_data_dump import PublicDataDumpBackgroundTask
from portality import models, store
from portality.core import app
import os, shutil, tarfile, json
from StringIO import StringIO
def load_cases():
    """Load the parameter matrix that drives the public-data-dump tests."""
    matrix_path = rel2abs(__file__, "..", "matrices", "tasks.public_data_dump")
    return load_parameter_sets(matrix_path, "data_dump", "test_id", {"test_id": []})
class TestPublicDataDumpTask(DoajTestCase):
    """Matrix-driven integration test for PublicDataDumpBackgroundTask.

    NOTE(review): written for Python 2 (the module imports StringIO and the
    file-count arithmetic relies on integer division with `/`).
    """

    def setUp(self):
        """Redirect storage to temp dirs and swap in an in-memory cache."""
        super(TestPublicDataDumpTask, self).setUp()
        # Remember the config values we are about to override.
        self.store_tmp_imp = app.config.get("STORE_TMP_IMPL")
        self.store_imp = app.config.get("STORE_IMPL")
        self.discovery_records_per_file = app.config.get("DISCOVERY_RECORDS_PER_FILE")
        self.store_local_dir = app.config["STORE_LOCAL_DIR"]
        self.store_tmp_dir = app.config["STORE_TMP_DIR"]
        self.cache = models.Cache
        app.config["STORE_IMPL"] = "portality.store.StoreLocal"
        app.config["STORE_LOCAL_DIR"] = rel2abs(__file__, "..", "tmp", "store", "main")
        app.config["STORE_TMP_DIR"] = rel2abs(__file__, "..", "tmp", "store", "tmp")
        os.makedirs(app.config["STORE_LOCAL_DIR"])
        os.makedirs(app.config["STORE_TMP_DIR"])
        models.cache.Cache = ModelCacheMockFactory.in_memory()

    def tearDown(self):
        """Restore the original config, cache and filesystem state."""
        app.config["STORE_TMP_IMPL"] = self.store_tmp_imp
        app.config["STORE_IMPL"] = self.store_imp
        app.config["DISCOVERY_RECORDS_PER_FILE"] = self.discovery_records_per_file
        shutil.rmtree(rel2abs(__file__, "..", "tmp"))
        app.config["STORE_LOCAL_DIR"] = self.store_local_dir
        app.config["STORE_TMP_DIR"] = self.store_tmp_dir
        models.cache.Cache = self.cache
        super(TestPublicDataDumpTask, self).tearDown()

    @parameterized.expand(load_cases)
    def test_public_data_dump(self, name, kwargs):
        """Run one matrix case end-to-end and verify store/cache outcomes."""
        clean_arg = kwargs.get("clean")
        prune_arg = kwargs.get("prune")
        types_arg = kwargs.get("types")
        journals_arg = kwargs.get("journals")
        articles_arg = kwargs.get("articles")
        batch_size_arg = kwargs.get("batch_size")
        tmp_write_arg = kwargs.get("tmp_write")
        store_write_arg = kwargs.get("store_write")
        status_arg = kwargs.get("status")

        ###############################################
        ## set up

        clean = True if clean_arg == "yes" else False if clean_arg == "no" else None
        prune = True if prune_arg == "yes" else False if prune_arg == "no" else None
        types = types_arg if types_arg != "-" else None

        journal_count = int(journals_arg)
        article_count = int(articles_arg)
        batch_size = int(batch_size_arg)
        # Expected tarball member counts (Python 2 integer division).
        journal_file_count = 0 if journal_count == 0 else (journal_count / batch_size) + 1
        article_file_count = 0 if article_count == 0 else (article_count / batch_size) + 1
        first_article_file_records = 0 if article_count == 0 else batch_size if article_count > batch_size else article_count
        first_journal_file_records = 0 if journal_count == 0 else batch_size if journal_count > batch_size else journal_count

        # add the data to the index first, to maximise the time it has to become available for search
        sources = JournalFixtureFactory.make_many_journal_sources(journal_count, in_doaj=True)
        jids = []
        for i in range(len(sources)):
            source = sources[i]
            journal = models.Journal(**source)
            journal.save()
            jids.append((journal.id, journal.last_updated))

        aids = []
        for i in range(article_count):
            source = ArticleFixtureFactory.make_article_source(
                eissn="{x}000-0000".format(x=i),
                pissn="0000-{x}000".format(x=i),
                with_id=False,
                doi="10.123/{x}".format(x=i),
                fulltext="http://example.com/{x}".format(x=i)
            )
            article = models.Article(**source)
            article.save()
            aids.append((article.id, article.last_updated))

        # construct some test data in the local store
        container_id = app.config["STORE_PUBLIC_DATA_DUMP_CONTAINER"]
        localStore = store.StoreLocal(None)
        localStoreFiles = []
        if clean or prune:
            # Pre-populate dated dump files so clean/prune has something to remove.
            for i in range(5):
                localStore.store(container_id, "doaj_article_data_2018-01-0" + str(i) + ".tar.gz",
                                 source_stream=StringIO("test"))
                localStore.store(container_id, "doaj_journal_data_2018-01-0" + str(i) + ".tar.gz",
                                 source_stream=StringIO("test"))
            localStoreFiles = localStore.list(container_id)

        app.config["DISCOVERY_RECORDS_PER_FILE"] = batch_size

        # set the mocks for store write failures
        if tmp_write_arg == "fail":
            app.config["STORE_TMP_IMPL"] = StoreMockFactory.no_writes_classpath()

        if store_write_arg == "fail":
            app.config["STORE_IMPL"] = StoreMockFactory.no_writes_classpath()

        # block until all the records are saved
        for jid, lu in jids:
            models.Journal.block(jid, lu, sleep=0.05)
        for aid, lu in aids:
            models.Article.block(aid, lu, sleep=0.05)

        ###########################################################
        # Execution

        job = PublicDataDumpBackgroundTask.prepare("testuser", clean=clean, prune=prune, types=types)
        task = PublicDataDumpBackgroundTask(job)
        BackgroundApi.execute(task)

        # make sure we have a fresh copy of the job
        job = task.background_job
        assert job.status == status_arg

        if job.status != "error":
            article_url = models.cache.Cache.get_public_data_dump().get("article", {}).get("url")
            if types_arg in ["-", "all", "article"]:
                assert article_url is not None
            else:
                assert article_url is None

            journal_url = models.cache.Cache.get_public_data_dump().get("journal", {}).get("url")
            if types_arg in ["-", "all", "journal"]:
                assert journal_url is not None
            else:
                assert journal_url is None

            assert localStore.exists(container_id)
            files = localStore.list(container_id)

            if types_arg in ["-", "all"]:
                assert len(files) == 2
            else:
                assert len(files) == 1

            day_at_start = dates.today()

            if types_arg in ["-", "all", "article"]:
                article_file = "doaj_article_data_" + day_at_start + ".tar.gz"
                assert article_file in files

                stream = localStore.get(container_id, article_file)
                tarball = tarfile.open(fileobj=stream, mode="r:gz")
                members = tarball.getmembers()
                assert len(members) == article_file_count

                if len(members) > 0:
                    f = tarball.extractfile(members[0])
                    data = json.loads(f.read())
                    assert len(data) == first_article_file_records
                    record = data[0]
                    # Only these keys may be exposed in the public dump.
                    for key in record.keys():
                        assert key in ["admin", "bibjson", "id", "last_updated", "created_date"]
                    if "admin" in record:
                        for key in record["admin"].keys():
                            assert key in ["ticked", "seal"]

            if types_arg in ["-", "all", "journal"]:
                journal_file = "doaj_journal_data_" + day_at_start + ".tar.gz"
                assert journal_file in files

                stream = localStore.get(container_id, journal_file)
                tarball = tarfile.open(fileobj=stream, mode="r:gz")
                members = tarball.getmembers()
                assert len(members) == journal_file_count

                if len(members) > 0:
                    f = tarball.extractfile(members[0])
                    data = json.loads(f.read())
                    assert len(data) == first_journal_file_records
                    record = data[0]
                    # Only these keys may be exposed in the public dump.
                    for key in record.keys():
                        assert key in ["admin", "bibjson", "id", "last_updated", "created_date"]
                    if "admin" in record:
                        for key in record["admin"].keys():
                            assert key in ["ticked", "seal"]
        else:
            # in the case of an error, we expect the tmp store to have been cleaned up
            tmpStore = store.TempStore()
            assert not tmpStore.exists(container_id)

            # in the case of an error, we expect the main store not to have been touched
            # (for the errors that we are checking for)
            if prune and not clean:
                # no matter what the error, if we didn't specify clean then we expect everything
                # to survive
                survived = localStore.list(container_id)
                assert localStoreFiles == survived
            elif clean:
                # if we specified clean, then it's possible the main store was cleaned before the
                # error occurred, in which case it depends on the error.  This reminds us that
                # clean shouldn't be used in production
                if tmp_write_arg == "fail":
                    assert not localStore.exists(container_id)
                else:
                    survived = localStore.list(container_id)
                    assert localStoreFiles == survived
            else:
                # otherwise, we expect the main store to have survived
                assert not localStore.exists(container_id)
|
nilq/baby-python
|
python
|
from flask import Flask, jsonify, request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def hello_world():
    """Autocomplete endpoint: return a JSON array of language names that
    start with the ``term`` query parameter (CORS-enabled).

    Returns an empty body when ``term`` is missing or empty.
    """
    response = ""
    # BUG FIX: request.args['term'] raised a 400 BadRequest when the
    # parameter was absent, so the `if term:` guard below was unreachable
    # for that case; .get() returns '' instead and the guard works.
    term = request.args.get('term', '')
    if term:
        items = [ "c++", "java", "php", "coldfusion", "javascript", "asp", "ruby", "perl", "ocaml", "haskell", "rust", "go" ]
        response = jsonify([item for item in items if item.startswith(term)])
        response.headers.add('Access-Control-Allow-Origin', '*')
    return response
|
nilq/baby-python
|
python
|
"""Flsqls module."""
from pineboolib.core import decorators
from pineboolib.core.utils import utils_base
from pineboolib.application.metadata import pntablemetadata
from pineboolib import logging
from pineboolib.fllegacy import flutil
from pineboolib.interfaces import isqldriver
from sqlalchemy.orm import sessionmaker # type: ignore [import] # noqa: F821
from typing import Optional, Union, List, Any, TYPE_CHECKING
if TYPE_CHECKING:
from sqlalchemy.engine import ( # type: ignore [import] # noqa: F401, F821
base, # noqa: F401
result, # noqa: F401
) # noqa: F401 # pragma: no cover
from pineboolib.interfaces import isession
LOGGER = logging.get_logger(__name__)
class FLPYMSSQL(isqldriver.ISqlDriver):
"""FLPYMSSQL class."""
    def __init__(self):
        """Initialize driver metadata and SQL Server specific settings."""
        super().__init__()
        self.version_ = "0.9"
        self.name_ = "FLPYMSSQL"
        self.error_list = []
        self.alias_ = "SQL Server (PYMSSQL)"
        self.default_port = 1433  # default SQL Server TCP port
        # T-SQL spells savepoint handling as SAVE/ROLLBACK TRANSACTION.
        self.savepoint_command = "SAVE TRANSACTION"
        self.rollback_savepoint_command = "ROLLBACK TRANSACTION"
        self.commit_transaction_command = "COMMIT"
        # BIT literals — presumably used for boolean LIKE filters (see base class).
        self._like_true = "1"
        self._like_false = "0"
        self._safe_load = {"pymssql": "pymssql", "sqlalchemy": "sqlAlchemy"}
        # Error-code keywords identifying a "database not found" failure.
        self._database_not_found_keywords = ["20018"]
        self._text_like = ""
        self._sqlalchemy_name = "mssql+pymssql"
        self._create_isolation = False
    def getAlternativeConn(self, name: str, host: str, port: int, usern: str, passw_: str) -> Any:
        """Return an administrative connection to the ``master`` database.

        Temporarily enables autocommit in the engine parameters — presumably
        because server-level statements (e.g. CREATE DATABASE) cannot run
        inside a transaction; confirm against the base class usage.
        """
        self._queqe_params["connect_args"] = {"autocommit": True}
        conn_ = self.getConn("master", host, port, usern, passw_)
        del self._queqe_params["connect_args"]
        # conn_.execute("set transaction isolation level read uncommitted;")
        return conn_
    def session(self) -> "isession.PinebooSession":
        """Create a sqlAlchemy session.

        Retries until the underlying DBAPI connection is valid, then tags
        the session with the connection name and registers it in the
        connection manager's per-thread session map.
        """
        while True:
            session_class = sessionmaker(bind=self.connection(), autoflush=False, autocommit=True)
            new_session = session_class()
            # A freshly created session may carry a dead DBAPI connection; retry.
            if new_session.connection().connection is not None:
                break
            else:
                LOGGER.warning("Conexión invalida capturada.Solicitando nueva")

        setattr(new_session, "_conn_name", self.db_._name)
        session_key = utils_base.session_id(self.db_._name, True)
        self.db_._conn_manager._thread_sessions[session_key] = new_session
        return new_session
def existsTable(self, table_name: str) -> bool:
"""Return if exists a table specified by name."""
sql = (
"SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE "
+ "TABLE_NAME = N'%s' AND TABLE_CATALOG = '%s'" % (table_name, self._dbname)
)
cur = self.execute_query(sql)
return True if cur and cur.fetchone() else False
def nextSerialVal(self, table_name: str, field_name: str) -> int:
"""Return next serial value."""
if self.is_open():
cur = self.execute_query("SELECT NEXT VALUE FOR %s_%s_seq" % (table_name, field_name))
if cur and cur.returns_rows:
return cur.fetchone()[0] # type: ignore [index] # noqa: F821
LOGGER.warning("not exec sequence")
return 0
    def releaseSavePoint(self, num: int) -> bool:
        """Release savepoint *num*.

        No-op: T-SQL has no RELEASE SAVEPOINT statement, so there is
        nothing to execute; always reports success.
        """
        return True
def setType(self, type_: str, leng: int = 0) -> str:
"""Return type definition."""
type_ = type_.lower()
res_ = ""
if type_ in ("int", "serial"):
res_ = "INT"
elif type_ == "uint":
res_ = "BIGINT"
elif type_ in ("bool", "unlock"):
res_ = "BIT"
elif type_ == "double":
res_ = "DECIMAL"
elif type_ == "time":
res_ = "TIME"
elif type_ == "date":
res_ = "DATE"
elif type_ in ("pixmap", "stringlist"):
res_ = "TEXT"
elif type_ == "string":
res_ = "VARCHAR"
elif type_ == "bytearray":
res_ = "NVARCHAR"
elif type_ == "timestamp":
res_ = "DATETIME2"
elif type_ == "json":
res_ = "NVARCHAR"
else:
LOGGER.warning("seType: unknown type %s", type_)
leng = 0
return "%s(%s)" % (res_, leng) if leng else res_
    def sqlCreateTable(
        self, tmd: "pntablemetadata.PNTableMetaData", create_index: bool = True
    ) -> Optional[str]:
        """Return a CREATE TABLE query for the given table metadata.

        Query-type metadata is delegated to sqlCreateView. Serial fields
        additionally create a server-side sequence (when the connection is
        open and create_index is set). Returns None when the metadata is
        invalid (more than one "unlock" field); raises on a duplicate
        primary key.
        """
        if tmd.isQuery():
            return self.sqlCreateView(tmd)
        util = flutil.FLUtil()
        primary_key = ""
        sql = "CREATE TABLE %s (" % tmd.name()
        seq = None

        field_list = tmd.fieldList()

        unlocks = 0
        for number, field in enumerate(field_list):
            sql += field.name()
            type_ = field.type()
            if type_ == "serial":
                # Emulate a serial column with an explicit sequence.
                seq = "%s_%s_seq" % (tmd.name(), field.name())
                if self.is_open() and create_index:
                    try:
                        self.execute_query("CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1" % seq)
                    except Exception as error:
                        LOGGER.error("%s::sqlCreateTable:%s", __name__, str(error))
                sql += " INT"
            elif type_ == "double":
                # DECIMAL(total digits, decimal digits).
                sql += " DECIMAL(%s,%s)" % (
                    int(field.partInteger()) + int(field.partDecimal()),
                    int(field.partDecimal()),
                )
            else:
                if type_ == "unlock":
                    unlocks += 1

                    if unlocks > 1:
                        # At most one unlock field is allowed per table.
                        LOGGER.warning(
                            u"FLManager : No se ha podido crear la tabla %s ", tmd.name()
                        )
                        LOGGER.warning(
                            u"FLManager : Hay mas de un campo tipo unlock. Solo puede haber uno."
                        )
                        return None

                sql += " %s" % self.setType(type_, field.length())

            if field.isPrimaryKey():
                if not primary_key:
                    sql = sql + " PRIMARY KEY"
                    primary_key = field.name()
                else:
                    # Composite keys must use FLCompoundKey, not two PKs.
                    LOGGER.warning(
                        util.translate(
                            "application",
                            "FLManager : Tabla-> %s ." % tmd.name()
                            + "Se ha intentado poner una segunda clave primaria para el campo %s ,pero el campo %s ya es clave primaria."
                            % (primary_key, field.name())
                            + "Sólo puede existir una clave primaria en FLTableMetaData, use FLCompoundKey para crear claves compuestas.",
                        )
                    )
                    raise Exception(
                        "A primary key (%s) has been defined before the field %s.%s -> %s"
                        % (primary_key, tmd.name(), field.name(), sql)
                    )
            else:
                sql += " UNIQUE" if field.isUnique() else ""
                sql += " NULL" if field.allowNull() else " NOT NULL"

            if number != len(field_list) - 1:
                sql += ","

        sql += ")"

        return sql
def decodeSqlType(self, type_: Union[int, str]) -> str:
"""Return the specific field type."""
ret = str(type_).lower()
if type_ == "bit":
ret = "bool"
elif type_ == "bigint":
ret = "uint"
elif type_ == "decimal":
ret = "double"
elif type_ == "date":
ret = "date"
elif type_ == "time":
ret = "time"
elif type_ == "varchar":
ret = "string"
elif type_ == "nvarchar":
ret = "bytearray"
elif type_ == "text":
ret = "stringlist"
elif type_ == "datetime2":
ret = "timestamp"
elif type_ == "json":
ret = "json"
return ret
def tables(self, type_name: str = "", table_name: str = "") -> List[str]:
    """Return the names of database objects of the requested kind.

    type_name may be "Tables", "Views", "SystemTables" or "" (all three);
    table_name optionally restricts the result to one name.
    """
    if not self.is_open():
        return []
    # Map the public kind names onto SYSOBJECTS.xtype selectors.
    selectors = []
    for kind, xtype in (("Tables", "U"), ("Views", "V"), ("SystemTables", "S")):
        if type_name in (kind, ""):
            selectors.append("xtype ='%s'" % xtype)
    if not selectors:
        return []
    and_name = " AND name ='%s'" % (table_name) if table_name else ""
    cursor = self.execute_query(
        "SELECT name FROM SYSOBJECTS where %s%s ORDER BY name ASC"
        % (" OR ".join(selectors), and_name)
    )
    rows = cursor.fetchall() if cursor else []
    return [row[0] for row in rows]
def declareCursor(
    self, curname: str, fields: str, table: str, where: str, conn_db: "base.Connection"
) -> Optional["result.ResultProxy"]:
    """Declare and open a static server-side cursor for the given query.

    Always returns None; errors are logged, not raised (except when the
    database is closed).
    """
    if not self.is_open():
        raise Exception("declareCursor: Database not open")
    stmt = "DECLARE %s CURSOR STATIC FOR SELECT %s FROM %s WHERE %s " % (
        curname,
        fields,
        table,
        where,
    )
    try:
        # Declare first, then open the named cursor.
        for command in (stmt, "OPEN %s" % curname):
            conn_db.execute(command)
    except Exception as error:
        LOGGER.error("refreshQuery: %s", error)
        LOGGER.info("SQL: %s", stmt)
        LOGGER.trace("Detalle:", stack_info=True)
    return None
def deleteCursor(self, cursor_name: str, cursor: Any) -> None:
    """Close the named server-side cursor if it is currently open.

    Errors are logged, not raised (except when the database is closed).
    """
    if not self.is_open():
        raise Exception("deleteCursor: Database not open")
    try:
        # CURSOR_STATUS >= 1 means the global cursor exists and is open.
        cursor.execute("SELECT CURSOR_STATUS('global','%s')" % cursor_name)
        if cursor.fetchone()[0] >= 1:
            cursor.execute("CLOSE %s" % cursor_name)
    except Exception as exception:
        LOGGER.error("finRow: %s", exception)
        LOGGER.warning("Detalle:", stack_info=True)
# def fix_query(self, query: str) -> str:
# """Fix string."""
# # ret_ = query.replace(";", "")
# return query
@decorators.not_implemented_warn
def alterTable(self, new_metadata: "pntablemetadata.PNTableMetaData") -> bool:
    """Modify a table structure.

    Not implemented for this driver: the decorator emits a warning and the
    method reports success without touching the schema.
    """
    return True
def recordInfo2(self, tablename: str) -> List[List[Any]]:
    """Collect column metadata for *tablename* from INFORMATION_SCHEMA.

    Each entry is [name, field_type, not_null, size, None, default, None];
    the trailing None is the unresolved primary-key flag.
    """
    sql = (
        "SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_DEFAULT, NUMERIC_PRECISION_RADIX,"
        + " CHARACTER_MAXIMUM_LENGTH FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '%s'"
        % tablename.lower()
    )
    cursor = self.execute_query(sql)
    rows = cursor.fetchall() if cursor else []
    info: List[List[Any]] = []
    for name, data_type, nullable, default, _radix, max_length in rows:
        info.append(
            [
                name,
                self.decodeSqlType(data_type),
                nullable != "YES",  # "not null" flag
                int(max_length) if max_length else 0,
                None,
                default,
                None,  # field_pk is not resolved here
            ]
        )
    return info
def vacuum(self) -> None:
    """Vacuum tables.

    This driver performs no vacuum-style maintenance; the method is a no-op
    kept for driver-interface compatibility.
    """
    return
def sqlLength(self, field_name: str, size: int) -> str:
    """Return the SQL expression comparing *field_name*'s length to *size*."""
    return "LEN({})={}".format(field_name, size)
|
nilq/baby-python
|
python
|
"""
Copyright 2021 Gabriele Pisciotta - ga.pisciotta@gmail.com
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted,
provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
OF THIS SOFTWARE.
"""
__author__ = "Gabriele Pisciotta"
import networkx as nx
from oc_ocdm import Storer
from oc_ocdm.graph import GraphSet
from oc_ocdm.graph.entities.bibliographic.agent_role import AgentRole
from oc_ocdm.graph.entities.bibliographic.bibliographic_resource import BibliographicResource
from oc_ocdm.graph.entities.bibliographic.responsible_agent import ResponsibleAgent
from oc_ocdm.graph.graph_entity import GraphEntity
from oc_ocdm.prov import ProvSet
from rdflib import URIRef
class InstanceMatching:
    """Deduplicate an OCDM graph set in place.

    Merges agent roles (AR), bibliographic resources (BR) and identifiers
    (ID) that share the same identifier scheme+literal, then serializes the
    merged graph and the provenance of every change.
    """

    def __init__(self, g_set: GraphSet,
                 graph_filename="matched.rdf",
                 provenance_filename="provenance.rdf",
                 resp_agent='https://w3id.org/oc/meta/prov/pa/4',
                 debug=False):
        # Graph set to deduplicate, plus serialization destinations.
        self.g_set = g_set
        self.graph_filename = graph_filename
        self.provenance_filename = provenance_filename
        self.debug = debug
        # Agent recorded as responsible for the provenance snapshots.
        self.resp_agent = resp_agent
        self.prov = ProvSet(self.g_set, self.resp_agent)

    def match(self):
        """ Start the matching process that will do, in sequence:
        - match the ARs
        - match the BRs
        - match the IDs
        In the end, this process will produce:
        - `matched.rdf` that will contain the graph set specified previously without the duplicates.
        - `provenance.rdf` that will contain the provenance, tracking record of all the changes done.
        """
        self.instance_matching_ar()
        self.instance_matching_br()
        self.instance_matching_ids()
        self.save()
        return self.g_set

    def save(self):
        """ Serialize the graph set into the specified RDF file,
        and the provenance in another specified RDF file.
        """
        gs_storer = Storer(self.g_set, output_format="nt11")
        gs_storer.store_graphs_in_file(self.graph_filename, "")
        prov_storer = Storer(self.prov, output_format="nquads")
        prov_storer.store_graphs_in_file(self.provenance_filename, "")

    def instance_matching_ar(self):
        """ Discover all the ARs that share the same identifier's literal, creating a graph of them.
        Then merge each connected component (cluster of ARs linked by the same identifier) into one.
        For each couple of AR that are going to be merged, substitute the references of the AR that
        will no longer exist, by removing the AR from each of its referred BR and add, instead, the merged one)
        If the RA linked by the AR that will no longer exist is not linked by any other AR, then
        it will be marked as to be deleted, otherwise not.
        In the end, generate the provenance and commit pending changes in the graph set"""
        merge_graph: nx.Graph = nx.Graph()
        associated_ar_ra = self.__get_association_ar_ra()
        associated_ar_br = self.__get_association_ar_br()
        # scheme -> literal -> first AR seen carrying that identifier
        identifiers = {}
        for ar in self.g_set.get_ar():
            role = ar.get_role_type()
            # Extract Authors and Publishers, with their info and their identifiers
            if role == GraphEntity.iri_author or role == GraphEntity.iri_publisher:
                for i in ar.get_identifiers():
                    if identifiers.get(i.get_scheme()) is None:
                        identifiers[i.get_scheme()] = {}
                    # NOTE: despite the name, this holds an AgentRole, not a ResponsibleAgent.
                    ra_first: AgentRole = identifiers[i.get_scheme()].get(i.get_literal_value())
                    if ra_first is None:
                        identifiers[i.get_scheme()][i.get_literal_value()] = ar
                    else:
                        # Same scheme+literal already seen: link the two ARs for merging.
                        merge_graph.add_edge(ra_first, ar)
                        if self.debug:
                            print("[IM-RA] Will merge {} and {} due to {}:{} in common".format(ar.res,
                                                                                               ra_first.res,
                                                                                               i.get_scheme().split(
                                                                                                   "/")[-1],
                                                                                               i.get_literal_value()))
        # Get the connected components of the graph (clusters of "to-be-merged"):
        clusters = sorted(nx.connected_components(merge_graph), key=len, reverse=True)
        print("[IM-RA] N° of clusters: {}".format(len(clusters)))
        for n, cluster in enumerate(clusters):
            # Sort by string form so the merge target is deterministic.
            clusters_dict = {}
            clusters_str_list = []
            for k in cluster:
                clusters_dict[str(k)] = k
                clusters_str_list.append(str(k))
            clusters_str_list.sort()
            entity_first: AgentRole = clusters_dict[clusters_str_list[0]]
            if self.debug:
                print("[IM-RA] Merging cluster #{}, with {} entities".format(n, len(cluster)))
            for entity in clusters_str_list[1:]:
                other_entity = clusters_dict[entity]
                if self.debug:
                    print(f"\tMerging agent role {entity} in agent role {entity_first}")
                # The other entity has been merged in the first entity: at this point we need to change all the
                # occurrencies of the other entity with the first entity by looking at all the BRs referred
                if associated_ar_br.get(other_entity) is not None:
                    for other_br in associated_ar_br.get(other_entity):
                        other_br.remove_contributor(other_entity)
                        other_br.has_contributor(entity_first)
                        if self.debug:
                            print(f"\tUnset {other_entity} as contributor of {other_br}")
                            print(f"\tSet {entity_first} as contributor of {other_br} ")
                ra_to_delete = entity_first.get_is_held_by()
                entity_first.merge(other_entity)
                if entity_first.get_is_held_by() != ra_to_delete:
                    # The merge replaced the held RA: drop the old one only when
                    # no other AR still references it.
                    if associated_ar_ra.get(ra_to_delete) is not None and len(associated_ar_ra.get(ra_to_delete)) == 1:
                        ra_to_delete.mark_as_to_be_deleted()
                    else:
                        # NOTE(review): unmarking here and re-marking just below
                        # looks contradictory — confirm the intended semantics.
                        other_entity.mark_as_to_be_deleted(False)
                other_entity.mark_as_to_be_deleted()
                if self.debug:
                    print(f"\tMarking to delete: {other_entity} ")
        self.prov.generate_provenance()
        self.g_set.commit_changes()

    def instance_matching_br(self):
        """ Discover all the BRs that share the same identifier's literal, creating a graph of them.
        Then merge each connected component (cluster of BRs linked by the same identifier) into one.
        For each couple of BR that are going to be merged, merge also:
        - their containers by matching the proper type (issue of BR1 -> issue of BR2)
        - their publisher
        NB: when two BRs are merged, you'll have the union of their ARs. You could have duplicates if the duplicates
        don't have any ID in common or if the method `instance_matching_ar` wasn't called before.
        In the end, generate the provenance and commit pending changes in the graph set"""
        merge_graph: nx.Graph = nx.Graph()
        # scheme -> literal -> first BR seen carrying that identifier
        identifiers = {}
        for br in self.g_set.get_br():
            for i in br.get_identifiers():
                if identifiers.get(i.get_scheme()) is None:
                    identifiers[i.get_scheme()] = {}
                br_first: BibliographicResource = identifiers[i.get_scheme()].get(i.get_literal_value())
                if br_first is None:
                    identifiers[i.get_scheme()][i.get_literal_value()] = br
                else:
                    merge_graph.add_edge(br_first, br)
                    if self.debug:
                        print("[IM-BR] Will merge {} into {} due to {}:{} in common".format(br.res,
                                                                                            br_first.res,
                                                                                            i.get_scheme().split("/")[
                                                                                                -1],
                                                                                            i.get_literal_value()))
        # Get the connected components of the graph (clusters of "to-be-merge"):
        clusters = sorted(nx.connected_components(merge_graph), key=len, reverse=True)
        print("[IM-BR] N° of clusters: {}".format(len(clusters)))
        for n, cluster in enumerate(clusters):
            # Sort by string form so the merge target is deterministic.
            clusters_dict = {}
            clusters_str_list = []
            for k in cluster:
                clusters_dict[str(k)] = k
                clusters_str_list.append(str(k))
            clusters_str_list.sort()
            entity_first: BibliographicResource = clusters_dict[clusters_str_list[0]]
            # NOTE: __get_publisher returns an AgentRole with the publisher role.
            publisher_first: AgentRole = self.__get_publisher(entity_first)
            entity_first_partofs = self.__get_part_of(entity_first)
            if self.debug:
                print("[IM-BR] Merging cluster #{}, with {} entities".format(n, len(cluster)))
            entity: BibliographicResource
            for entity in clusters_str_list[1:]:
                entity = clusters_dict[entity]
                # Merge containers
                partofs = self.__get_part_of(entity)
                p1: BibliographicResource;
                p2: BibliographicResource
                for p1 in entity_first_partofs:
                    p1types = p1.get_types()
                    # Every BR is a fabio:Expression; drop it so only the
                    # specific container kind (issue, volume, journal) is matched.
                    p1types.remove(URIRef('http://purl.org/spar/fabio/Expression'))
                    for p2 in partofs:
                        p2types = p2.get_types()
                        p2types.remove(URIRef('http://purl.org/spar/fabio/Expression'))
                        intersection_of_types = set(p2types).intersection(set(p1types))
                        if intersection_of_types is not None and len(intersection_of_types) != 0:
                            p1.merge(p2)
                            if self.debug:
                                print(f"\tMerging container {p2} in container {p1} ({intersection_of_types})")
                # Merge publisher
                publisher = self.__get_publisher(entity)
                if publisher is not None and publisher_first is not None and publisher != publisher_first:
                    publisher_first.merge(publisher)
                    if self.debug:
                        print(f"\tMerging publisher {publisher} in publisher {publisher_first}")
                # Merge authors
                # contributors = entity.get_contributors()
                # Merging the two BRs
                entity_first.merge(entity)
                # for ar in contributors:
                #     print(f"\tRemoving agent role {ar} from bibliographic resource {entity_first}")
                #     entity_first.remove_contributor(ar)
        self.prov.generate_provenance()
        self.g_set.commit_changes()

    def instance_matching_ids(self):
        """ Discover all the IDs that share the same schema and literal, then merge all into one
        and substitute all the reference with the merged one.
        In the end, generate the provenance and commit pending changes in the graph set"""
        # "scheme#literal" -> list of identifier entities sharing that value
        literal_to_id = {}
        # identifier entity -> list of entities (BR/AR) carrying it
        id_to_resources = {}
        entities = list(self.g_set.get_br())
        entities.extend(list(self.g_set.get_ar()))
        for e in entities:
            for i in e.get_identifiers():
                literal = i.get_scheme() + "#" + i.get_literal_value()
                if i in id_to_resources:
                    id_to_resources[i].append(e)
                else:
                    id_to_resources[i] = [e]
                if literal in literal_to_id:
                    literal_to_id[literal].append(i)
                else:
                    literal_to_id[literal] = [i]
        for k, v in literal_to_id.items():
            if len(v) > 1:
                schema, lit = k.split('#')
                print(
                    f"[IM-ID] Will merge {len(v) - 1} identifiers into {v[0]} because they share literal {lit} and schema {schema}")
                for actual_id in v[1:]:
                    v[0].merge(actual_id)
                    entities = id_to_resources[actual_id]
                    # Remove, from all the entities, the ID that has been merged
                    # Setting, instead, the merged one as new ID
                    for e in entities:
                        e.remove_identifier(actual_id)
                        if v[0] not in e.get_identifiers():
                            e.has_identifier(v[0])
                    actual_id.mark_as_to_be_deleted()
        self.prov.generate_provenance()
        self.g_set.commit_changes()

    @staticmethod
    def __get_part_of(br):
        """ Given a BR in input (e.g.: a journal article), walk the full 'part-of' chain.
        Returns a list of BR that are the hierarchy of containers (e.g: given an article-> [issue, journal])"""
        partofs = []
        e = br
        ended = False
        while not ended:
            partof = e.get_is_part_of()
            if partof is not None:
                partofs.append(partof)
                e = partof
            else:
                ended = True
        return partofs

    @staticmethod
    def __get_publisher(br):
        """ Given a BR as input, returns the AR that is a publisher """
        # Implicitly returns None when no contributor has the publisher role.
        for ar in br.get_contributors():
            role = ar.get_role_type()
            if role == GraphEntity.iri_publisher:
                return ar

    def __get_association_ar_ra(self):
        """ Returns the dictionary:
        key-> RA
        value-> list of AR
        This let you take all the ARs associated to the same RA
        """
        association = {}
        for ar in self.g_set.get_ar():
            if ar.get_is_held_by() is not None and ar.get_is_held_by() not in association:
                association[ar.get_is_held_by()] = [ar]
            elif ar.get_is_held_by() is not None and ar.get_is_held_by() in association:
                association[ar.get_is_held_by()].append(ar)
        return association

    def __get_association_ar_br(self):
        """ Returns the dictionary:
        key-> AR
        value-> list of BR
        This let you take all the BRs associated to the same AR
        """
        # Only ARs that actually hold an RA are indexed.
        association = {}
        for br in self.g_set.get_br():
            for ar in br.get_contributors():
                if ar.get_is_held_by() is not None and ar not in association:
                    association[ar] = [br]
                elif ar.get_is_held_by() is not None and ar in association:
                    association[ar].append(br)
        return association
|
nilq/baby-python
|
python
|
import re
from .models import Profile, Link
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.module_loading import import_string
from django.contrib.sites.shortcuts import get_current_site
from django.contrib.auth.models import User
from django.contrib.auth import get_user_model
from django.template.defaultfilters import filesizeformat
from rest_framework import serializers
from rest_framework import serializers, exceptions
from rest_auth.registration.serializers import RegisterSerializer as RS
from rest_auth.serializers import LoginSerializer as LS
from rest_auth.models import TokenModel
from avatar.models import Avatar
from avatar.signals import avatar_updated
from allauth.account.forms import ResetPasswordForm, default_token_generator
from allauth.account.utils import send_email_confirmation, user_pk_to_url_str
from allauth.account.forms import UserTokenForm
from allauth.account.adapter import get_adapter
from allauth.utils import email_address_exists
from allauth.account.models import EmailAddress
from allauth.account import app_settings as allauth_settings
from allauth.account.utils import setup_user_email
UserModel = get_user_model()
class UserSocialLinksSerializer(serializers.ModelSerializer):
    """Exposes a profile's social-media links."""
    class Meta:
        model = Link
        fields = ('facebook', 'twitter', 'youtube', 'instagram')
class ProfileSerializer(serializers.ModelSerializer):
    """Serializer for user profile objects (social links nested read-only).

    Names are written through first_name/last_name; the public read field is
    the computed displayed_name.
    """
    link = UserSocialLinksSerializer(read_only=True)

    # Compiled once at import time instead of on every validate() call.
    # The character class covers Latin letters with diacritics plus Arabic script.
    _NAME_PATTERN = re.compile(
        "^[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ðء-ي]+[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ðء-ي]+[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ðء-ي]*$"
    )

    class Meta:
        model = Profile
        fields = ('first_name', 'last_name', 'displayed_name', 'bio', 'location', 'birth_date', 'link')
        extra_kwargs = {
            'first_name': {'write_only': True},
            'last_name': {'write_only': True},
            'displayed_name': {'read_only': True}}

    def validate(self, data):
        """Reject first/last names containing anything but letters.

        Only the name keys actually present are checked, so partial updates
        that omit them no longer crash with KeyError.
        """
        for field in ("first_name", "last_name"):
            value = data.get(field)
            if value is not None and not self._NAME_PATTERN.match(value):
                raise serializers.ValidationError(
                    _("Make sure it contains only letters."))
        return data
class DisplayUserName(serializers.ModelSerializer):
    """Minimal profile serializer exposing only the display name."""
    display_name = serializers.ReadOnlyField(source='displayed_name')

    class Meta:
        model = Profile
        fields = ('display_name',)
class UserSerializer(serializers.ModelSerializer):
    """Read-only user summary: account fields plus display name and avatar URL."""
    displayed_name = serializers.ReadOnlyField(source='profile.displayed_name')
    avatar_url = serializers.SerializerMethodField()

    class Meta:
        model = User
        fields = ('username', 'email', 'displayed_name', 'avatar_url')

    def get_avatar_url(self, obj, size=settings.AVATAR_DEFAULT_SIZE):
        """Return the absolute avatar URL from the first provider that has one."""
        for provider_path in settings.AVATAR_PROVIDERS:
            url = import_string(provider_path).get_avatar_url(obj, size)
            if url:
                return self.context['request'].build_absolute_uri(url)
class RegisterSerializer(serializers.Serializer):
    """Signup payload: e-mail plus the password typed twice.

    Account creation is delegated to the allauth adapter in save().
    """
    email = serializers.EmailField(required=allauth_settings.EMAIL_REQUIRED)
    password1 = serializers.CharField(required=True, write_only=True)
    password2 = serializers.CharField(required=True, write_only=True)

    def validate_email(self, email):
        # Normalize, then enforce uniqueness when allauth requires unique e-mails.
        email = get_adapter().clean_email(email)
        if allauth_settings.UNIQUE_EMAIL:
            if email and email_address_exists(email):
                raise serializers.ValidationError(
                    _("A user is already registered with this e-mail address."))
        return email

    def validate_password1(self, password):
        # Apply the adapter's password policy.
        return get_adapter().clean_password(password)

    def validate(self, data):
        # Cross-field check: the two typed passwords must agree.
        if data['password1'] != data['password2']:
            raise serializers.ValidationError(
                _("The two password fields didn't match."))
        return data

    def get_cleaned_data(self):
        # Shape expected by adapter.save_user().
        return {
            'password1': self.validated_data.get('password1', ''),
            'email': self.validated_data.get('email', ''),
        }

    def save(self, request):
        """Create the user via the allauth adapter, attach its e-mail and profile."""
        adapter = get_adapter()
        user = adapter.new_user(request)
        self.cleaned_data = self.get_cleaned_data()
        adapter.save_user(request, user, self)
        setup_user_email(request, user, [])
        # presumably a signal created user.profile — TODO confirm
        user.profile.save()
        return user
class LoginSerializer(LS):
    """rest-auth login serializer with the e-mail-verification check disabled."""

    def validate(self, attrs):
        """Resolve and attach the authenticated user, or raise ValidationError."""
        username = attrs.get('username')
        email = attrs.get('email')
        password = attrs.get('password')
        user = self._validate_username_email(username, email, password)
        # Did we get back an active user?
        if user:
            if not user.is_active:
                msg = _('User account is disabled.')
                raise exceptions.ValidationError(msg)
        else:
            msg = _('Unable to log in with provided credentials.')
            raise exceptions.ValidationError(msg)
        # If required, is the email verified?
        # NOTE(review): raises EmailAddress.DoesNotExist when no address row
        # matches user.email — confirm signup always creates one.
        email_address = user.emailaddress_set.get(email=user.email)
        if not email_address.verified:
            # Verification is deliberately not enforced; re-enable by raising here.
            pass
            #raise exceptions.PermissionDenied('not verified')
        attrs['user'] = user
        return attrs
class PasswordResetSerializer(serializers.Serializer):
    """Request a password-reset e-mail for an existing account."""
    email = serializers.EmailField()

    def validate_email(self, email):
        # Unlike allauth's default, an unknown address is reported explicitly.
        email = get_adapter().clean_email(email)
        if not email_address_exists(email):
            raise serializers.ValidationError(_("The e-mail address is not assigned "
                                                "to any user account"))
        return email

    def save(self, *args, **kwargs):
        """Generate a reset token, mail the frontend reset URL, return the e-mail."""
        request = self.context.get('request')
        current_site = get_current_site(request)
        email = self.validated_data["email"]
        user = UserModel.objects.get(email__iexact=email)
        token_generator = kwargs.get("token_generator", default_token_generator)
        temp_key = token_generator.make_token(user)
        # Frontend route that consumes the uid/token pair.
        path = "/reset-password/{}/{}".format(user_pk_to_url_str(user), temp_key)
        url = request.build_absolute_uri(path)
        context = {"current_site": current_site,
                   "user": user,
                   "password_reset_url": url,
                   "request": request}
        get_adapter().send_mail(
            'account/email/password_reset_key',
            email,
            context)
        return email
class PasswordResetConfirmSerializer(serializers.Serializer):
    """Set a new password given the uid/token pair from the reset e-mail."""
    new_password1 = serializers.CharField(max_length=128)
    new_password2 = serializers.CharField(max_length=128)
    uid = serializers.CharField()
    key = serializers.CharField()

    def validate_new_password1(self, password):
        return get_adapter().clean_password(password)

    def validate(self, attrs):
        # UserTokenForm both checks the token and resolves the user from uid.
        self.user_token_form = UserTokenForm(data={'uidb36': attrs['uid'], 'key': attrs['key']})
        if not self.user_token_form.is_valid():
            raise serializers.ValidationError(_("Invalid Token"))
        if attrs['new_password1'] != attrs['new_password2']:
            raise serializers.ValidationError(_("The two password fields didn't match."))
        self.password = attrs['new_password1']
        return attrs

    def save(self):
        """Apply the validated password to the user resolved during validate()."""
        user = self.user_token_form.reset_user
        get_adapter().set_password(user, self.password)
        return user
class ResendConfirmSerializer(serializers.Serializer):
    """Re-send the account-confirmation e-mail.

    Reuses allauth's ResetPasswordForm purely for its e-mail validation.
    """
    email = serializers.EmailField()
    password_reset_form_class = ResetPasswordForm

    def validate(self, attrs):
        self.reset_form = self.password_reset_form_class(
            data=self.initial_data)
        if not self.reset_form.is_valid():
            raise serializers.ValidationError(self.reset_form.errors)
        return attrs

    def save(self):
        """Send the confirmation e-mail and return the address it went to."""
        request = self.context.get('request')
        User = get_user_model()
        email = self.reset_form.cleaned_data["email"]
        user = User.objects.get(email__iexact=email)
        # Third positional argument is allauth's `signup` flag — confirm True is intended.
        send_email_confirmation(request, user, True)
        return email
from posts.serializers import PostSerializer
class UserDetailsSerializer(serializers.ModelSerializer):
    """Read/update serializer for the authenticated user's account.

    Handles nested profile fields, e-mail change (with notification to the
    old address and re-verification of the new one) and avatar upload.
    """
    email_status = serializers.SerializerMethodField()
    avatar_url = serializers.SerializerMethodField()
    profile = ProfileSerializer()
    avatar = serializers.ImageField(write_only=True, required=False)

    class Meta:
        model = UserModel
        fields = ('username', 'email', 'email_status', 'profile', 'avatar', 'avatar_url')

    def get_email_status(self, obj):
        """True when the user's e-mail address is verified."""
        email_address = EmailAddress.objects.get(user=obj)
        return email_address.verified

    def get_avatar_url(self, obj, size=settings.AVATAR_DEFAULT_SIZE):
        """Return the first configured provider's avatar URL (relative)."""
        for provider_path in settings.AVATAR_PROVIDERS:
            provider = import_string(provider_path)
            avatar_url = provider.get_avatar_url(obj, size)
            if avatar_url:
                return avatar_url

    def validate_name(self, name):
        """Reject names containing anything but letters (Latin + Arabic ranges)."""
        pattern = "^[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ðء-ي]+[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ðء-ي]+[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ðء-ي]*$"
        compiler = re.compile(pattern)
        if not compiler.match(name):
            raise serializers.ValidationError(
                _("Make sure it contains only letters and spaces."))
        return name

    def validate_avatar(self, avatar):
        """Enforce extension and size limits on the uploaded avatar."""
        # Fix: `os` is not imported at module level in this file, which made
        # this validator crash with NameError whenever extension checking ran.
        import os
        if settings.AVATAR_ALLOWED_FILE_EXTS:
            root, ext = os.path.splitext(avatar.name.lower())
            if ext not in settings.AVATAR_ALLOWED_FILE_EXTS:
                valid_exts = ", ".join(settings.AVATAR_ALLOWED_FILE_EXTS)
                error = _("%(ext)s is an invalid file extension. "
                          "Authorized extensions are : %(valid_exts_list)s")
                raise serializers.ValidationError(error %
                                                  {'ext': ext,
                                                   'valid_exts_list': valid_exts})
        if avatar.size > settings.AVATAR_MAX_SIZE:
            error = _("Your file is too big: %(size)s, "
                      "the maximum allowed size is: %(max_valid_size)s")
            raise serializers.ValidationError(error % {
                'size': filesizeformat(avatar.size),
                'max_valid_size': filesizeformat(settings.AVATAR_MAX_SIZE)
            })
        # Fix: field validators must return the validated value; returning
        # None would silently null the cleaned avatar.
        return avatar

    def validate_email(self, email):
        email = get_adapter().clean_email(email)
        if email and email_address_exists(email, exclude_user=self.context.get('request').user):
            raise serializers.ValidationError(_("A user is already registered with this e-mail address."))
        return email

    def update(self, instance, validated_data):
        """Apply username/profile changes, handle e-mail change and avatar upload."""
        request = self.context.get('request')
        profile = validated_data.get('profile', None)
        instance.username = validated_data.get('username', instance.username)
        instance.first_name = validated_data.get(
            'first_name', instance.first_name)
        if profile:
            # Copy over only the nested profile fields that actually changed.
            bio = profile.get("bio")
            location = profile.get("location")
            birth_date = profile.get("birth_date")
            first_name = profile.get("first_name")
            last_name = profile.get("last_name")
            if bio and bio != instance.profile.bio:
                instance.profile.bio = bio
            if location and location != instance.profile.location:
                instance.profile.location = location
            if birth_date and birth_date != instance.profile.birth_date:
                instance.profile.birth_date = birth_date
            if first_name and first_name != instance.profile.first_name:
                instance.profile.first_name = first_name
            if last_name and last_name != instance.profile.last_name:
                instance.profile.last_name = last_name
        email = validated_data.get('email', None)
        if email and email != instance.email:
            # Notify the old address, then switch and trigger re-verification.
            adapter = get_adapter()
            adapter.send_mail('account/email/email_change', instance.email, {})
            # NOTE(review): raises EmailAddress.DoesNotExist when the user has
            # no verified address — confirm that invariant holds here.
            email_address = EmailAddress.objects.get(user=instance, verified=True)
            email_address.change(request, email, True)
            instance.email = email
        if 'avatar' in request.FILES:
            avatar = Avatar(user=instance, primary=True)
            image_file = request.FILES['avatar']
            avatar.avatar.save(image_file.name, image_file)
            avatar.save()
            avatar_updated.send(sender=Avatar, user=instance, avatar=avatar)
        instance.save()
        # sync_sso(instance)
        return instance
class TokenSerializer(serializers.ModelSerializer):
    """Auth-token response payload: the key plus the full user details."""
    user = UserDetailsSerializer()

    class Meta:
        model = TokenModel
        fields = ('key', 'user')
|
nilq/baby-python
|
python
|
# Copyright (c) 2020 Graphcore Ltd. All rights reserved.
import os
from pathlib import Path
from subprocess import run
import nltk
def rebuild_custom_ops():
    """The objective of this script is to:
    1.) Delete the existing custom ops if it exists
    2.) Perform the make command
    3.) Validate a custom_ops.so now does exist"""
    build_dir = Path(__file__).resolve().parent
    custom_ops_path = build_dir / "custom_ops.so"
    # Remove any stale binary so `make` always produces a fresh one.
    if custom_ops_path.exists():
        print(f"\nDeleting: {custom_ops_path}")
        os.remove(custom_ops_path)
    print("\nBuilding Custom Ops")
    run(["make"], cwd=build_dir)
    assert custom_ops_path.exists()
def get_nltk_data():
    """Gets the NLTK data using the NLTK python module."""
    # Fetch the CMU pronouncing dictionary corpus (nltk skips the download
    # when it is already cached locally).
    nltk.download("cmudict")
def pytest_sessionstart(session):
    """Pytest hook run once before the whole test session.

    Ensures the NLTK data and a freshly built custom_ops.so are available
    before any test executes.
    """
    get_nltk_data()
    rebuild_custom_ops()
|
nilq/baby-python
|
python
|
from tortoise import Tortoise
from tortoise.contrib import test
from tortoise.exceptions import OperationalError, ParamsError
from tortoise.tests.testmodels import Event, EventTwo, TeamTwo, Tournament
from tortoise.transactions import in_transaction, start_transaction
class TestTwoDatabases(test.SimpleTestCase):
    """Exercise Tortoise with two app labels bound to two distinct databases."""

    async def setUp(self):
        # Re-init from scratch: merge the two per-app configs into one
        # config carrying both connections.
        if Tortoise._inited:
            await self._tearDownDB()
        first_db_config = test.getDBConfig(
            app_label="models", modules=["tortoise.tests.testmodels"]
        )
        second_db_config = test.getDBConfig(
            app_label="events", modules=["tortoise.tests.testmodels"]
        )
        merged_config = {
            "connections": {**first_db_config["connections"], **second_db_config["connections"]},
            "apps": {**first_db_config["apps"], **second_db_config["apps"]},
        }
        await Tortoise.init(merged_config, _create_db=True)
        await Tortoise.generate_schemas()
        self.db = Tortoise.get_connection("models")
        self.second_db = Tortoise.get_connection("events")

    async def tearDown(self):
        await Tortoise._drop_databases()

    async def test_two_databases(self):
        # EventTwo lives only in the "events" DB: querying it through the
        # "models" connection must fail, through "events" must succeed.
        tournament = await Tournament.create(name="Tournament")
        await EventTwo.create(name="Event", tournament_id=tournament.id)
        with self.assertRaises(OperationalError):
            await self.db.execute_query("SELECT * FROM eventtwo")
        results = await self.second_db.execute_query("SELECT * FROM eventtwo")
        self.assertEqual(dict(results[0].items()), {"id": 1, "name": "Event", "tournament_id": 1})

    async def test_two_databases_relation(self):
        tournament = await Tournament.create(name="Tournament")
        event = await EventTwo.create(name="Event", tournament_id=tournament.id)
        with self.assertRaises(OperationalError):
            await self.db.execute_query("SELECT * FROM eventtwo")
        results = await self.second_db.execute_query("SELECT * FROM eventtwo")
        self.assertEqual(dict(results[0].items()), {"id": 1, "name": "Event", "tournament_id": 1})
        # M2M relation resolved entirely inside the second database.
        teams = []
        for i in range(2):
            team = await TeamTwo.create(name="Team {}".format(i + 1))
            teams.append(team)
            await event.participants.add(team)
        self.assertEqual(await TeamTwo.all().order_by("name"), teams)
        self.assertEqual(await event.participants.all().order_by("name"), teams)
        self.assertEqual(
            await TeamTwo.all().order_by("name").values("id", "name"),
            [{"id": 1, "name": "Team 1"}, {"id": 2, "name": "Team 2"}],
        )
        self.assertEqual(
            await event.participants.all().order_by("name").values("id", "name"),
            [{"id": 1, "name": "Team 1"}, {"id": 2, "name": "Team 2"}],
        )

    async def test_two_databases_transactions_switch_db(self):
        # Nested transactions, one per connection; both must commit.
        async with in_transaction("models"):
            tournament = await Tournament.create(name="Tournament")
            await Event.create(name="Event1", tournament=tournament)
            async with in_transaction("events"):
                event = await EventTwo.create(name="Event2", tournament_id=tournament.id)
                team = await TeamTwo.create(name="Team 1")
                await event.participants.add(team)
        saved_tournament = await Tournament.filter(name="Tournament").first()
        self.assertEqual(tournament.id, saved_tournament.id)
        saved_event = await EventTwo.filter(tournament_id=tournament.id).first()
        self.assertEqual(event.id, saved_event.id)

    async def test_two_databases_transaction_paramerror(self):
        # With several connections, start_transaction() needs an explicit name.
        with self.assertRaisesRegex(
            ParamsError,
            "You are running with multiple databases, so you should specify connection_name",
        ):
            await start_transaction()
nilq/baby-python
|
python
|
from Instrucciones.Instruccion import Instruccion
from Instrucciones.Declaracion import Declaracion
from Expresion.Terminal import Terminal
from Tipo import Tipo
class Procedure(Instruccion):
    """Stored-procedure declaration.

    Translates its parameter list and body into three-address code wrapped
    between an entry label and a skip-over jump.
    """

    def __init__(self, nombre, params, instrucciones):
        self.nombre = nombre
        self.params = params
        self.instrucciones = instrucciones

    def ejecutar(self, ent):
        """No direct interpretation: procedures only emit 3AC via traducir()."""

    def traducir(self, ent):
        """Emit the procedure's three-address code into self.codigo3d and return self."""
        nl = ent.newlabel()
        # Jump over the body so it only runs when explicitly called.
        cad = 'goto ' + nl + '\n'
        cad += 'label ' + ent.newlabel('p_' + self.nombre) + '\n'
        cont = 0
        lenparams = 0
        if self.params != None:
            lenparams = len(self.params)
        for i in range(0, lenparams):
            # Each formal parameter is declared from its call-stack slot stack[i].
            val = 'stack[' + str(i) + ']'
            # NOTE(review): 'staesqck' looks like a typo'd type tag — confirm
            # the expected value before relying on it.
            term = Terminal(Tipo('staesqck', None, -1, -1), val)
            d = Declaracion(self.params[i].nombre, False, self.params[i].tipo, term)
            c3d = d.traducir(ent).codigo3d
            cad += c3d
            cont = i
        if self.instrucciones != None:
            for inst in self.instrucciones:
                if inst != None:
                    c3d = inst.traducir(ent).codigo3d
                    cad += c3d
        # Epilogue: clear the stack and jump back to the caller's return point.
        cad += 'stack=[]\n'
        cad += 'goto temp\n'
        cad += 'label ' + nl + '\n'
        self.codigo3d = cad
        return self
class Parametro():
    """Plain value object describing a procedure parameter:
    its name, passing mode and declared type."""

    def __init__(self, nombre, modo, tipo):
        self.nombre, self.modo, self.tipo = nombre, modo, tipo
|
nilq/baby-python
|
python
|
"""
This example shows how to upload a model with customized csv schedules
Put all the relevant schedules under one folder
and then add the folder directory to the add_files parameter.
"""
import BuildSimHubAPI as bshapi
import BuildSimHubAPI.postprocess as pp
bsh = bshapi.BuildSimHubAPIClient()
project_api_key = 'f98aadb3-254f-428d-a321-82a6e4b9424c'
# 1. define the absolute directory of your energy model
file_dir = '/Users/weilixu/Desktop/data/schedule/5ZoneTDV.idf'
wea_dir = "/Users/weilixu/Desktop/data/jsontest/in.epw"
new_sj = bsh.new_simulation_job(project_api_key)
results = new_sj.run(file_dir=file_dir, epw_dir=wea_dir,
add_files='/Users/weilixu/Desktop/data/schedule/csv', track=True)
if results:
load_data = results.zone_load()
print(load_data)
zl = pp.ZoneLoad(load_data)
print(zl.pandas_df())
|
nilq/baby-python
|
python
|
# Generated by Django 3.2.11 on 2022-01-12 08:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: switch each model's implicit primary key to BigAutoField
    (Django 3.2 DEFAULT_AUTO_FIELD change). Do not edit by hand."""

    dependencies = [
        ('index', '0004_auto_20201221_1213'),
    ]

    operations = [
        migrations.AlterField(
            model_name='broadcast',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='feedbackreport',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='term',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='track',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='useracceptedterms',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='usermeta',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
    ]
|
nilq/baby-python
|
python
|
from pyrosm.data_manager import get_osm_data
from pyrosm.frames import prepare_geodataframe
import warnings
def get_network_data(node_coordinates, way_records, tags_as_columns,
network_filter, bounding_box):
# Tags to keep as separate columns
tags_as_columns += ["id", "nodes", "timestamp", "changeset", "version"]
# Call signature for fetching network data
nodes, ways, relation_ways, relations = get_osm_data(node_arrays=None,
way_records=way_records,
relations=None,
tags_as_columns=tags_as_columns,
data_filter=network_filter,
filter_type="exclude",
# Keep only records having 'highway' tag
osm_keys="highway",
)
# If there weren't any data, return empty GeoDataFrame
if ways is None:
warnings.warn("Could not find any buildings for given area.",
UserWarning,
stacklevel=2)
return None
# Prepare GeoDataFrame
gdf = prepare_geodataframe(nodes, node_coordinates, ways,
relations, relation_ways,
tags_as_columns, bounding_box)
return gdf
|
nilq/baby-python
|
python
|
class UnionFind(object):
    """Disjoint-set (union-find) over the integers 0..n-1.

    find() applies path compression, which keeps repeated lookups near O(1)
    without changing which root any element maps to.
    """

    def __init__(self, n):
        # u[i] is the parent of i; initially every element is its own root.
        self.u = list(range(n))

    def union(self, a, b):
        """Merge the sets containing a and b (no-op if already the same set)."""
        ra, rb = self.find(a), self.find(b)
        if ra != rb:
            self.u[ra] = rb

    def find(self, a):
        """Return the root representative of a's set."""
        root = a
        while self.u[root] != root:
            root = self.u[root]
        # Path compression: point every node on the walked path at the root.
        while self.u[a] != root:
            self.u[a], a = root, self.u[a]
        return root
class Solution(object):
    """Count friend circles in a symmetric 0/1 friendship matrix."""

    def findCircleNum(self, M):
        """Union every pair of direct friends, then count distinct roots."""
        if not M:
            return 0
        total = len(M)
        uf = UnionFind(total)
        # M is square and symmetric, so the upper triangle is sufficient.
        for row in range(total):
            for col in range(row, total):
                if M[row][col] == 1:
                    uf.union(row, col)
        # Each distinct root is one friend circle.
        return len({uf.find(person) for person in range(total)})
'''
Ideas/thoughts:
Sanity check: if the matrix is empty, just return 0 friend groups.
Iterate through each person's friend list, checking up to the length of the list; the input is a square matrix.
The two important functions are union and find: union merges two sets, and find returns the root representative of an element's set.
'''
nilq/baby-python
|
python
|
# For each test case: find the minimum number of deletions, all of one single
# character, needed to turn s into a palindrome; print -1 if no character works.
for _ in range(int(input())):
    n = int(input())
    s = input()
    alpha = set(s)          # candidate characters to delete
    ans = n
    countImpossible = 0     # candidates that can never yield a palindrome
    for i in alpha:
        curr = 0            # deletions of character i used for this candidate
        lb, ub = 0, n - 1   # two pointers closing in from both ends
        while lb < ub:
            if s[lb] == s[ub]:
                lb += 1
                ub -= 1
                continue
            else:
                if s[lb] == i:
                    # drop the left-side occurrence of i
                    lb += 1
                    curr += 1
                    continue
                elif s[ub] == i:
                    # drop the right-side occurrence of i
                    ub -= 1
                    curr += 1
                    continue
                else:
                    # mismatch not fixable by deleting i: mark with a
                    # sentinel larger than any real answer and keep scanning
                    curr = n + 1
                    lb += 1
                    ub -= 1
                    continue
        # Removing every occurrence of i must itself leave a palindrome,
        # otherwise candidate i is impossible.
        dup = s
        dup = dup.replace(i, '')
        if dup != dup[::-1]:
            countImpossible += 1
        ans = min(ans, curr)
    if countImpossible == len(alpha):
        ans = -1
    print(ans)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# Copyright (c) 2020, Xiaotian Derrick Yang
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
"""Package build and install script."""
from setuptools import find_packages, setup
def get_readme():
    """Load README.md for display on PyPI.

    Returns the file contents as a single string.
    (Docstring fixed: the function reads README.md, not README.rst.)
    """
    # Read as UTF-8 explicitly so the build does not depend on the
    # platform's default locale encoding.
    with open("README.md", encoding="utf-8") as f:
        return f.read()
# Package metadata and build configuration for the quanbit distribution.
setup(
    name="quanbit",
    version="0.0.1",
    description="Python library for simulating quantum computor and algorithm.",
    long_description=get_readme(),
    long_description_content_type="text/markdown",
    author="Xiaotian Derrick Yang",
    author_email="yxt1991@gmail.com",
    url="https://github.com/tczorro/quanbit",
    # Sources live under src/ (the "src layout").
    package_dir={"": "src"},
    packages=find_packages(where="src"),
    zip_safe=False,
    python_requires=">=3.6",
    install_requires=["numpy>=1.16",],
    keywords=["Quantum Computing", "Quantum Algorithm"],
    classifiers=[
        "Intended Audience :: Science/Research",
        "Topic :: Scientific/Engineering",
        "Development Status :: 3 - Alpha",
        "License :: OSI Approved :: BSD License",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
    ],
)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Description
"""
import torch
from scipy import stats
from ptranking.metric.adhoc_metric import torch_ap_at_ks, torch_nDCG_at_ks, torch_kendall_tau, torch_nerr_at_ks
def test_ap():
    """Smoke-check torch_ap_at_ks against hand-computed AP@k values
    (expected outputs recorded in the trailing comments)."""
    # todo-as-note: the denominator should be carefully checked when using AP@k
    # here we assume that there are five relevant documents, but the system just retrieves three of them
    sys_sorted_labels = torch.Tensor([1.0, 0.0, 1.0, 0.0, 1.0])
    std_sorted_labels = torch.Tensor([1.0, 1.0, 1.0, 1.0, 1.0])
    ap_at_ks = torch_ap_at_ks(sys_sorted_labels, std_sorted_labels, ks=[1, 3, 5])
    print(ap_at_ks)  # tensor([1.0000, 0.5556, 0.4533])

    sys_sorted_labels = torch.Tensor([1.0, 0.0, 1.0, 0.0, 1.0])
    std_sorted_labels = torch.Tensor([1.0, 1.0, 1.0, 0.0, 0.0])
    ap_at_ks = torch_ap_at_ks(sys_sorted_labels, std_sorted_labels, ks=[1, 3, 5])
    print(ap_at_ks)  # tensor([1.0000, 0.5556, 0.7556])

    # here we assume that there are four relevant documents and the system retrieves all four of them
    sys_sorted_labels = torch.Tensor([1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0])
    std_sorted_labels = torch.Tensor([1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0])
    ap_at_ks = torch_ap_at_ks(sys_sorted_labels, std_sorted_labels, ks=[1, 2, 3, 5, 7])
    print(ap_at_ks)  # tensor([1.0000, 1.0000, 0.6667, 0.6875, 0.8304])
def test_ndcg():
    """Smoke-check torch_nDCG_at_ks for k=1..7 (expected output in comment)."""
    sys_sorted_labels = torch.Tensor([1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0])
    std_sorted_labels = torch.Tensor([1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0])
    ndcg_at_ks = torch_nDCG_at_ks(sys_sorted_labels, std_sorted_labels, ks=[1, 2, 3, 4, 5, 6, 7])
    print(ndcg_at_ks)  # tensor([1.0000, 1.0000, 0.7654, 0.8048, 0.8048, 0.8048, 0.9349])
def test_nerr():
    """Smoke-check torch_nerr_at_ks on graded labels (expected output in comment)."""
    sys_sorted_labels = torch.Tensor([3.0, 2.0, 4.0])
    std_sorted_labels = torch.Tensor([4.0, 3.0, 2.0])
    nerr_at_ks = torch_nerr_at_ks(sys_sorted_labels, std_sorted_labels, ks=[1, 2, 3])
    print(nerr_at_ks)  # tensor([0.4667, 0.5154, 0.6640])
def test_kendall_tau():
    """Compare torch_kendall_tau against scipy.stats.kendalltau, first with an
    ascending reference, then with a descending one."""
    reference = torch.Tensor([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
    sys_1 = torch.Tensor([2.0, 1.0, 5.0, 3.0, 4.0, 6.0, 7.0, 9.0, 8.0, 10.0])
    sys_2 = torch.Tensor([10.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 1.0])
    tau_1 = torch_kendall_tau(sys_1, natural_ascending_as_reference=True)
    print('tau_1', tau_1)
    tau_2 = torch_kendall_tau(sys_2, natural_ascending_as_reference=True)
    print('tau_2', tau_2)
    # scipy results for the same pairs, for side-by-side comparison
    tau, p = stats.kendalltau(reference.data.data.numpy(), sys_1)
    print('scipy-1', tau, p)
    tau, p = stats.kendalltau(reference.data.numpy(), sys_2)
    print('scipy-2', tau, p)

    print()
    print('-----------------------')

    # Repeat with the reference sorted descending.
    res_reference, _ = torch.sort(reference, dim=0, descending=True)
    tau_1 = torch_kendall_tau(sys_1, natural_ascending_as_reference=False)
    print('tau_1', tau_1)
    tau_2 = torch_kendall_tau(sys_2, natural_ascending_as_reference=False)
    print('tau_2', tau_2)
    tau, p = stats.kendalltau(res_reference.data.numpy(), sys_1)
    print('scipy-1', tau, p)
    tau, p = stats.kendalltau(res_reference.data.numpy(), sys_2)
    print('scipy-2', tau, p)
# Manual smoke-test entry point: only the nERR check runs by default;
# uncomment the others as needed.
if __name__ == '__main__':
    #1
    #test_ap()

    #2
    test_nerr()

    #3
    #test_kendall_tau()
|
nilq/baby-python
|
python
|
import logging
from kubernetes import client
from kubernetes.client import V1beta1CustomResourceDefinition, V1ObjectMeta, V1beta1CustomResourceDefinitionSpec, \
V1Deployment, V1DeploymentSpec, V1LabelSelector, V1PodTemplateSpec, V1PodSpec, V1Service, V1ServiceSpec, \
V1ServicePort, V1DeleteOptions, V1PersistentVolumeClaim, V1PersistentVolumeClaimSpec, V1ResourceRequirements
from nifi_web.models import K8sCluster
logger = logging.getLogger(__name__)
def auth_gcloud_k8s(credentials):
    """Point the global kubernetes client configuration at the cluster stored
    in the K8sCluster DB row with id=1, authenticating every request with the
    given Google credentials' bearer token."""
    c = K8sCluster.objects.get(id=1)
    configuration = client.Configuration()
    configuration.host = f"https://{c.endpoint}:443"
    # NOTE(review): TLS verification is disabled — confirm this is acceptable
    # (e.g. the cluster CA is not distributed to clients).
    configuration.verify_ssl = False
    configuration.api_key = {"authorization": "Bearer " + credentials.token}
    client.Configuration.set_default(configuration)
def ensure_custom_object(api: client.CustomObjectsApi, custom_object, group, plural, version, namespace, name):
    """Create a namespaced custom object if none with this name exists yet.

    Idempotent: existence is checked via a field selector on metadata.name.
    `name` is only used for the check; the created object's actual name comes
    from custom_object's own metadata.
    """
    if len(api.list_namespaced_custom_object(namespace=namespace,
                                             field_selector=f'metadata.name={name}', group=group,
                                             plural=plural, version=version)['items']) == 0:
        logger.info(f'creating custom object: {namespace}/{name}')
        api.create_namespaced_custom_object(
            body=custom_object,
            namespace=namespace,
            group=group,
            plural=plural,
            version=version
        )
    else:
        logger.info(f'custom object exists: {namespace}/{name}')
def destroy_custom_object(api: client.CustomObjectsApi, group, plural, version, namespace, name):
    """Delete the named custom object if exactly one such object exists;
    otherwise just log that nothing was found."""
    if len(api.list_namespaced_custom_object(namespace=namespace,
                                             field_selector=f'metadata.name={name}', group=group,
                                             plural=plural, version=version)['items']) == 1:
        logger.info(f'destroying custom object: {namespace}/{name}')
        api.delete_namespaced_custom_object(
            namespace=namespace,
            group=group,
            plural=plural,
            version=version,
            name=name,
            body=V1DeleteOptions()
        )
    else:
        logger.info(f'cannot find custom object to destroy: {namespace}/{name}')
def ensure_deployment(api: client.AppsV1Api, deployment, namespace, name):
    """Create the Deployment unless one with this name already exists (idempotent)."""
    existing = api.list_namespaced_deployment(
        namespace=namespace, field_selector=f'metadata.name={name}').items
    if existing:
        logger.info(f'Deployment exists: {namespace}/{name}')
        return
    logger.info(f'creating Deployment: {namespace}/{name}')
    api.create_namespaced_deployment(body=deployment, namespace=namespace)
def ensure_namespace(api: client.CoreV1Api, namespace):
    """Create the namespace if it is not already present (idempotent)."""
    found = api.list_namespace(field_selector=f'metadata.name={namespace}').items
    if found:
        logger.info(f'namespace exists: {namespace}')
        return
    logger.info(f'creating namespace: {namespace}')
    body = client.V1Namespace(metadata=V1ObjectMeta(name=namespace))
    api.create_namespace(body=body)
def ensure_statefulset(api: client.AppsV1Api, stateful_set, namespace, name):
    """Create the StatefulSet unless one with this name already exists (idempotent)."""
    existing = api.list_namespaced_stateful_set(
        namespace=namespace, field_selector=f'metadata.name={name}').items
    if existing:
        logger.info(f'StatefulSet exists: {namespace}/{name}')
        return
    logger.info(f'creating StatefulSet: {namespace}/{name}')
    api.create_namespaced_stateful_set(body=stateful_set, namespace=namespace)
def destroy_deployment(api: client.AppsV1Api, namespace, name):
    """Delete the named Deployment when exactly one match exists; log otherwise."""
    found = api.list_namespaced_deployment(
        namespace=namespace, field_selector=f'metadata.name={name}').items
    if len(found) != 1:
        logger.info(f'cannot find Deployment to destroy: {namespace}/{name}')
        return
    logger.info(f'destroying Deployment: {namespace}/{name}')
    api.delete_namespaced_deployment(name=name, namespace=namespace)
def destroy_statefulset(api: client.AppsV1Api, core_api: client.CoreV1Api, namespace, name):
    """Delete the named StatefulSet and every PVC labelled app=<name>.

    PVCs are removed first/unconditionally because deleting a StatefulSet does
    not delete the volumes created from its volumeClaimTemplates.
    """
    for pvc in core_api.list_namespaced_persistent_volume_claim(namespace=namespace,
                                                                label_selector=f'app={name}').items:
        core_api.delete_namespaced_persistent_volume_claim(
            name=pvc.metadata.name,
            namespace=namespace
        )
    if len(api.list_namespaced_stateful_set(namespace=namespace,
                                            field_selector=f'metadata.name={name}').items) == 1:
        logger.info(f'destroying StatefulSet: {namespace}/{name}')
        api.delete_namespaced_stateful_set(
            name=name,
            namespace=namespace
        )
    else:
        logger.info(f'cannot find StatefulSet to destroy: {namespace}/{name}')
def ensure_service(api: client.CoreV1Api, service, namespace, name):
    """Create the Service unless one with this name already exists (idempotent)."""
    existing = api.list_namespaced_service(
        namespace=namespace, field_selector=f'metadata.name={name}').items
    if existing:
        logger.info(f'Service exists: {namespace}/{name}')
        return
    logger.info(f'creating Service: {namespace}/{name}')
    api.create_namespaced_service(body=service, namespace=namespace)
def destroy_service(api: client.CoreV1Api, namespace, name):
    """Delete the named Service when exactly one match exists; log otherwise."""
    found = api.list_namespaced_service(
        namespace=namespace, field_selector=f'metadata.name={name}').items
    if len(found) != 1:
        logger.info(f'cannot find Service to destroy: {namespace}/{name}')
        return
    logger.info(f'destroying Service: {namespace}/{name}')
    api.delete_namespaced_service(name=name, namespace=namespace)
def destroy_namespace(api: client.CoreV1Api, name):
    """Delete the namespace when exactly one match exists; log otherwise."""
    found = api.list_namespace(field_selector=f'metadata.name={name}').items
    if len(found) != 1:
        logger.info(f'cannot find namespace to destroy: {name}')
        return
    logger.info(f'destroying namespace: {name}')
    api.delete_namespace(name=name)
def ensure_service_account(api: client.CoreV1Api, account, name, namespace):
    """Create the ServiceAccount unless it already exists (idempotent)."""
    existing = api.list_namespaced_service_account(
        namespace=namespace, field_selector=f'metadata.name={name}').items
    if existing:
        logger.info(f'ServiceAccount exists: {name}')
        return
    logger.info(f'creating ServiceAccount: {name}')
    api.create_namespaced_service_account(namespace=namespace, body=account)
def ensure_secret(api: client.CoreV1Api, secret, name, namespace):
    """Create the Secret unless one with this name already exists (idempotent)."""
    existing = api.list_namespaced_secret(
        namespace=namespace, field_selector=f'metadata.name={name}').items
    if existing:
        logger.info(f'secret exists: {name}')
        return
    logger.info(f'creating secret: {name}')
    api.create_namespaced_secret(namespace=namespace, body=secret)
def ensure_role(api: client.RbacAuthorizationV1beta1Api, role, name):
    """Create the ClusterRole unless it already exists (idempotent)."""
    if api.list_cluster_role(field_selector=f'metadata.name={name}').items:
        logger.info(f'ClusterRole exists: {name}')
        return
    logger.info(f'creating ClusterRole: {name}')
    api.create_cluster_role(role)
def ensure_role_binding(api: client.RbacAuthorizationV1beta1Api, role_binding, name):
    """Create the ClusterRoleBinding unless it already exists (idempotent)."""
    if api.list_cluster_role_binding(field_selector=f'metadata.name={name}').items:
        logger.info(f'ClusterRoleBinding exists: {name}')
        return
    logger.info(f'creating ClusterRoleBinding: {name}')
    api.create_cluster_role_binding(role_binding)
def ensure_storage_class(api: client.StorageV1Api, cls, name):
    """Create the StorageClass unless it already exists (idempotent)."""
    if api.list_storage_class(field_selector=f'metadata.name={name}').items:
        logger.info(f'StorageClass exists: {name}')
        return
    logger.info(f'creating StorageClass: {name}')
    api.create_storage_class(cls)
def ensure_crd(api, name, group, kind, plural, singular, scope):
    """Idempotently register a v1alpha1 CustomResourceDefinition with the
    given group/kind/plural/singular names and scope."""
    if len(api.list_custom_resource_definition(field_selector=f'metadata.name={name}').items) == 0:
        logger.info(f'creating CustomResourceDefinition: {name}')
        try:
            api.create_custom_resource_definition(V1beta1CustomResourceDefinition(
                api_version='apiextensions.k8s.io/v1beta1',
                kind='CustomResourceDefinition',
                metadata=V1ObjectMeta(name=name),
                spec=V1beta1CustomResourceDefinitionSpec(
                    group=group,
                    version='v1alpha1',
                    names={
                        'kind': kind,
                        'plural': plural,
                        'singular': singular
                    },
                    scope=scope
                ),
            ))
        except ValueError:
            # unfortunate workaround due to client library bug
            # https://github.com/kubernetes-client/python/issues/415
            logger.warning(f'swallowed ValueError when creating CRD {name} to workaround API client issue')
            pass
    else:
        logger.info(f'CustomResourceDefinition exists: {name}')
def ensure_single_container_deployment(api_apps_v1, container, name, namespace, replicas=1):
    """Idempotently create a Deployment running one container, with the
    app=<name> label applied to the Deployment, selector and pod template."""
    ensure_deployment(
        api=api_apps_v1,
        deployment=V1Deployment(
            api_version="apps/v1",
            metadata=V1ObjectMeta(
                name=name,
                labels={'app': name}
            ),
            spec=V1DeploymentSpec(
                replicas=replicas,
                selector=V1LabelSelector(
                    match_labels={'app': name}
                ),
                template=V1PodTemplateSpec(
                    metadata=V1ObjectMeta(
                        name=name,
                        labels={'app': name}
                    ),
                    spec=V1PodSpec(
                        containers=[
                            container
                        ]
                    )
                )
            )
        ),
        name=name,
        namespace=namespace
    )
def ensure_ingress_routed_svc(api_core_v1: client.CoreV1Api,
                              api_custom: client.CustomObjectsApi,
                              domain,
                              hostname,
                              name,
                              target_name,
                              namespace,
                              port_name,
                              svc_port,
                              target_port):
    """Expose pods labelled app=<target_name> at https://<hostname>.<domain>.

    Idempotently creates a ClusterIP Service plus a Traefik IngressRoute that
    matches the host, routes to the service, applies the traefik-forward-auth
    middleware, and uses the 'default' certificate resolver.
    """
    ensure_service(
        api=api_core_v1,
        service=V1Service(
            api_version="v1",
            metadata=V1ObjectMeta(
                name=name
            ),
            spec=V1ServiceSpec(
                type='ClusterIP',
                ports=[
                    V1ServicePort(
                        protocol='TCP',
                        port=svc_port,
                        name=port_name,
                        target_port=target_port
                    ),
                ],
                selector={
                    'app': target_name
                }
            )
        ),
        name=name,
        namespace=namespace
    )
    ensure_custom_object(
        api=api_custom,
        custom_object={
            'apiVersion': 'traefik.containo.us/v1alpha1',
            'kind': 'IngressRoute',
            'metadata': {
                'name': name,
            },
            'spec': {
                'entryPoints': [
                    'websecure'
                ],
                'routes': [
                    {
                        'match': f'Host(`{hostname}.{domain}`)',
                        'kind': 'Rule',
                        'services': [
                            {
                                'name': name,
                                'port': svc_port
                            }
                        ],
                        'middlewares': [
                            {
                                'name': 'traefik-forward-auth',
                                'namespace': 'default'
                            }
                        ]
                    }
                ],
                'tls': {
                    'certResolver': 'default'
                }
            }
        },
        group='traefik.containo.us',
        plural='ingressroutes',
        version='v1alpha1',
        # NOTE(review): the existence check uses name=hostname while the
        # created object's metadata.name is `name`; when hostname != name the
        # idempotency check can never match the created object — confirm
        # whether name=name was intended here.
        name=hostname,
        namespace=namespace
    )
def destroy_ingress_routed_svc(api_core_v1, api_custom, name, namespace):
    """Tear down the Service/IngressRoute pair created by ensure_ingress_routed_svc."""
    # Remove the ClusterIP service first, then the Traefik IngressRoute.
    destroy_service(api=api_core_v1, name=name, namespace=namespace)
    destroy_custom_object(
        api=api_custom,
        group='traefik.containo.us',
        plural='ingressroutes',
        version='v1alpha1',
        name=name,
        namespace=namespace,
    )
def ensure_statefulset_with_containers(api_apps_v1,
                                       name,
                                       namespace,
                                       containers,
                                       volume_paths,
                                       replicas=1,
                                       init_containers=None,
                                       volumes=None):
    """Idempotently create a StatefulSet running the given containers.

    volume_paths: iterable of tuples where index 0 is the PVC name, index 2
    the requested storage size and index 3 the storage class. (Index 1 —
    presumably a mount path — is not used in this function; confirm the tuple
    layout against callers.)
    """
    if volumes is None:
        volumes = []
    if init_containers is None:
        init_containers = []
    # One ReadWriteOnce PVC template per requested volume path.
    volume_claim_templates = [V1PersistentVolumeClaim(
        metadata=V1ObjectMeta(
            name=path[0]
        ),
        spec=V1PersistentVolumeClaimSpec(
            access_modes=['ReadWriteOnce'],
            resources=V1ResourceRequirements(
                requests={
                    'storage': path[2]
                }
            ),
            storage_class_name=path[3]
        )
    ) for path in volume_paths]

    ss = client.V1StatefulSet(
        api_version="apps/v1",
        kind="StatefulSet",
        metadata=client.V1ObjectMeta(
            name=name,
            labels={'app': name}
        ),
        spec=client.V1StatefulSetSpec(
            replicas=replicas,
            service_name=name,
            template=V1PodTemplateSpec(
                metadata=V1ObjectMeta(labels={"app": name}),
                spec=V1PodSpec(
                    containers=containers,
                    volumes=volumes,
                    init_containers=init_containers
                )
            ),
            selector={'matchLabels': {'app': name}},
            volume_claim_templates=volume_claim_templates
        )
    )
    ensure_statefulset(
        api_apps_v1,
        stateful_set=ss,
        namespace=namespace,
        name=name
    )
|
nilq/baby-python
|
python
|
# Generated by Django 2.1.5 on 2019-01-28 03:31
from django.db import migrations
import jsonfield.fields
class Migration(migrations.Migration):
    """Auto-generated: add the env_values JSON field to D3MConfiguration.
    Do not edit by hand."""

    dependencies = [
        ('configurations', '0017_d3mconfiguration_description'),
    ]

    operations = [
        migrations.AddField(
            model_name='d3mconfiguration',
            name='env_values',
            field=jsonfield.fields.JSONField(blank=True, help_text='D3M env values for running Docker TA2s'),
        ),
    ]
|
nilq/baby-python
|
python
|
from agrirouter.auth.enums import BaseEnum
class CertificateTypes(BaseEnum):
    """Supported onboarding certificate container formats."""
    PEM = "PEM"
    P12 = "P12"
class GateWays(BaseEnum):
    """Gateway selectors; values are the string ids the agrirouter API expects
    ("2" for MQTT, "3" for REST) — presumably defined by the agrirouter spec."""
    MQTT = "2"
    REST = "3"
|
nilq/baby-python
|
python
|
"""yaml templates for DataFrame plotting."""
from os.path import (join, dirname)
import yaml
_filename = join(dirname(__file__), 'palette.yaml')
with open(_filename, 'r') as f:
lineplot_dict = yaml.load(f, Loader=yaml.SafeLoader)
style_overide = lineplot_dict.pop('style_overide', {})
__all__ = ['lineplot_dict', 'style_overide']
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# Countdown timer shown on a TM1637 4-digit 7-segment display.

# Import the TM1637 display driver and sleep helper.
from tm1637 import TM1637
from time import sleep

# Ask the user for the countdown duration (prompts are in French).
print("- Duree du minuteur -")
minutes = int(input("Minutes : "))
secondes = int(input("Secondes : "))

print("- Demarage du minuteur : " + str(minutes) + ":" + str(secondes) + " -")

# Initialize the display (clk/dio are GPIO pin numbers).
afficheur = TM1637(clk=23, dio=24)

# Set the brightness (0-7).
afficheur.brightness(1)

# Show the starting time on the module before the countdown begins.
# .numbers(x, y) : shows x on the first two 7-segment digits and y on the
# next two; -10 < x (resp. y) < 100
afficheur.numbers(minutes, secondes)

# Countdown loop.
# NOTE(review): the first minute ticks `secondes + 1` times (inner loop is
# inclusive of 0) while later minutes tick 60 times — confirm this off-by-one
# is acceptable for the display.
i = minutes
j = secondes
while i >= 0:
    while j >= 0:
        afficheur.numbers(i, j)
        sleep(1)
        j -= 1
    i -= 1
    j = 59

print("- Temps ecoule ! -")

# End animation: blink 00:00 by toggling the brightness.
for n in range(0, 20):
    afficheur.brightness(0)
    sleep(0.25)
    afficheur.brightness(7)
    sleep(0.25)
|
nilq/baby-python
|
python
|
from unittest import TestCase
from daily_solutions.year_2020.day_5 import parse_seat_id
class Day5TestCase(TestCase):
    """Unit tests for the 2020 day-5 solution."""

    def test_parse_row_column(self) -> None:
        # Boarding pass "BFFFBBFRRR" should parse to seat ID 567.
        self.assertEqual(567, parse_seat_id("BFFFBBFRRR"))
|
nilq/baby-python
|
python
|
from flask_wtf import FlaskForm
from wtforms import (
widgets,
HiddenField,
BooleanField,
TextField,
PasswordField,
SubmitField,
SelectField,
SelectMultipleField,
DateTimeField,
)
from wtforms.validators import Email, Length, Required, EqualTo, Optional
# Weekday index (as a string, "0" = Mon) -> three-letter abbreviation;
# used to build the Pattern form's day checkbox choices.
day_map = {
    "0": "Mon",
    "1": "Tue",
    "2": "Wed",
    "3": "Thu",
    "4": "Fri",
    "5": "Sat",
    "6": "Sun",
}
class Login(FlaskForm):
    """Login form: email + password with a remember-me flag."""
    email = TextField("Email", [Required(), Email(), Length(min=4, max=50)])
    pwd = PasswordField("Password", [Required(), Length(min=6, max=25)])
    remember_me = BooleanField(default=True)
    submit = SubmitField("Login")
class Register(Login):
    """Registration form: Login's fields plus a matching confirm-password field."""
    confirm = PasswordField(
        "Confirm Password",
        [
            Required(),
            Length(min=6, max=25),
            EqualTo("pwd", message="Passwords must match"),
        ],
    )
    # Shadows Login.submit so the button reads "Register".
    submit = SubmitField("Register")
class MultiCheckbox(SelectMultipleField):
    """Multi-select field rendered as a list of checkboxes instead of a <select>."""
    widget = widgets.ListWidget(prefix_label=False)
    option_widget = widgets.CheckboxInput()
class Pattern(FlaskForm):
    """Form for creating/editing a pattern entry, with optional scheduling
    (recipients, time of day, weekdays)."""
    # required fields
    path = SelectField("Path")
    pattern = TextField("Pattern", [Required(), Length(min=1, max=255)])
    name = TextField("Name", [Required(), Length(min=1, max=255)])
    # scheduling fields: recipients, time, and days
    recipients = TextField("Recipients", [Optional(), Length(max=255)])
    time = DateTimeField("Time", [Optional()], format="%H:%M")
    # create sorted list of (value, label) day choices from day_map
    choices = [(k, v) for k, v in sorted(day_map.items())]
    days = MultiCheckbox("Days", [Optional()], choices=choices)
    # hidden field for pattern_id
    pattern_id = HiddenField("pattern_id", [Optional()])
    # two submit buttons so one form can both save and delete
    save = SubmitField("Save")
    delete = SubmitField("Delete")
|
nilq/baby-python
|
python
|
# File: __init__.py
# Aim: Package initialization — expose a shared, module-level CONFIG instance.
# Package version: 1.0

# %%
from .defines import Config

# Single shared configuration object for the package.
CONFIG = Config()
# CONFIG.reload_logger(name='develop')

# %%
nilq/baby-python
|
python
|
from dataclasses import dataclass
from enum import Enum
class TokenEnum(Enum):
    """Token categories produced by the lexer."""
    LPAREN = 0
    RPAREN = 1
    NUMBER = 2
    PLUS = 3
    MINUS = 4
    MULTIPLY = 5
    DIVIDE = 6
    INTEGRAL_DIVIDE = 7
    EXPONENTIAL = 8


@dataclass
class Token:
    """A lexed token: its category plus an optional literal value."""
    type: TokenEnum
    # Annotated `object` rather than `any` — `any` is the builtin function,
    # not a type. Value stays optional with the same None default.
    val: object = None

    def __repr__(self):
        # PEP 8: compare against None with identity, not "!=". A value of 0
        # still renders, since 0 is not None.
        if self.val is not None:
            return self.type.name + f":{self.val}"
        else:
            # Previously returned "", which made valueless tokens invisible
            # in debug output; show the token category instead.
            return self.type.name
|
nilq/baby-python
|
python
|
#-------------------------------------------------------------------------------
import collections
import copy
import warnings
import inspect
import logging
import math
#-------------------------------------------------------------------------------
class MintError(Exception):
    """Base class for all mint-specific errors."""

class MintIndexError(MintError):
    """Raised for out-of-range or malformed wire/instance indexing."""

class MintValueError(MintError):
    """Raised for invalid values (e.g. constants outside their bit range)."""

class MintConnectionError(MintError):
    """Raised for inconsistent connections between instances."""

class MintModelDoesNotExist(MintError):
    """Raised when a requested model cannot be found."""
#-------------------------------------------------------------------------------
class Dir:
    """Port direction constants."""
    I = 'input'
    O = 'output'
    IO = 'inout'
    ANY = '_any_dir_'


class Default:
    """Default direction and naming templates used across the module."""
    port_dir = Dir.ANY
    scalar_port_template = '{I}_{n}'
    vector_port_template = '{i}_{n}'
    # The original assigned net_template twice with the same value; the
    # redundant duplicate assignment has been removed.
    net_template = '{I}_{n}'
#-------------------------------------------------------------------------------
class Net(object):
    """ Base class for net types.

    Comparison operators are overloaded as connection syntax: comparing a net
    with a module instance binds the net to that instance with a Dir constant
    chosen per operator.
    """

    def _handle_cmp_ops(self, other, op, dir):
        # Bind this net to a module instance with the given direction;
        # anything else is a usage error.
        if isinstance(other, ModInstBase):
            other.bind_net(self, dir=dir)
            return True
        raise TypeError("unsupported operand type(s) for %s: '%s' and '%s'" %
                        (op, type(self), type(other)))

    def __ne__(self, other):
        # net <> inst : bidirectional (inout) connection
        return self._handle_cmp_ops(other, '<>', Dir.IO)

    def __gt__(self, other):
        # net > inst : bind with Dir.I (pin direction on the instance side)
        return self._handle_cmp_ops(other, '>', Dir.I)

    def __lt__(self, other):
        # net < inst : bind with Dir.O (pin direction on the instance side)
        return self._handle_cmp_ops(other, '<', Dir.O)

    def __mul__(self, other):
        # net * k : produce k shallow clones of this net; each clone becomes
        # its own parent (i.e. a standalone net, not a slice).
        if isinstance(other, int):
            clones = []
            for i in range(other):
                clone = copy.copy(self)
                clone.parent = clone
                clones.append(clone)
            return clones
        else:
            return NotImplemented

    def __rmul__(self, other):
        # k * net behaves the same as net * k.
        return self.__mul__(other)
class Wire(Net):
    """A scalar or vector wire; slices share the parent wire's name."""

    def __init__(self, name=None, size=None, indices=None, parent=None):
        """
        Initialize the Wire instance.
        - name = base name for the wire
        - size = None for scalar, int for vector.
        - indices = tuple of indices, but size takes precedence if defined.
        - parent points to parent wire for slices.
        """
        self._name = name
        if size is not None:
            self.indices = tuple(range(size))
        else:
            self.indices = indices  # 'None' for scalar
        self.parent = parent or self
        # Template used for full/formatted name
        self.template = "{name}"

    def __call__(self, name=None):
        """
        Additional initializations for the Wire instance.
        - name = base name for the wire
        """
        self.name = name or self.name
        return self

    @property
    def name(self):
        # Slices (which have no own name) fall back to the parent's name.
        return self._name or self.parent._name

    @name.setter
    def name(self, val):
        self._name = val

    @property
    def fname(self):
        """ Return full/formatted name """
        return self.template.format(name=self.name)

    def formatted_repr(self, fmt0="{name}",
                             fmt1="{name}[{index}]",
                             fmt2="{name}[{index}]"):
        """ Return formatted representation
            - fmt0 : format for scalars
            - fmt1 : format for 1 bit vectors
            - fmt2 : format for >= 2 bit vectors
            Following replacement strings can be specified:
            - name, index, msb, lsb
        """
        name = self.fname
        #name = self.name.format(**kwargs)
        if self.indices is None:
            index = msb = lsb = ''
            return fmt0.format(name=name, index=index, msb=msb, lsb=lsb)
        elif len(self.indices) == 1:
            index = self.indices[0]
            msb = lsb = index
            return fmt1.format(name=name, index=index, msb=msb, lsb=lsb)
        else:
            lsb = self.indices[0]
            msb = self.indices[-1]
            index = "%s:%s" % (msb, lsb)
            return fmt2.format(name=name, index=index, msb=msb, lsb=lsb)

    def __getitem__(self, key):
        """ Verilog like indexing syntax is used:
            [index] => python [index]
            [msb:lsb] => python [lsb:msb+1]
        """
        # NOTE(review): a key that is neither int nor slice falls through both
        # branches and hits an UnboundLocalError on `indices` below rather
        # than a clean TypeError — confirm whether that is acceptable.
        if self.indices is None:
            raise MintIndexError("scalar wire is not indexable")
        valid_range = range(len(self.indices))
        if isinstance(key, int):
            if key not in valid_range:
                raise MintIndexError("wire index out of range")
            indices = (self.indices[key],)
        elif isinstance(key, slice):
            msb, lsb, step = key.start, key.stop, key.step
            if msb is None: msb = valid_range[-1]
            if lsb is None: lsb = valid_range[0]
            if msb not in valid_range or lsb not in valid_range:
                raise MintIndexError("wire index out of range")
            if msb < lsb:
                raise MintIndexError("msb less than lsb")
            indices = self.indices[lsb : msb + 1 : step]
        return Wire(indices=indices, parent=self.parent)

    def __len__(self):
        # Scalars count as width 1.
        if self.indices is None:
            return 1
        else:
            return len(self.indices)

    def __repr__(self):
        return "Wire(%s)" % self.formatted_repr()
class Const(Net):
    """A sized unsigned constant, rendered as a Verilog-style literal."""

    def __init__(self, size, val, fmt='hex'):
        # size: bit width; val must fit in `size` unsigned bits.
        self.size = size
        if val < 0 or val >= 2**size:
            raise MintValueError("constant value out of range")
        self.val = val
        self.fmt = fmt

    #@property
    #def name(self):
    #    return self.formatted_repr()

    def formatted_repr(self, fmt=None):
        """Return a Verilog-style literal, e.g. 8'hff, 4'b0101 or 4'd5."""
        fmt = fmt or self.fmt
        if fmt == 'bin':
            return "{size}'b{0:0>{width}b}".format(self.val, size=self.size,
                                                   width=self.size)
        elif fmt == 'hex':
            # NOTE(review): under Python 2, size/4 floors before ceil(), so a
            # 7-bit constant gets hex width 1 here; Python 3 would give 2.
            # Confirm which rounding is intended before porting.
            width = int(math.ceil(self.size/4))
            return "{size}'h{0:0>{width}x}".format(self.val, size=self.size,
                                                   width=width)
        else:
            return "{size}'d{0}".format(self.val, size=self.size)

    def __len__(self):
        # Width in bits.
        return self.size

    def __repr__(self):
        return "Const(%s)" % self.formatted_repr()
class Concat(Net):
    """Concatenation of nets (wires and constants), Verilog {a, b, ...} style."""

    def __init__(self, nets):
        self.nets = nets

    #@property
    #def name(self):
    #    return self.formatted_repr()

    @property
    def wires(self):
        # Only the Wire members; constants are filtered out.
        return [wire for wire in self.nets if isinstance(wire, Wire)]

    def formatted_repr(self):
        return "{%s}" % ', '.join([net.formatted_repr() for net in self.nets])

    def __len__(self):
        # Total width is the sum of the member widths.
        size = 0
        for net in self.nets:
            size += len(net)
        return size

    def __repr__(self):
        return "Concat(%s)" % self.formatted_repr()
#-------------------------------------------------------------------------------
class InstBase(object):
    """Base for instance types; provides the inst/"template" division syntax.

    Subclasses must implement templatize(template).
    """

    def __div__(self, other):
        " Supports inst_exp/template expressions "
        if isinstance(other, str):
            templatized = self.templatize(other)
        else:
            raise TypeError('unsupported operand type(s) for /: %s and %s' %
                            (type(self), type(other)))
        return templatized

    # Python 3 dispatches the "/" operator to __truediv__, never __div__
    # (which is Python-2-only); alias it so the template syntax works on
    # both major versions.
    __truediv__ = __div__
class InstScalar(InstBase):
    """A single (non-vector) instance; base for module/interface instance scalars."""

    def __init__(self, name=None, index=None):
        self.name = name
        # This would be set if part of a vector
        self.index = index
        # Set by obj/template expression.
        self.template = None
        # Which model to build
        self.model = None
        # Set to True if this instance is a port
        self.isport = False

    def formatted_repr(self, fmt0="{name}",
                             fmt1="{name}[{index}]"):
        """ Return formatted representation
            - fmt0 : format for scalars
            - fmt1 : format for 1 bit vectors (part of vector)
            Following replacement strings can be specified:
            - name, index
        """
        if self.index is None:
            return fmt0.format(name=self.name, index=self.index)
        else:
            return fmt1.format(name=self.name, index=self.index)

    def __iter__(self):
        # A scalar iterates as a one-element sequence so scalars and vectors
        # can be processed uniformly.
        return iter([self])

    def __len__(self):
        return 1

    def __repr__(self):
        return "%s(%s, %s)" % (self.__class__.__name__, self.formatted_repr(),
                               self.template)
class InstList(InstBase):
    """A vector of InstScalar's, addressed with Verilog-style slicing.

    name/model are kept mirrored between the list and every scalar; the
    property getters assert that mirror has not drifted.
    """

    def __init__(self, inst_scalars, name=None):
        self.scalars = []
        # Assign each scalar its position within the vector.
        index = 0
        for inst_scalar in inst_scalars:
            inst_scalar.index = index
            index += 1
            self.scalars.append(inst_scalar)
        self._name = name
        # Set by obj/template expression.
        self.template = None
        # Which model to build
        self._model = None
        # Set to True if this instance is a port
        self.isport = False

    @property
    def name(self):
        # Confirm all scalars have same name
        assert all(self._name == scalar.name for scalar in self),\
            "all scalars should have same name: %s" % self
        return self._name

    @name.setter
    def name(self, value):
        self._name = value
        for scalar in self.scalars:
            scalar.name = value

    #@property
    #def template(self):
    #    return self._template

    @property
    def model(self):
        # Confirm all scalars have same model
        assert all(self._model == scalar.model for scalar in self),\
            "all scalars should have same model: %s" % self
        return self._model

    @model.setter
    def model(self, value):
        for scalar in self.scalars:
            scalar.model = value
        self._model = value

    def make(self, model=None):
        # Build every scalar with the (possibly newly supplied) model.
        self.model = model or self.model
        for scalar in self:
            scalar.make(self.model)

    def __getitem__(self, key):
        """ Verilog like indexing syntax is used:
            [index] => python [index]
            [msb:lsb] => python [lsb:msb+1]
        """
        valid_range = range(len(self.scalars))
        if isinstance(key, int):
            if key not in valid_range:
                raise MintIndexError("inst index out of range")
            return self.scalars[key]
        elif isinstance(key, slice):
            msb, lsb, step = key.start, key.stop, key.step
            if msb is None: msb = valid_range[-1]
            if lsb is None: lsb = valid_range[0]
            if msb not in valid_range or lsb not in valid_range:
                raise MintIndexError("inst index out of range")
            if msb < lsb:
                raise MintIndexError("msb less than lsb")
            # Slicing returns a shallow copy sharing the selected scalars.
            sliced = copy.copy(self)
            sliced.scalars = self.scalars[lsb : msb + 1 : step]
            return sliced

    def __iter__(self):
        return iter(self.scalars)

    def __len__(self):
        return len(self.scalars)

    def __contains__(self, value):
        return value in self.scalars

    def __repr__(self):
        #r = "InstList("
        r = "%s(%s)[" % (self.__class__.__name__, self.name)
        for i, e in enumerate(self.scalars):
            if i: r += ', ' + str(e)
            else: r += str(e)
        r += "]"
        return r
#-------------------------------------------------------------------------------
class ModInstBase(object):
    """Connection-operator mixin for module instances.

    Comparison operators are overloaded as connection syntax between a module
    instance and an interface instance or net. NOTE(review): __eq__ performs a
    binding and returns True rather than implementing equality, and no
    __hash__ is defined alongside it — under Python 3 that makes instances
    unhashable; confirm they are never used as set members or dict keys.
    """

    def _handle_cmp_ops(self, other, op, dir):
        # Dispatch on the right-hand operand: interface instance or net.
        if isinstance(other, IntfInstBase):
            self.bind_intf(other, modport=0, dir_filter=dir)
            return True
        if isinstance(other, Net):
            self.bind_net(other, dir=dir)
            return True
        raise TypeError("unsupported operand type(s) for %s: '%s' and '%s'" %
                        (op, type(self), type(other)))

    def __eq__(self, other):
        # inst == other : bind without constraining direction
        return self._handle_cmp_ops(other, '==', Dir.ANY)

    def __ne__(self, other):
        # inst <> other : bidirectional (inout) binding
        return self._handle_cmp_ops(other, '<>', Dir.IO)

    def __gt__(self, other):
        # inst > other : bind with Dir.O
        return self._handle_cmp_ops(other, '>', Dir.O)

    def __lt__(self, other):
        # inst < other : bind with Dir.I
        return self._handle_cmp_ops(other, '<', Dir.I)
class ModInstScalar(InstScalar, ModInstBase):
    """A single module instance, tracking its wire pins and interface pins."""

    # InsGen.__getattr__ expects "obj" (module in this case) as first arg
    def __init__(self, module, name=None, index=None):
        super(ModInstScalar, self).__init__(name, index)
        self.module = module
        # Bind relationships with interfaces represented as Interface Pins
        self.intfpins = []
        # Bind relationships with wires represented as Pins
        self.pins = []

    def templatize(self, template):
        # Important - we make a copy, not a deepcopy. This ensures that the
        # copy's instance variables point to the same object as the original
        templatized = copy.copy(self)
        templatized.template = template
        return templatized

    def bind_intf(self, intfinst, modport, dir_filter):
        # One IntfPin per scalar of the (possibly vector) interface instance.
        for intfinst_scalar in intfinst:
            intfpin = IntfPin(modinst=self, intfinst=intfinst_scalar,
                              modport=modport, dir_filter=dir_filter,
                              template=self.template)
            #print 'IntfPin:', intfpin
            self.intfpins.append(intfpin)

    def bind_net(self, net, dir):
        pin = Pin(dir=dir, inst=self, net=net, name=self.template,
                  intfinst='_IF_')
        self.pins.append(pin)

    def make(self, model=None):
        # Build the underlying module with the (possibly newly supplied) model.
        self.model = model or self.model
        self.module.make(self.model)

    def get_pins(self):
        # Flatten interface pins into plain pins, then append the direct pins.
        pins = []
        for intfpin in self.intfpins:
            pins += intfpin.get_pins()
        pins += self.pins
        return pins

    def __repr__(self):
        return "ModInstScalar(%s, %s, %s)" % (self.formatted_repr(),
                                              self.module.name, self.template)
class ModInstList(InstList, ModInstBase):
    """Vector of module instances; binds interfaces and nets element-wise."""

    def templatize(self, template):
        """Return a shallow copy whose scalars all carry *template*."""
        scalars = [scalar.templatize(template) for scalar in self]
        templatized = copy.copy(self)
        templatized.scalars = scalars
        templatized.template = template
        return templatized

    def bind_intf(self, intfinst, modport, dir_filter):
        """Bind every scalar of this vector to *intfinst*.

        A scalar interface instance fans out to every module scalar; a
        vector interface instance pairs positionally (sizes must match).
        """
        if isinstance(intfinst, IntfInstScalar):
            # v - s: every module scalar shares the one interface instance.
            for modinst_scalar in self:
                intfpin = IntfPin(modinst=modinst_scalar, intfinst=intfinst,
                                  modport=modport, dir_filter=dir_filter,
                                  template=self.template)
                modinst_scalar.intfpins.append(intfpin)
        else:
            # v - v: positional pairing requires equal vector widths.
            if len(self) != len(intfinst):
                raise MintConnectionError("vector sizes differ: %s(%s), %s(%s)" %
                                          (self, len(self), intfinst, len(intfinst)))
            for modinst_scalar, intfinst_scalar in zip(self, intfinst):
                intfpin = IntfPin(modinst=modinst_scalar,
                                  intfinst=intfinst_scalar,
                                  modport=modport, dir_filter=dir_filter,
                                  template=self.template)
                modinst_scalar.intfpins.append(intfpin)

    def bind_net(self, net, dir):
        """Bind *net* directly (no interface) to every scalar with *dir*."""
        for modinst_scalar in self:
            # FIX: tag direct net bindings with intfinst='_IF_' for
            # consistency with ModInstScalar.bind_net -- previously the
            # vector path left Pin.intfinst as None while the scalar path
            # set '_IF_'.
            pin = Pin(dir=dir, inst=modinst_scalar, net=net,
                      name=self.template, intfinst='_IF_')
            modinst_scalar.pins.append(pin)
#-------------------------------------------------------------------------------
class IntfInstBase(object):
    """Connection DSL shared by interface-instance scalars and vectors.

    Mirror image of ModInstBase: the binding is delegated to the module
    instance on the other side (modport position 1), and the direction
    filters are reversed relative to ModInstBase so ``intf > inst`` means
    the same connection as ``inst < intf``.
    """
    def _handle_cmp_ops(self, other, op, dir_filter):
        if isinstance(other, ModInstBase):
            other.bind_intf(self, modport=1, dir_filter=dir_filter)
            return True
        raise TypeError("unsupported operand type(s) for %s: '%s' and '%s'" %
                        (op, type(self), type(other)))

    def __eq__(self, other):
        return self._handle_cmp_ops(other, '==', Dir.ANY)

    def __ne__(self, other):
        return self._handle_cmp_ops(other, '<>', Dir.IO)

    def __gt__(self, other):
        return self._handle_cmp_ops(other, '>', Dir.I)

    def __lt__(self, other):
        return self._handle_cmp_ops(other, '<', Dir.O)
class IntfInstScalar(InstScalar, IntfInstBase):
    """A single interface instance."""

    # InsGen.__getattr__ expects "obj" (interface in this case) as first arg
    def __init__(self, interface, name=None, index=None):
        super(IntfInstScalar, self).__init__(name, index)
        self.interface = interface

    def templatize(self, template):
        # NOTE(review): unlike ModInstScalar.templatize this mutates the
        # instance in place and returns self instead of a copy -- confirm
        # that is intentional.
        self.template = template
        return self

    def make(self, model=None):
        # A truthy model overrides the stored one before building.
        self.model = model or self.model
        self.interface.make(self.model)

    def __repr__(self):
        return "IntfInstScalar(%s, %s, %s)" % (self.formatted_repr(),
                                               self.interface.name, self.template)
class IntfInstList(InstList, IntfInstBase):
    """Vector of interface instances."""

    def templatize(self, template):
        """Stamp *template* onto every scalar in place and return self."""
        for member in self:
            member.template = template
        return self
#-------------------------------------------------------------------------------
class Pin(object):
    """
    P = port name, dir
    I = inst/modport
    N = net
    PIN = I.P(N) = inst I has port P that connects to net N
    """
    def __init__(self, dir, inst, net, name=None, intfinst=None):
        self.dir = dir
        self.modinst = inst
        self.net = net
        # This may be defined by "inst/'name'" expression, else net name
        self._name = name
        self.intfinst = intfinst
        # Template used for full/formatted name
        self.template = "{name}"

    @property
    def name(self):
        """Explicit name if one was given, otherwise the bound net's name."""
        if self._name:
            return self._name
        try:
            return self.net.name
        except AttributeError:
            # This happens if net is a Const or Concat and no port name was
            # specified.  FIX: report via self.modinst -- __init__ stores the
            # instance as 'modinst', so the previous 'self.inst' raised
            # AttributeError instead of the intended MintConnectionError.
            raise MintConnectionError("port name not specified for '%s' and '%s'" %
                                      (self.modinst, self.net))

    @name.setter
    def name(self, value):
        self._name = value

    @property
    def fname(self):
        """ Return full/formatted name """
        return self.template.format(name=self.name)

    def __repr__(self):
        r = '{self.dir}: {self.modinst.name}.{self.fname}({self.net.fname})'
        return r.format(self=self)
class IntfPin(object):
    """
    Interface Pin binds a modinst to a view/filter of the interface instance
    P = port template, dir
    I = inst
    N = interface inst, modport
    PIN = I.P(N) = inst I has port P that connects to net N
    """
    def __init__(self, modinst, intfinst, modport, dir_filter, template=None):
        self.modinst = modinst
        self.intfinst = intfinst
        self.modport = modport  # this could be int(position) or str(name)
        self.dir_filter = dir_filter
        # This may be defined by "inst/template" expression, else default
        self._template = template

    @property
    def template(self):
        """Explicit port template if given, else the scalar/vector default."""
        if self._template is not None:
            return self._template
        if self.modinst.index is None:
            return Default.scalar_port_template
        return Default.vector_port_template

    def get_pins(self):
        """Expand this interface pin into the concrete Pins of the modport.

        Only pins whose direction matches dir_filter are kept; pin and net
        templates are rewritten in place using the interface instance's
        name (i), index (k) and formatted name (I).
        """
        interface = self.intfinst.interface
        # TODO: consider replacing with named tuple
        if isinstance(self.modport, int):
            modport_name = interface.port_at_pos[self.modport]
        else:
            modport_name = self.modport
        modport = interface.module_instances[modport_name]
        # Get the pins from the modport that match the direction criteria and
        # compute the port and wire names based on naming rules
        pins = []
        for pin in modport.get_pins():
            if self.dir_filter in (Dir.ANY, pin.dir):
                i = self.intfinst.name
                k = self.intfinst.formatted_repr(fmt0="", fmt1="{index}")
                I = self.intfinst.formatted_repr(fmt0="{name}",
                                                 fmt1="{name}{index}")
                # In-place pin template change
                pin_template = self.template
                pin.template = pin_template.format(i=i, k=k, I=I, n='{name}')
                # In-place wire template change
                net_template = self.intfinst.template or Default.net_template
                if hasattr(pin.net, 'template'):
                    pin.net.template = net_template.format(i=i, k=k, I=I,
                                                           n='{name}')
                pin.intfinst = I
                pins.append(pin)
        return pins

    def __repr__(self):
        # FIX: this previously interpolated '{self.name}', but no 'name'
        # attribute or property exists (it is commented out historically),
        # so repr() raised AttributeError.  Show the port template instead.
        r = '{self.dir_filter}: {self.modinst.name}.{self.template}'
        r += '({self.intfinst.name}.{self.modport})'
        return r.format(self=self)
#-------------------------------------------------------------------------------
class MintObject(object):
    """Base container for a design unit.

    Holds ordered registries of module and interface instances and runs a
    named "model" method (looked up on self) to populate them.
    """
    def __init__(self, name=None, model=None):
        self._name = name or self.__class__.__name__
        self.model = model
        self.module_instances = collections.OrderedDict()
        self.interface_instances = collections.OrderedDict()
        self.port_at_pos = []
        # TODO add shadow dict for self.intstances
        if model:
            self.make(model)

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value):
        self._name = value

    def add(self, obj):
        """Register a module or interface instance under its name."""
        if obj.name is None:
            # FIX: call-form raise -- the old 'raise X, msg' statement is
            # Python-2-only syntax and a SyntaxError under Python 3.
            raise MintValueError("obj %s has no name" % obj)
        if isinstance(obj, ModInstBase):
            self.module_instances[obj.name] = obj
        elif isinstance(obj, IntfInstBase):
            self.interface_instances[obj.name] = obj

    def make(self, model):
        """Invoke the model method named *model*; raise if it is missing."""
        try:
            model_method = getattr(self, model)
        except AttributeError:
            raise MintModelDoesNotExist("'%s' of '%s'" % (model, self.name))
        # Model methods receive the container explicitly as well as via
        # the bound self.
        model_method(self)

    def get_module_instances(self, flatten=False):
        """Return module instances; with flatten=True, expand vectors."""
        mod_insts = []
        for mod_inst in self.module_instances.values():
            if flatten and isinstance(mod_inst, ModInstList):
                mod_insts.extend(mod_inst)
            else:
                mod_insts.append(mod_inst)
        return mod_insts

    def get_interface_instances(self, flatten=False):
        """Return interface instances; with flatten=True, expand vectors."""
        intf_insts = []
        for intf_inst in self.interface_instances.values():
            if flatten and isinstance(intf_inst, IntfInstList):
                intf_insts.extend(intf_inst)
            else:
                intf_insts.append(intf_inst)
        return intf_insts
# Concrete design-unit containers; all behavior comes from MintObject.
class Module(MintObject): pass
class Interface(MintObject): pass
|
nilq/baby-python
|
python
|
"""Remote"""
from os import path
import uuid
import time
import json
import tornado.ioloop
import tornado.websocket
import tornado.web
from models.led_strip import LedStrip
from models.color import Color
strip = LedStrip(14)
def start():
    """Restart the strip animation (stops any running animation first)."""
    strip.stop_animation()
    print("start_animation")
    strip.start_animation()
def stop():
    """Stop the running strip animation."""
    print("stop animation")
    strip.stop_animation()
def change(effects):
    """Replace the strip's effects, notify all clients, persist to disk.

    *effects* is a list of {'name': ..., 'options': ...} dicts.
    """
    strip.remove_all_effects()
    for effect in effects:
        strip.add_effect_by_name(effect['name'], options=effect['options'])
    for key in clients:
        print(clients[key].uuid)
        clients[key].send_led_strip_info()
    # FIX: use a context manager so the store file is closed (and flushed)
    # even if serialization fails; previously the open() handle was leaked.
    with open("./effect.store", "w") as store:
        json.dump(effects, store)
clients = {}
class MainHandler(tornado.web.RequestHandler):  # pylint: disable=W0223
    """Serves the static index page."""

    def get(self):
        """Respond with the contents of index.html next to this module."""
        index_path = "{}/index.html".format(path.dirname(path.abspath(__file__)))
        # FIX: context manager guarantees the handle is closed even if
        # read()/write() raises; previously close() only ran on success.
        with open(index_path, "r") as file:
            self.write(file.read())
class LedStripWebsocket(tornado.websocket.WebSocketHandler):  # pylint: disable=W0223
    """Per-client websocket handler.

    Each connection registers itself in the module-level ``clients`` dict
    under a fresh uuid so ``change()`` can broadcast strip updates.
    """

    def simple_init(self):
        """ Initialize Socket """
        self.last = time.time()   # timestamp of the last keep-alive sent
        self.stop = False         # NOTE(review): appears unused -- confirm
        self.uuid = uuid.uuid1()  # registry key for this connection

    def check_origin(self, origin):
        """Accept connections from any origin (no origin checking)."""
        return True

    def send_led_strip_info(self):
        """Push the current strip state and effect list to this client."""
        result = {}
        result['ledstrip'] = strip.to_json()
        effects = strip.get_effects()
        result['effects'] = []
        for effect in effects:
            result['effects'].append(effect.to_json())
        result_json = "{}"
        try:
            result_json = json.dumps(result)
        except Exception as error:
            # Best-effort: on serialization failure fall back to the empty
            # "{}" payload instead of dropping the connection.
            print(error)
        self.write_message(u"{}".format(result_json))

    def open(self):  # pylint: disable=W0221
        """Register the new client and start its keep-alive timer."""
        print("Websocket Opened")
        self.simple_init()
        clients[self.uuid] = self
        self.send_led_strip_info()
        # Fires every second; keep_alive itself rate-limits to >10s idle.
        self.loop = tornado.ioloop.PeriodicCallback(self.keep_alive, 1000)
        self.loop.start()

    def keep_alive(self):
        """Send a keep-alive message if the socket has been idle >10s."""
        if time.time() - self.last > 10:
            self.write_message(u'{"message":"keep Alive"}')
            self.last = time.time()

    def on_message(self, message):
        """Dispatch a JSON message: {'action': 'stop'|'start'|'change', ...}."""
        print("LedStripWebsocket")
        print(message)
        data = json.loads(message)
        if data['action'] == 'stop':
            stop()
        if data['action'] == 'start':
            start()
        if data['action'] == 'change':
            if 'effects' in data:
                change(data['effects'])
                self.write_message(u'{"message":"Changes done!"}')

    def on_close(self):
        """Stop the keep-alive timer and drop this client from the registry."""
        print("Websocket Closed")
        try:
            self.loop.stop()
            del clients[self.uuid]
        except KeyError:
            print("Could not remove {}".format(self.uuid))
        except Exception:
            print("Exception {}".format(self.uuid))
def make_app():
    """Build the Tornado application with the page and websocket routes."""
    routes = [
        (r"/", MainHandler),
        (r"/index", MainHandler),
        (r"/index.html", MainHandler),
        (r"/ledstrip", LedStripWebsocket),
    ]
    return tornado.web.Application(routes)
if __name__ == "__main__":
    app = make_app()
    app.listen(8888)
    try:
        # Restore the last persisted effect chain, if any.
        with open("./effect.store", "r") as store:
            effects = json.load(store)
        change(effects)
    except Exception as error:
        # FIX: the original used a comma -- print('...{}',format(error)) --
        # which printed a tuple instead of formatting the message.
        print('Could not load from file, error: {}'.format(error))
        strip.add_effect_by_name("rainbow", options={"hue_end": 60})
        strip.set_background_color(Color(0, 0, 0))
    start()
    try:
        tornado.ioloop.IOLoop.current().start()
    finally:
        stop()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from unittest import TestCase
class DataTest(TestCase):
    """Obey the testing goat."""

    def test_something(self):
        """
        A testing template -- make to update tests.yml if you change the
        testing name.
        """
        self.assertEqual(True, True)
|
nilq/baby-python
|
python
|
from selenium.webdriver.common.by import By
from seleniumpm.webpage import Webpage
from seleniumpm.webelements.textfield import TextField
from seleniumpm.locator import Locator
class GooglePage(Webpage):
    """
    Page object for the Google search page.  Acts as a container for the
    WebElements an engineer may want to interact with on the page.
    """

    def __init__(self, driver, url=None):
        super(GooglePage, self).__init__(driver, url)
        self.search_field = TextField(driver, Locator.by_name('q'))

    def get_result_links(self):
        """
        Collect the href of every search-result heading link.

        :return: Returns a list of links from a Google search.
        """
        anchors = self.driver.find_elements(By.XPATH, "//h3[contains(@class, 'r')]/a")
        return [anchor.get_attribute("href") for anchor in anchors]
|
nilq/baby-python
|
python
|
import unittest
from ArrayQueue import ArrayQueue, Empty
class TestArrayQueue(unittest.TestCase):
    """Unit tests for the ArrayQueue FIFO implementation."""

    def setUp(self):
        # Fresh queue holding 1, 2, 3 (front to back) before every test.
        self.queue = ArrayQueue()
        for value in (1, 2, 3):
            self.queue.enqueue(value)

    def test_instantiation(self):
        print('Can create an instance')
        self.assertIsInstance(self.queue, ArrayQueue)

    def test_length_checking(self):
        print('Can check the length of the queue')
        self.assertEqual(len(self.queue), 3)

    def test_first_method(self):
        print('Can return the first element of the queue')
        self.assertEqual(self.queue.first(), 1)

    def test_enqueue_method(self):
        print('Can add elements to the queue')
        for value in (4, 5):
            self.queue.enqueue(value)
        self.assertEqual(len(self.queue), 5)
        self.assertEqual(self.queue.first(), 1)

    def test_dequeue_method(self):
        print('Can remove elements from the front of the queue')
        for value in (4, 5):
            self.queue.enqueue(value)
        self.queue.dequeue()
        self.assertEqual(self.queue.dequeue(), 2)
        self.assertEqual(len(self.queue), 3)
        self.assertEqual(self.queue.first(), 3)

    def test_is_empty_method(self):
        print('Can check if the queue is empty')
        for _ in range(3):
            self.queue.dequeue()
        self.assertEqual(self.queue.is_empty(), True)

    def test_exception_raising(self):
        print('Can raise exception while performing action(s) on an empty queue')
        for _ in range(3):
            self.queue.dequeue()
        with self.assertRaises(Empty):
            self.queue.first()
            self.queue.dequeue()
# Run the suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
nilq/baby-python
|
python
|
"""Unit tests for memory-based file-like objects.
StringIO -- for unicode strings
BytesIO -- for bytes
"""
import unittest
from test import support
import io
import _pyio as pyio
import pickle
class MemorySeekTestMixin:
    """Read/seek/tell tests shared by the BytesIO and StringIO test classes.

    Host classes supply ``buftype`` (bytes/str constructor), ``ioclass``
    (the stream class under test) and ``EOF`` (the empty-buffer sentinel).
    """

    def testInit(self):
        # Construction from an initial buffer must not raise.
        buf = self.buftype("1234567890")
        bytesIo = self.ioclass(buf)

    def testRead(self):
        buf = self.buftype("1234567890")
        bytesIo = self.ioclass(buf)
        self.assertEqual(buf[:1], bytesIo.read(1))
        self.assertEqual(buf[1:5], bytesIo.read(4))
        # Over-long read returns just the remainder, then EOF.
        self.assertEqual(buf[5:], bytesIo.read(900))
        self.assertEqual(self.EOF, bytesIo.read())

    def testReadNoArgs(self):
        buf = self.buftype("1234567890")
        bytesIo = self.ioclass(buf)
        self.assertEqual(buf, bytesIo.read())
        self.assertEqual(self.EOF, bytesIo.read())

    def testSeek(self):
        buf = self.buftype("1234567890")
        bytesIo = self.ioclass(buf)
        bytesIo.read(5)
        bytesIo.seek(0)
        self.assertEqual(buf, bytesIo.read())
        bytesIo.seek(3)
        self.assertEqual(buf[3:], bytesIo.read())
        # Float positions are rejected.
        self.assertRaises(TypeError, bytesIo.seek, 0.0)

    def testTell(self):
        buf = self.buftype("1234567890")
        bytesIo = self.ioclass(buf)
        self.assertEqual(0, bytesIo.tell())
        bytesIo.seek(5)
        self.assertEqual(5, bytesIo.tell())
        # Seeking past the end is allowed and reflected by tell().
        bytesIo.seek(10000)
        self.assertEqual(10000, bytesIo.tell())
class MemoryTestMixin:
    """Core behavioral tests shared by BytesIO/StringIO implementations.

    Host classes supply ``buftype``, ``ioclass``, ``EOF`` and
    ``UnsupportedOperation``.
    """

    def test_detach(self):
        # Memory streams have no underlying raw stream to detach.
        buf = self.ioclass()
        self.assertRaises(self.UnsupportedOperation, buf.detach)

    def write_ops(self, f, t):
        # Helper: a sequence of write/seek/truncate operations whose net
        # effect is the content "hello world\n" (12 chars after truncate).
        self.assertEqual(f.write(t("blah.")), 5)
        self.assertEqual(f.seek(0), 0)
        self.assertEqual(f.write(t("Hello.")), 6)
        self.assertEqual(f.tell(), 6)
        self.assertEqual(f.seek(5), 5)
        self.assertEqual(f.tell(), 5)
        self.assertEqual(f.write(t(" world\n\n\n")), 9)
        self.assertEqual(f.seek(0), 0)
        self.assertEqual(f.write(t("h")), 1)
        self.assertEqual(f.truncate(12), 12)
        self.assertEqual(f.tell(), 1)

    def test_write(self):
        buf = self.buftype("hello world\n")
        memio = self.ioclass(buf)
        self.write_ops(memio, self.buftype)
        self.assertEqual(memio.getvalue(), buf)
        memio = self.ioclass()
        self.write_ops(memio, self.buftype)
        self.assertEqual(memio.getvalue(), buf)
        self.assertRaises(TypeError, memio.write, None)
        memio.close()
        self.assertRaises(ValueError, memio.write, self.buftype(""))

    def test_writelines(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass()
        self.assertEqual(memio.writelines([buf] * 100), None)
        self.assertEqual(memio.getvalue(), buf * 100)
        memio.writelines([])
        self.assertEqual(memio.getvalue(), buf * 100)
        memio = self.ioclass()
        # A bad element mid-list: everything before it is still written.
        self.assertRaises(TypeError, memio.writelines, [buf] + [1])
        self.assertEqual(memio.getvalue(), buf)
        self.assertRaises(TypeError, memio.writelines, None)
        memio.close()
        self.assertRaises(ValueError, memio.writelines, [])

    def test_writelines_error(self):
        memio = self.ioclass()
        def error_gen():
            yield self.buftype('spam')
            raise KeyboardInterrupt
        # Exceptions from the iterable propagate out of writelines.
        self.assertRaises(KeyboardInterrupt, memio.writelines, error_gen())

    def test_truncate(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertRaises(ValueError, memio.truncate, -1)
        memio.seek(6)
        self.assertEqual(memio.truncate(), 6)
        self.assertEqual(memio.getvalue(), buf[:6])
        self.assertEqual(memio.truncate(4), 4)
        self.assertEqual(memio.getvalue(), buf[:4])
        # truncate() does not move the stream position.
        self.assertEqual(memio.tell(), 6)
        memio.seek(0, 2)
        memio.write(buf)
        self.assertEqual(memio.getvalue(), buf[:4] + buf)
        pos = memio.tell()
        self.assertEqual(memio.truncate(None), pos)
        self.assertEqual(memio.tell(), pos)
        self.assertRaises(TypeError, memio.truncate, '0')
        memio.close()
        self.assertRaises(ValueError, memio.truncate, 0)

    def test_init(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertEqual(memio.getvalue(), buf)
        memio = self.ioclass(None)
        self.assertEqual(memio.getvalue(), self.EOF)
        # Re-running __init__ resets the buffer.
        memio.__init__(buf * 2)
        self.assertEqual(memio.getvalue(), buf * 2)
        memio.__init__(buf)
        self.assertEqual(memio.getvalue(), buf)
        self.assertRaises(TypeError, memio.__init__, [])

    def test_read(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertEqual(memio.read(0), self.EOF)
        self.assertEqual(memio.read(1), buf[:1])
        self.assertEqual(memio.read(4), buf[1:5])
        self.assertEqual(memio.read(900), buf[5:])
        self.assertEqual(memio.read(), self.EOF)
        memio.seek(0)
        self.assertEqual(memio.read(), buf)
        self.assertEqual(memio.read(), self.EOF)
        self.assertEqual(memio.tell(), 10)
        memio.seek(0)
        # Negative size and None both mean "read everything".
        self.assertEqual(memio.read(-1), buf)
        memio.seek(0)
        self.assertEqual(type(memio.read()), type(buf))
        memio.seek(100)
        self.assertEqual(type(memio.read()), type(buf))
        memio.seek(0)
        self.assertEqual(memio.read(None), buf)
        self.assertRaises(TypeError, memio.read, '')
        memio.close()
        self.assertRaises(ValueError, memio.read)

    def test_readline(self):
        buf = self.buftype("1234567890\n")
        memio = self.ioclass(buf * 2)
        self.assertEqual(memio.readline(0), self.EOF)
        self.assertEqual(memio.readline(), buf)
        self.assertEqual(memio.readline(), buf)
        self.assertEqual(memio.readline(), self.EOF)
        memio.seek(0)
        # A size limit may split a line across calls.
        self.assertEqual(memio.readline(5), buf[:5])
        self.assertEqual(memio.readline(5), buf[5:10])
        self.assertEqual(memio.readline(5), buf[10:15])
        memio.seek(0)
        self.assertEqual(memio.readline(-1), buf)
        memio.seek(0)
        self.assertEqual(memio.readline(0), self.EOF)
        buf = self.buftype("1234567890\n")
        memio = self.ioclass((buf * 3)[:-1])
        self.assertEqual(memio.readline(), buf)
        self.assertEqual(memio.readline(), buf)
        # Final line without a trailing newline is still returned.
        self.assertEqual(memio.readline(), buf[:-1])
        self.assertEqual(memio.readline(), self.EOF)
        memio.seek(0)
        self.assertEqual(type(memio.readline()), type(buf))
        self.assertEqual(memio.readline(), buf)
        self.assertRaises(TypeError, memio.readline, '')
        memio.close()
        self.assertRaises(ValueError, memio.readline)

    def test_readlines(self):
        buf = self.buftype("1234567890\n")
        memio = self.ioclass(buf * 10)
        self.assertEqual(memio.readlines(), [buf] * 10)
        memio.seek(5)
        self.assertEqual(memio.readlines(), [buf[5:]] + [buf] * 9)
        memio.seek(0)
        # The hint stops after the line that crosses the byte threshold.
        self.assertEqual(memio.readlines(15), [buf] * 2)
        memio.seek(0)
        self.assertEqual(memio.readlines(-1), [buf] * 10)
        memio.seek(0)
        self.assertEqual(memio.readlines(0), [buf] * 10)
        memio.seek(0)
        self.assertEqual(type(memio.readlines()[0]), type(buf))
        memio.seek(0)
        self.assertEqual(memio.readlines(None), [buf] * 10)
        self.assertRaises(TypeError, memio.readlines, '')
        memio.close()
        self.assertRaises(ValueError, memio.readlines)

    def test_iterator(self):
        buf = self.buftype("1234567890\n")
        memio = self.ioclass(buf * 10)
        self.assertEqual(iter(memio), memio)
        self.assertTrue(hasattr(memio, '__iter__'))
        self.assertTrue(hasattr(memio, '__next__'))
        i = 0
        for line in memio:
            self.assertEqual(line, buf)
            i += 1
        self.assertEqual(i, 10)
        memio.seek(0)
        i = 0
        for line in memio:
            self.assertEqual(line, buf)
            i += 1
        self.assertEqual(i, 10)
        memio = self.ioclass(buf * 2)
        memio.close()
        self.assertRaises(ValueError, memio.__next__)

    def test_getvalue(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertEqual(memio.getvalue(), buf)
        # getvalue() is independent of the stream position.
        memio.read()
        self.assertEqual(memio.getvalue(), buf)
        self.assertEqual(type(memio.getvalue()), type(buf))
        memio = self.ioclass(buf * 1000)
        self.assertEqual(memio.getvalue()[-3:], self.buftype("890"))
        memio = self.ioclass(buf)
        memio.close()
        self.assertRaises(ValueError, memio.getvalue)

    def test_seek(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        memio.read(5)
        self.assertRaises(ValueError, memio.seek, -1)
        self.assertRaises(ValueError, memio.seek, 1, -1)
        self.assertRaises(ValueError, memio.seek, 1, 3)
        self.assertEqual(memio.seek(0), 0)
        self.assertEqual(memio.seek(0, 0), 0)
        self.assertEqual(memio.read(), buf)
        self.assertEqual(memio.seek(3), 3)
        self.assertEqual(memio.seek(0, 1), 3)
        self.assertEqual(memio.read(), buf[3:])
        self.assertEqual(memio.seek(len(buf)), len(buf))
        self.assertEqual(memio.read(), self.EOF)
        memio.seek(len(buf) + 1)
        self.assertEqual(memio.read(), self.EOF)
        self.assertEqual(memio.seek(0, 2), len(buf))
        self.assertEqual(memio.read(), self.EOF)
        memio.close()
        self.assertRaises(ValueError, memio.seek, 0)

    def test_overseek(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertEqual(memio.seek(len(buf) + 1), 11)
        self.assertEqual(memio.read(), self.EOF)
        self.assertEqual(memio.tell(), 11)
        self.assertEqual(memio.getvalue(), buf)
        memio.write(self.EOF)
        self.assertEqual(memio.getvalue(), buf)
        # Writing past the end zero-fills the gap.
        memio.write(buf)
        self.assertEqual(memio.getvalue(), buf + self.buftype('\0') + buf)

    def test_tell(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertEqual(memio.tell(), 0)
        memio.seek(5)
        self.assertEqual(memio.tell(), 5)
        memio.seek(10000)
        self.assertEqual(memio.tell(), 10000)
        memio.close()
        self.assertRaises(ValueError, memio.tell)

    def test_flush(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertEqual(memio.flush(), None)

    def test_flags(self):
        memio = self.ioclass()
        self.assertEqual(memio.writable(), True)
        self.assertEqual(memio.readable(), True)
        self.assertEqual(memio.seekable(), True)
        self.assertEqual(memio.isatty(), False)
        self.assertEqual(memio.closed, False)
        memio.close()
        # After close, the capability queries raise but .closed still works.
        self.assertRaises(ValueError, memio.writable)
        self.assertRaises(ValueError, memio.readable)
        self.assertRaises(ValueError, memio.seekable)
        self.assertRaises(ValueError, memio.isatty)
        self.assertEqual(memio.closed, True)

    def test_subclassing(self):
        buf = self.buftype("1234567890")
        def test1():
            class MemIO(self.ioclass):
                pass
            m = MemIO(buf)
            return m.getvalue()
        def test2():
            class MemIO(self.ioclass):
                def __init__(me, a, b):
                    self.ioclass.__init__(me, a)
            m = MemIO(buf, None)
            return m.getvalue()
        self.assertEqual(test1(), buf)
        self.assertEqual(test2(), buf)

    def test_instance_dict_leak(self):
        # Test case for issue #6242.
        # This will be caught by regrtest.py -R if this leak.
        for _ in range(100):
            memio = self.ioclass()
            memio.foo = 1

    def test_pickling(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        memio.foo = 42
        memio.seek(2)
        class PickleTestMemIO(self.ioclass):
            def __init__(me, initvalue, foo):
                self.ioclass.__init__(me, initvalue)
                me.foo = foo
            # __getnewargs__ is undefined on purpose. This checks that PEP 307
            # is used to provide pickling support.
        # Pickle expects the class to be on the module level. Here we use a
        # little hack to allow the PickleTestMemIO class to derive from
        # self.ioclass without having to define all combinations explictly on
        # the module-level.
        import __main__
        PickleTestMemIO.__module__ = '__main__'
        PickleTestMemIO.__qualname__ = PickleTestMemIO.__name__
        __main__.PickleTestMemIO = PickleTestMemIO
        submemio = PickleTestMemIO(buf, 80)
        submemio.seek(2)
        # We only support pickle protocol 2 and onward since we use extended
        # __reduce__ API of PEP 307 to provide pickling support.
        for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
            for obj in (memio, submemio):
                obj2 = pickle.loads(pickle.dumps(obj, protocol=proto))
                self.assertEqual(obj.getvalue(), obj2.getvalue())
                self.assertEqual(obj.__class__, obj2.__class__)
                self.assertEqual(obj.foo, obj2.foo)
                self.assertEqual(obj.tell(), obj2.tell())
                obj2.close()
                self.assertRaises(ValueError, pickle.dumps, obj2, proto)
        del __main__.PickleTestMemIO
class BytesIOMixin:
    """BytesIO-only tests (getbuffer is not available on StringIO)."""

    def test_getbuffer(self):
        # getbuffer() must expose a mutable zero-copy view that locks
        # resizing/closing while it is alive.
        memio = self.ioclass(b"1234567890")
        buf = memio.getbuffer()
        self.assertEqual(bytes(buf), b"1234567890")
        memio.seek(5)
        buf = memio.getbuffer()
        self.assertEqual(bytes(buf), b"1234567890")
        # Trying to change the size of the BytesIO while a buffer is exported
        # raises a BufferError.
        self.assertRaises(BufferError, memio.write, b'x' * 100)
        self.assertRaises(BufferError, memio.truncate)
        self.assertRaises(BufferError, memio.close)
        self.assertFalse(memio.closed)
        # Mutating the buffer updates the BytesIO
        buf[3:6] = b"abc"
        self.assertEqual(bytes(buf), b"123abc7890")
        self.assertEqual(memio.getvalue(), b"123abc7890")
        # After the buffer gets released, we can resize and close the BytesIO
        # again
        del buf
        support.gc_collect()
        memio.truncate()
        memio.close()
        self.assertRaises(ValueError, memio.getbuffer)
class PyBytesIOTest(MemoryTestMixin, MemorySeekTestMixin,
                    BytesIOMixin, unittest.TestCase):
    """Run the shared memory-IO tests against the pure-Python BytesIO."""

    UnsupportedOperation = pyio.UnsupportedOperation

    @staticmethod
    def buftype(s):
        # Mixins build buffers from str literals; encode them to bytes.
        return s.encode("ascii")
    ioclass = pyio.BytesIO
    EOF = b""

    def test_read1(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        self.assertRaises(TypeError, memio.read1)
        self.assertEqual(memio.read(), buf)

    def test_readinto(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        b = bytearray(b"hello")
        self.assertEqual(memio.readinto(b), 5)
        self.assertEqual(b, b"12345")
        self.assertEqual(memio.readinto(b), 5)
        self.assertEqual(b, b"67890")
        self.assertEqual(memio.readinto(b), 0)
        self.assertEqual(b, b"67890")
        b = bytearray(b"hello world")
        memio.seek(0)
        # Partial fill: only the available bytes are overwritten.
        self.assertEqual(memio.readinto(b), 10)
        self.assertEqual(b, b"1234567890d")
        b = bytearray(b"")
        memio.seek(0)
        self.assertEqual(memio.readinto(b), 0)
        self.assertEqual(b, b"")
        self.assertRaises(TypeError, memio.readinto, '')
        import array
        a = array.array('b', b"hello world")
        memio = self.ioclass(buf)
        memio.readinto(a)
        self.assertEqual(a.tobytes(), b"1234567890d")
        memio.close()
        self.assertRaises(ValueError, memio.readinto, b)
        memio = self.ioclass(b"123")
        b = bytearray()
        # Reading past the end fills nothing.
        memio.seek(42)
        memio.readinto(b)
        self.assertEqual(b, b"")

    def test_relative_seek(self):
        buf = self.buftype("1234567890")
        memio = self.ioclass(buf)
        # BytesIO clamps relative seeks below zero instead of raising.
        self.assertEqual(memio.seek(-1, 1), 0)
        self.assertEqual(memio.seek(3, 1), 3)
        self.assertEqual(memio.seek(-4, 1), 0)
        self.assertEqual(memio.seek(-1, 2), 9)
        self.assertEqual(memio.seek(1, 1), 10)
        self.assertEqual(memio.seek(1, 2), 11)
        memio.seek(-3, 2)
        self.assertEqual(memio.read(), buf[-3:])
        memio.seek(0)
        memio.seek(1, 1)
        self.assertEqual(memio.read(), buf[1:])

    def test_unicode(self):
        # str input is rejected everywhere on a bytes stream.
        memio = self.ioclass()
        self.assertRaises(TypeError, self.ioclass, "1234567890")
        self.assertRaises(TypeError, memio.write, "1234567890")
        self.assertRaises(TypeError, memio.writelines, ["1234567890"])

    def test_bytes_array(self):
        buf = b"1234567890"
        import array
        a = array.array('b', list(buf))
        # Any bytes-like object is accepted for init and write.
        memio = self.ioclass(a)
        self.assertEqual(memio.getvalue(), buf)
        self.assertEqual(memio.write(a), 10)
        self.assertEqual(memio.getvalue(), buf)

    def test_issue5449(self):
        buf = self.buftype("1234567890")
        self.ioclass(initial_bytes=buf)
        self.assertRaises(TypeError, self.ioclass, buf, foo=None)
class TextIOTestMixin:
    """StringIO-specific tests, mostly covering newline translation modes."""

    def test_newlines_property(self):
        memio = self.ioclass(newline=None)
        # The C StringIO decodes newlines in write() calls, but the Python
        # implementation only does when reading. This function forces them to
        # be decoded for testing.
        def force_decode():
            memio.seek(0)
            memio.read()
        self.assertEqual(memio.newlines, None)
        memio.write("a\n")
        force_decode()
        self.assertEqual(memio.newlines, "\n")
        memio.write("b\r\n")
        force_decode()
        self.assertEqual(memio.newlines, ("\n", "\r\n"))
        memio.write("c\rd")
        force_decode()
        self.assertEqual(memio.newlines, ("\r", "\n", "\r\n"))

    def test_relative_seek(self):
        # Unlike BytesIO, StringIO rejects non-zero relative seeks.
        memio = self.ioclass()
        self.assertRaises(OSError, memio.seek, -1, 1)
        self.assertRaises(OSError, memio.seek, 3, 1)
        self.assertRaises(OSError, memio.seek, -3, 1)
        self.assertRaises(OSError, memio.seek, -1, 2)
        self.assertRaises(OSError, memio.seek, 1, 1)
        self.assertRaises(OSError, memio.seek, 1, 2)

    def test_textio_properties(self):
        memio = self.ioclass()
        # These are just dummy values but we nevertheless check them for fear
        # of unexpected breakage.
        self.assertIsNone(memio.encoding)
        self.assertIsNone(memio.errors)
        self.assertFalse(memio.line_buffering)

    def test_newline_default(self):
        memio = self.ioclass("a\nb\r\nc\rd")
        self.assertEqual(list(memio), ["a\n", "b\r\n", "c\rd"])
        self.assertEqual(memio.getvalue(), "a\nb\r\nc\rd")
        memio = self.ioclass()
        self.assertEqual(memio.write("a\nb\r\nc\rd"), 8)
        memio.seek(0)
        self.assertEqual(list(memio), ["a\n", "b\r\n", "c\rd"])
        self.assertEqual(memio.getvalue(), "a\nb\r\nc\rd")

    def test_newline_none(self):
        # newline=None
        memio = self.ioclass("a\nb\r\nc\rd", newline=None)
        self.assertEqual(list(memio), ["a\n", "b\n", "c\n", "d"])
        memio.seek(0)
        self.assertEqual(memio.read(1), "a")
        self.assertEqual(memio.read(2), "\nb")
        self.assertEqual(memio.read(2), "\nc")
        self.assertEqual(memio.read(1), "\n")
        self.assertEqual(memio.getvalue(), "a\nb\nc\nd")
        memio = self.ioclass(newline=None)
        self.assertEqual(2, memio.write("a\n"))
        self.assertEqual(3, memio.write("b\r\n"))
        self.assertEqual(3, memio.write("c\rd"))
        memio.seek(0)
        self.assertEqual(memio.read(), "a\nb\nc\nd")
        self.assertEqual(memio.getvalue(), "a\nb\nc\nd")
        memio = self.ioclass("a\r\nb", newline=None)
        self.assertEqual(memio.read(3), "a\nb")

    def test_newline_empty(self):
        # newline=""
        memio = self.ioclass("a\nb\r\nc\rd", newline="")
        self.assertEqual(list(memio), ["a\n", "b\r\n", "c\r", "d"])
        memio.seek(0)
        self.assertEqual(memio.read(4), "a\nb\r")
        self.assertEqual(memio.read(2), "\nc")
        self.assertEqual(memio.read(1), "\r")
        self.assertEqual(memio.getvalue(), "a\nb\r\nc\rd")
        memio = self.ioclass(newline="")
        self.assertEqual(2, memio.write("a\n"))
        self.assertEqual(2, memio.write("b\r"))
        self.assertEqual(2, memio.write("\nc"))
        self.assertEqual(2, memio.write("\rd"))
        memio.seek(0)
        self.assertEqual(list(memio), ["a\n", "b\r\n", "c\r", "d"])
        self.assertEqual(memio.getvalue(), "a\nb\r\nc\rd")

    def test_newline_lf(self):
        # newline="\n"
        memio = self.ioclass("a\nb\r\nc\rd", newline="\n")
        self.assertEqual(list(memio), ["a\n", "b\r\n", "c\rd"])
        self.assertEqual(memio.getvalue(), "a\nb\r\nc\rd")
        memio = self.ioclass(newline="\n")
        self.assertEqual(memio.write("a\nb\r\nc\rd"), 8)
        memio.seek(0)
        self.assertEqual(list(memio), ["a\n", "b\r\n", "c\rd"])
        self.assertEqual(memio.getvalue(), "a\nb\r\nc\rd")

    def test_newline_cr(self):
        # newline="\r"
        memio = self.ioclass("a\nb\r\nc\rd", newline="\r")
        self.assertEqual(memio.read(), "a\rb\r\rc\rd")
        memio.seek(0)
        self.assertEqual(list(memio), ["a\r", "b\r", "\r", "c\r", "d"])
        self.assertEqual(memio.getvalue(), "a\rb\r\rc\rd")
        memio = self.ioclass(newline="\r")
        self.assertEqual(memio.write("a\nb\r\nc\rd"), 8)
        memio.seek(0)
        self.assertEqual(list(memio), ["a\r", "b\r", "\r", "c\r", "d"])
        memio.seek(0)
        self.assertEqual(memio.readlines(), ["a\r", "b\r", "\r", "c\r", "d"])
        self.assertEqual(memio.getvalue(), "a\rb\r\rc\rd")

    def test_newline_crlf(self):
        # newline="\r\n"
        memio = self.ioclass("a\nb\r\nc\rd", newline="\r\n")
        self.assertEqual(memio.read(), "a\r\nb\r\r\nc\rd")
        memio.seek(0)
        self.assertEqual(list(memio), ["a\r\n", "b\r\r\n", "c\rd"])
        memio.seek(0)
        self.assertEqual(memio.readlines(), ["a\r\n", "b\r\r\n", "c\rd"])
        self.assertEqual(memio.getvalue(), "a\r\nb\r\r\nc\rd")
        memio = self.ioclass(newline="\r\n")
        self.assertEqual(memio.write("a\nb\r\nc\rd"), 8)
        memio.seek(0)
        self.assertEqual(list(memio), ["a\r\n", "b\r\r\n", "c\rd"])
        self.assertEqual(memio.getvalue(), "a\r\nb\r\r\nc\rd")

    def test_issue5265(self):
        # StringIO can duplicate newlines in universal newlines mode
        memio = self.ioclass("a\r\nb\r\n", newline=None)
        self.assertEqual(memio.read(5), "a\nb\n")
        self.assertEqual(memio.getvalue(), "a\nb\n")

    def test_newline_argument(self):
        self.assertRaises(TypeError, self.ioclass, newline=b"\n")
        self.assertRaises(ValueError, self.ioclass, newline="error")
        # These should not raise an error
        for newline in (None, "", "\n", "\r", "\r\n"):
            self.ioclass(newline=newline)
class PyStringIOTest(MemoryTestMixin, MemorySeekTestMixin,
                     TextIOTestMixin, unittest.TestCase):
    """Run the shared memory/seek/text tests against pure-Python StringIO."""
    buftype = str
    ioclass = pyio.StringIO
    UnsupportedOperation = pyio.UnsupportedOperation
    EOF = ""

    def test_lone_surrogates(self):
        # Issue #20424: an unpaired surrogate must survive read/write intact.
        surrogate = '\ud800'
        buf = self.ioclass(surrogate)
        self.assertEqual(buf.read(), surrogate)
        buf = self.ioclass()
        buf.write(surrogate)
        self.assertEqual(buf.getvalue(), surrogate)
class PyStringIOPickleTest(TextIOTestMixin, unittest.TestCase):
    """Test if pickle restores properly the internal state of StringIO.
    """
    buftype = str
    UnsupportedOperation = pyio.UnsupportedOperation
    EOF = ""

    class ioclass(pyio.StringIO):
        # Every "construction" actually pickles and unpickles a real
        # pyio.StringIO, so the inherited TextIOTestMixin tests exercise
        # the post-unpickling object rather than a freshly built one.
        def __new__(cls, *args, **kwargs):
            return pickle.loads(pickle.dumps(pyio.StringIO(*args, **kwargs)))
        def __init__(self, *args, **kwargs):
            # __new__ already returned a fully initialized object; a no-op
            # __init__ keeps re-initialization from wiping its state.
            pass
class CBytesIOTest(PyBytesIOTest):
    """Repeat the BytesIO tests against the C implementation in io."""
    ioclass = io.BytesIO
    UnsupportedOperation = io.UnsupportedOperation

    def test_getstate(self):
        buf = self.ioclass()
        state = buf.__getstate__()
        self.assertEqual(len(state), 3)
        # state[0] must expose the buffer protocol.
        bytearray(state[0])
        self.assertIsInstance(state[1], int)
        if state[2] is not None:
            self.assertIsInstance(state[2], dict)
        buf.close()
        # A closed object can no longer be pickled.
        self.assertRaises(ValueError, buf.__getstate__)

    def test_setstate(self):
        # __setstate__ must validate its input thoroughly.
        buf = self.ioclass()
        buf.__setstate__((b"no error", 0, None))
        buf.__setstate__((bytearray(b"no error"), 0, None))
        buf.__setstate__((b"no error", 0, {'spam': 3}))
        self.assertRaises(ValueError, buf.__setstate__, (b"", -1, None))
        for bad_state in (("unicode", 0, None),
                          (b"", 0.0, None),
                          (b"", 0, 0),
                          (b"len-test", 0)):
            self.assertRaises(TypeError, buf.__setstate__, bad_state)
        self.assertRaises(TypeError, buf.__setstate__)
        self.assertRaises(TypeError, buf.__setstate__, 0)
        buf.close()
        self.assertRaises(ValueError, buf.__setstate__, (b"closed", 0, None))

    check_sizeof = support.check_sizeof

    @support.cpython_only
    def test_sizeof(self):
        basesize = support.calcobjsize('P2nN2Pn')
        check = self.check_sizeof
        self.assertEqual(object.__sizeof__(io.BytesIO()), basesize)
        check(io.BytesIO(), basesize)
        check(io.BytesIO(b'a'), basesize + 1 + 1)
        check(io.BytesIO(b'a' * 1000), basesize + 1000 + 1)
class CStringIOTest(PyStringIOTest):
    """Repeat the StringIO tests against the C implementation in io."""
    ioclass = io.StringIO
    UnsupportedOperation = io.UnsupportedOperation

    # XXX: For the Python version of io.StringIO, this is highly
    # dependent on the encoding used for the underlying buffer.
    def test_widechar(self):
        data = self.buftype("\U0002030a\U00020347")
        buf = self.ioclass(data)
        self.assertEqual(buf.getvalue(), data)
        self.assertEqual(buf.write(data), len(data))
        self.assertEqual(buf.tell(), len(data))
        self.assertEqual(buf.getvalue(), data)
        self.assertEqual(buf.write(data), len(data))
        self.assertEqual(buf.tell(), len(data) * 2)
        self.assertEqual(buf.getvalue(), data + data)

    def test_getstate(self):
        buf = self.ioclass()
        state = buf.__getstate__()
        self.assertEqual(len(state), 4)
        self.assertIsInstance(state[0], str)
        self.assertIsInstance(state[1], str)
        self.assertIsInstance(state[2], int)
        if state[3] is not None:
            self.assertIsInstance(state[3], dict)
        buf.close()
        # A closed object can no longer be pickled.
        self.assertRaises(ValueError, buf.__getstate__)

    def test_setstate(self):
        # __setstate__ must validate its input thoroughly.
        buf = self.ioclass()
        buf.__setstate__(("no error", "\n", 0, None))
        buf.__setstate__(("no error", "", 0, {'spam': 3}))
        self.assertRaises(ValueError, buf.__setstate__, ("", "f", 0, None))
        self.assertRaises(ValueError, buf.__setstate__, ("", "", -1, None))
        for bad_state in ((b"", "", 0, None),
                          ("", b"", 0, None),
                          ("", "", 0.0, None),
                          ("", "", 0, 0),
                          ("len-test", 0)):
            self.assertRaises(TypeError, buf.__setstate__, bad_state)
        self.assertRaises(TypeError, buf.__setstate__)
        self.assertRaises(TypeError, buf.__setstate__, 0)
        buf.close()
        self.assertRaises(ValueError, buf.__setstate__, ("closed", "", 0, None))
class CStringIOPickleTest(PyStringIOPickleTest):
    """Repeat the StringIO pickle round-trip tests against C io.StringIO."""
    UnsupportedOperation = io.UnsupportedOperation

    class ioclass(io.StringIO):
        # Every "construction" actually pickles and unpickles a real
        # io.StringIO, so the inherited tests exercise the unpickled object.
        def __new__(cls, *args, **kwargs):
            return pickle.loads(pickle.dumps(io.StringIO(*args, **kwargs)))
        def __init__(self, *args, **kwargs):
            # __new__ already returned a fully initialized object.
            pass
def test_main():
    """Run every memory-IO test class through regrtest's driver."""
    support.run_unittest(PyBytesIOTest, PyStringIOTest,
                         CBytesIOTest, CStringIOTest,
                         PyStringIOPickleTest, CStringIOPickleTest)

if __name__ == '__main__':
    test_main()
|
nilq/baby-python
|
python
|
# Generated by Django 3.1.3 on 2022-01-18 13:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine Choice.question_id as a many-to-many relation to Question.

    NOTE(review): the old 'question_id' field is removed and re-created as
    a ManyToManyField; RemoveField drops the column, so any existing
    choice-to-question links are discarded when this migration runs.
    """

    dependencies = [
        ('onlinecourse', '0001_initial'),
    ]

    operations = [
        # Drop the old field first; a field cannot change kind in place.
        migrations.RemoveField(
            model_name='choice',
            name='question_id',
        ),
        # Re-add it as a many-to-many link; the reverse accessor is
        # Question.choices via related_name.
        migrations.AddField(
            model_name='choice',
            name='question_id',
            field=models.ManyToManyField(related_name='choices', to='onlinecourse.Question'),
        ),
    ]
|
nilq/baby-python
|
python
|
"""Tools to turn atmospheric profiles into their record representation.
MonoRTM takes gas amounts either as column density in molecules/cm² or
as molecular/volume mixing ratios in molecules/molecules. Internally the
two are separated by checking if the given value is smaller or larger than
one (monortm.f90, lines 421-422). Mixing ratios of all constituents are
relative to dry air.
Conversion between column density and mixing ratio is given by
column density = mixing ratio · dz · p / k / T
The broadening gases in element 8 of record 2.1.2 must always be given as
a column density. I cannot find anywhere in the documentation what these
broadening gases are but it seems that they are the noble gases since the
example profiles have mixing ratios of about 0.009 that are fairly constant
with height.
"""
from monortm.records import (Record21, Record211_IFORM0, Record211_IFORM1,
Record212_first, Record212_other)
# Molecular/Volume mixing ratios of dry air's major constituents
# Source: https://en.wikipedia.org/wiki/Atmosphere_of_Earth#Composition
mixing_ratio_N2 = 0.78084
mixing_ratio_O2 = 0.20946
mixing_ratio_Ar = 0.00934
mixing_ratio_CO2 = 0.00036 # Remaining parts

# Physical constants
boltzmann = 1.3806485e-23  # Boltzmann constant [J/K]
avogadro = 6.02214e23  # Avogadro constant [1/mol]
Rdry = 287.  # specific gas constant of dry air [J/(kg K)]
Rwat = 461.5  # specific gas constant of water vapor [J/(kg K)]
def layer(zs, ps, Ts, qvap, qliq, IFORM=1):
    """Create the records for an atmospheric layer.

    Contains only a minimal set of species (H2O, CO2, O2, N2 plus Ar as
    the broadening gas). Make sure to set NMOL to 22.

    :param zs: (bottom, top) boundary altitudes [m] (converted to km below)
    :param ps: (bottom, top) boundary pressures; assumed hPa, since the
        density calculation multiplies by 100 to get Pa — TODO confirm
    :param Ts: (bottom, top) boundary temperatures [K]
    :param qvap: specific humidity of the layer [kg/kg]
    :param qliq: specific cloud liquid water content [kg/kg]
    :param IFORM: record 2.1.1 format selector (0 or 1)
    :return: list of one Record211 and three Record212 records
    """
    assert IFORM == 0 or IFORM == 1
    assert len(zs) == 2
    assert len(ps) == 2
    assert len(Ts) == 2
    dz = zs[1] - zs[0]
    assert dz > 0
    # Layer means from the two boundary values
    pave = 0.5 * sum(ps)
    Tave = 0.5 * sum(Ts)
    # Gas constant of moist air [J/(kg K)], then ideal gas law for the
    # mean density [kg/m³] (pave in hPa → ·100 for Pa)
    Rave = (1-qvap)*Rdry + qvap*Rwat
    ρave = 100*pave / Tave / Rave
    # Calculate column number density of water from specific humidity
    H2O = (qvap # Specific humidity [kg/kg]
            * ρave # Density of water vapor → [kg/m³]
            / 0.018 # 0.018 kg of water is 1 mol → [mol/m³]
            * avogadro # Number density → [molecules/m³]
            * dz # Column number density → [molecules/m²]
            * 1.0e-4 # MonoRTM wants cm² → [molecules/cm²]
            )
    # Cloud amount in mm contained in column
    CLW = (qliq # Specific CLW [kg/kg]
           * ρave # Density of CLW [kg/m³]
           * dz # Column CLW [kg/m²], corresponds to [mm]
           )
    # MonoRTM expects the CLW field to be absent rather than zero.
    if CLW == 0: CLW = None
    # Broadening gas amount must be given as column density (see __doc__) ↓cm²
    broadening = mixing_ratio_Ar * dz * (pave*100) / Tave / boltzmann * 1.0e-4
    # Give species 1 (H2O), 2 (CO2), 7 (O2) and 22 (N2); all others zero.
    # Values < 1 are interpreted by MonoRTM as mixing ratios (see __doc__).
    row1 = [H2O, mixing_ratio_CO2, 0., 0., 0., 0., mixing_ratio_O2]
    row2 = [ 0., 0., 0., 0., 0., 0., 0., 0.]
    row3 = [ 0., 0., 0., 0., 0., 0., mixing_ratio_N2, None]
    # Select Record matching IFORM parameter
    Record211 = Record211_IFORM0 if IFORM == 0 else Record211_IFORM1
    return [Record211(PAVE=pave, TAVE=Tave, ALTZB=zs[0]/1000, PZB=ps[0],
            TZB=Ts[0], ALTZT=zs[1]/1000, PZT=ps[1], TZT=Ts[1],
            CLW=CLW), # z in km
            Record212_first(WKL=row1, WBROADL=broadening),
            Record212_other(WKL=row2),
            Record212_other(WKL=row3)
            ]
def from_mwrt_profile(z, p, T, lnq):
    """Output records for building MONORTM_PROF.IN from z, p, T, lnq.

    Uses the partitioning scheme from mwrt.
    """
    from mwrt.fap import partition_lnq
    qvap, qliq = partition_lnq(p, T, lnq)

    def edges(values):
        # Consecutive (bottom, top) boundary pairs as plain floats.
        return [(float(lo), float(hi)) for lo, hi in zip(values[:-1], values[1:])]

    def midpoints(values):
        # Arithmetic mean of consecutive boundary values.
        return [0.5*(lo + hi) for lo, hi in zip(values[:-1], values[1:])]

    records = [Record21(IFORM=1, NLAYRS=len(z) - 1, NMOL=22, SECNTO=1.,
                        H1=z[0] / 1000., H2=z[-1] / 1000., ANGLE=0., LEN=0)]
    for layer_args in zip(edges(z), edges(p), edges(T),
                          midpoints(qvap), midpoints(qliq)):
        records.extend(layer(*layer_args, IFORM=1))
    return records
|
nilq/baby-python
|
python
|
import numpy as np
import argparse
import cv2
from cnn.neural_network import CNN
from keras.utils import np_utils
from keras.optimizers import SGD
from sklearn.datasets import fetch_mldata
from sklearn.model_selection import train_test_split
# Parse the Arguments
ap = argparse.ArgumentParser()
# -s/--save_model: > 0 saves the trained weights to --save_weights afterwards
ap.add_argument("-s", "--save_model", type=int, default=-1)
# -l/--load_model: > 0 skips training and loads weights from --save_weights
ap.add_argument("-l", "--load_model", type=int, default=-1)
# -w/--save_weights: path of the weights file to load from / save to
ap.add_argument("-w", "--save_weights", type=str)
args = vars(ap.parse_args())  # plain dict of the parsed options
# Read/Download MNIST Dataset
print('Loading MNIST Dataset...')
# NOTE(review): fetch_mldata was removed from scikit-learn (0.22) after the
# mldata.org service shut down; on modern scikit-learn this call (and the
# import at the top of the file) must be migrated to
#   fetch_openml('mnist_784', version=1, as_frame=False)
dataset = fetch_mldata('MNIST Original')

# Read the MNIST data as arrays of 784 pixels and reshape to 28x28 image
# matrices with a singleton channel axis (channels-first) for the CNN.
mnist_data = dataset.data.reshape((dataset.data.shape[0], 28, 28))
mnist_data = mnist_data[:, np.newaxis, :, :]

# Scale pixels to [0, 1] and hold out 10% of the data for testing.
train_img, test_img, train_labels, test_labels = train_test_split(
    mnist_data / 255.0, dataset.target.astype("int"), test_size=0.1)

# Each image is a 28x28 matrix.
img_rows, img_columns = 28, 28

# One-hot encode the digit labels into total_classes columns.
total_classes = 10  # digit labels 0 to 9
train_labels = np_utils.to_categorical(train_labels, total_classes)
test_labels = np_utils.to_categorical(test_labels, total_classes)
# Define and compile the SGD optimizer and CNN model.
print('\n Compiling model...')
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
# When --load_model > 0, pass the weights path so CNN.build restores the
# pre-trained weights instead of starting from scratch.
clf = CNN.build(width=28, height=28, depth=1, total_classes=10, Saved_Weights_Path=args["save_weights"] if args["load_model"] > 0 else None)
clf.compile(loss="categorical_crossentropy", optimizer=sgd, metrics=["accuracy"])

# Training hyperparameters.
b_size = 128 # Batch size
num_epoch = 20 # Number of epochs
verb = 1 # Verbose

# Train (and evaluate) only when no pre-trained weights were requested;
# otherwise the weights loaded above are used as-is.
if args["load_model"] < 0:
    print('\nTraining the Model...')
    clf.fit(train_img, train_labels, batch_size=b_size, epochs=num_epoch,verbose=verb)

    # Evaluate accuracy and loss function on the held-out test data.
    print('Evaluating Accuracy and Loss Function...')
    loss, accuracy = clf.evaluate(test_img, test_labels, batch_size=128, verbose=1)
    print('Accuracy of Model: {:.2f}%'.format(accuracy * 100))

# Save the trained weights when --save_model > 0.
if args["save_model"] > 0:
    print('Saving weights to file...')
    clf.save_weights(args["save_weights"], overwrite=True)
# Show five randomly selected test digits with their CNN predictions.
for num in np.random.choice(np.arange(0, len(test_labels)), size=(5,)):
    # Predict the label of the digit using the CNN.
    probs = clf.predict(test_img[np.newaxis, num])
    prediction = probs.argmax(axis=1)

    # Rescale pixels back to 0-255 and resize the image to 100x100 from
    # 28x28 for better viewing.
    image = (test_img[num][0] * 255).astype("uint8")
    image = cv2.merge([image] * 3)  # grayscale -> 3-channel image
    image = cv2.resize(image, (100, 100), interpolation=cv2.INTER_LINEAR)
    # Draw the predicted digit in green in the top-left corner.
    cv2.putText(image, str(prediction[0]), (5, 20),cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)

    # Show and print the Actual Image and Predicted Label Value
    print('Predicted Label: {}, Actual Value: {}'.format(prediction[0],np.argmax(test_labels[num])))
    cv2.imshow('Digits', image)
    cv2.waitKey(0)  # wait for a key press before showing the next digit
#---------------------- EOC ---------------------
|
nilq/baby-python
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.