hexsha
stringlengths 40
40
| size
int64 2
1.02M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
1.02M
| avg_line_length
float64 1
417k
| max_line_length
int64 1
987k
| alphanum_fraction
float64 0
1
| content_no_comment
stringlengths 0
1.01M
| is_comment_constant_removed
bool 1
class | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c4a20e25214c64f1fee4492086b179d9de887ea
| 5,520
|
py
|
Python
|
imsngpy/gpwatch.py
|
SilverRon/imsngpy
|
e9e55a73403bef4c73dcc242735efc28d79a3066
|
[
"MIT"
] | 1
|
2021-12-22T08:58:47.000Z
|
2021-12-22T08:58:47.000Z
|
imsngpy/gpwatch.py
|
SilverRon/imsngpy
|
e9e55a73403bef4c73dcc242735efc28d79a3066
|
[
"MIT"
] | null | null | null |
imsngpy/gpwatch.py
|
SilverRon/imsngpy
|
e9e55a73403bef4c73dcc242735efc28d79a3066
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#============================================================
# Module
#------------------------------------------------------------
import os
import sys
import time
# IMSNGpy modules
sys.path.append('/home/paek/imsngpy')
from misc import *
# Astropy
from astropy.io import ascii
#============================================================
# Function
#------------------------------------------------------------
def get_size(start_path = '.'):
	"""Return the total size in bytes of all regular files under *start_path*.

	Walks the directory tree recursively; symbolic links are skipped so
	that broken links don't raise and link targets aren't double-counted.
	"""
	return sum(
		os.path.getsize(path)
		for root, _dirs, names in os.walk(start_path)
		for path in (os.path.join(root, name) for name in names)
		if not os.path.islink(path)
	)
#------------------------------------------------------------
# Path
#------------------------------------------------------------
# Root directories for incoming observation data, tables, logs and pipeline code.
path_obsdata = '/data6/obsdata'
path_table = '/home/paek/table'
path_log = '/home/paek/log'
path_gppy = '/home/paek/imsngpy/imsngpy'
path_check_rasa36 = '/home/paek/qsopy/monitor/classify_rasa36.py'
path_preprocess = f'{path_gppy}/pipeline.processing.py'
# Slack
# Bot token, looked up by name in the shared key table.
keytbl = ascii.read(f'{path_table}/keys.dat')
OAuth_Token = keytbl['key'][keytbl['name']=='slack'].item()
#------------------------------------------------------------
# Data information
#------------------------------------------------------------
# Per-observatory settings: raw-data directory, per-site log file,
# and the number of cores the pipeline may use.
obsdict = dict(
	# LOAO
	loao=dict(
		path_base='/data6/obsdata/LOAO',
		path_new='',
		log=f'{path_log}/loao.log',
		size=0, # [bytes]
		core=1, # 4
	),
	# DOAO
	doao=dict(
		path_base='/data6/obsdata/DOAO',
		path_new='',
		log=f'{path_log}/doao.log',
		size=0, # [bytes]
		core=1, # 4
	),
	# SOAO
	soao=dict(
		path_base='/data6/obsdata/SOAO',
		path_new='',
		log=f'{path_log}/soao.log',
		size=0, # [bytes]
		core=1, # 4
	),
	# CBNUO
	cbnuo=dict(
		path_base='/data6/obsdata/CBNUO', # ./2021_0101
		path_new='',
		log=f'{path_log}/cbnuo.log',
		size=0, # [bytes]
		core=1, # 4
	),
	# KHAO
	khao=dict(
		path_base='/data6/obsdata/KHAO', # ./2021_0101
		path_new='',
		log=f'{path_log}/khao.log',
		size=0, # [bytes]
		core=2, # 4
	),
	# MDFTS
	mdfts=dict(
		path_base='/data6/obsdata/MDFTS', # ./2021_0101
		path_new='',
		log=f'{path_log}/mdfts.log',
		size=0, # [bytes]
		core=2, # 4
	),
	# KCT_STX16803
	kct_stx16803=dict(
		path_base='/data6/obsdata/KCT_STX16803',
		path_new='',
		log=f'{path_log}/kct_stx16803.log',
		size=0, # [bytes]
		core=1, # 4
	),
	# RASA36
	rasa36=dict(
		path_base='/data6/obsdata/RASA36',
		path_new='',
		log=f'{path_log}/rasa36.log',
		size=0, # [bytes]
		core=1, # 4
	),
)
#------------------------------------------------------------
# NOTE: keep this list in sync with the keys of `obsdict` above.
# BUGFIX: 'MDFTS' was missing here although obsdict defines it,
# so MDFTS could never be selected at the prompt.
obslist = ['LOAO', 'DOAO', 'SOAO', 'CBNUO', 'KHAO', 'MDFTS', 'KCT_STX16803', 'RASA36']
print('OBSERVATORY LIST :', end='')  # BUGFIX: typo 'OBSERVATOR'
print(obslist)
obs = input('obs:').upper()
# obs = 'LOAO'
delay = 10  # [s] half of the polling interval used while an upload is in progress
ncore = input('# of cores (i.e. 8):')
#============================================================
# Main body
#------------------------------------------------------------
print(f"{'='*60}\n\n[gpwatch/o_o] Watching new data for {obs} with {ncore} cores \n\n{'='*60}")
st = time.time()
while True:
	try:
		# Elapsed-time ticker (HH:MM:SS), rewritten in place on one line.
		et = time.time()
		delt = int(et - st)
		h = delt // (60*60)
		m = (delt % (60*60)) // 60  # BUGFIX: was `delt // 60` (total minutes, not the MM field)
		s = delt % 60
		timer = '{:02d}:{:02d}:{:02d}'.format(h, m, s)
		print(timer, end="\r")
		log = obsdict[obs]['log']
		path_base = f"{path_obsdata}/{obs}"
		# Directories that have already been processed are recorded in the log table.
		logtbl = ascii.read(log)
		dirlist = os.listdir(path_base)
		# A "new" data set is any directory not yet listed in the log.
		for f in dirlist:
			path_new = f"{path_base}/{f}"
			if (path_new not in logtbl['date']) and (f"{path_new}/" not in logtbl['date']) and os.path.isdir(path_new):
				print()
				#------------------------------------------------------------
				# Slack message: announce the newly detected data set
				#------------------------------------------------------------
				channel = '#pipeline'
				text = f'[gpwatch/{obs}] Detected New {os.path.basename(path_new)} Data'
				param_slack = dict(
					token = OAuth_Token,
					channel = channel,
					text = text,
				)
				slack_bot(**param_slack)
				#
				print(text)  # BUGFIX: was `print(test)` — undefined name, raised NameError
				# Poll the folder size until it stops growing (upload finished).
				init_size = get_size(path_new)
				while True:
					time.sleep(int(delay*2))
					now_size = get_size(path_new)
					if init_size != now_size:
						print(f'Still uploading {os.path.basename(path_new)} : {init_size} --> {now_size}')
						init_size = now_size
					else:
						# RASA36 exception: classify raw frames before processing.
						# BUGFIX: `obs` is upper-cased at input, so the old
						# comparison `obs == 'rasa36'` could never be true.
						if (obs == 'RASA36'):
							com = f'python {path_check_rasa36} {path_new}'
							print(com)
							os.system(com)
							# Only launch the pipeline if no additional folder
							# appeared while we were classifying.
							if len(dirlist) == len(os.listdir(path_base)):
								# BUGFIX: `path_calib` was undefined; the pipeline
								# script configured above is `path_preprocess`.
								com = f"python {path_preprocess} {obs} {ncore}"
								print(com)
								os.system(com)
							else:
								break
						else:
							# Run the preprocessing pipeline on the new data.
							com = f"python {path_preprocess} {obs} {ncore}"  # BUGFIX: was undefined `path_calib`
							print(com)
							os.system(com)
						print(f"[gpwatch/{obs}] Process for {os.path.basename(path_new)} is done.")
						print(f"{'='*60}\n\n[gpwatch/o_o] Watching new data for {obs} with {ncore} cores \n\n{'='*60}")
						break
	except Exception as e:
		print(e)
		#------------------------------------------------------------
		# Slack message: report the failure, then keep watching
		#------------------------------------------------------------
		channel = '#pipeline'
		text = f'[gpwatch/{obs}] Error\n{e}'
		param_slack = dict(
			token = OAuth_Token,
			channel = channel,
			text = text,
		)
		slack_bot(**param_slack)
	time.sleep(1)
| 27.6
| 108
| 0.509058
|
import os
import sys
import time
sys.path.append('/home/paek/imsngpy')
from misc import *
from astropy.io import ascii
def get_size(start_path = '.'):
total_size = 0
for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames:
fp = os.path.join(dirpath, f)
if not os.path.islink(fp):
total_size += os.path.getsize(fp)
return total_size
path_obsdata = '/data6/obsdata'
path_table = '/home/paek/table'
path_log = '/home/paek/log'
path_gppy = '/home/paek/imsngpy/imsngpy'
path_check_rasa36 = '/home/paek/qsopy/monitor/classify_rasa36.py'
path_preprocess = f'{path_gppy}/pipeline.processing.py'
keytbl = ascii.read(f'{path_table}/keys.dat')
OAuth_Token = keytbl['key'][keytbl['name']=='slack'].item()
obsdict = dict(
loao=dict(
path_base='/data6/obsdata/LOAO',
path_new='',
log=f'{path_log}/loao.log',
size=0,
core=1,
),
doao=dict(
path_base='/data6/obsdata/DOAO',
path_new='',
log=f'{path_log}/doao.log',
size=0,
core=1,
),
soao=dict(
path_base='/data6/obsdata/SOAO',
path_new='',
log=f'{path_log}/soao.log',
size=0,
core=1,
),
cbnuo=dict(
path_base='/data6/obsdata/CBNUO',
path_new='',
log=f'{path_log}/cbnuo.log',
size=0,
core=1,
),
khao=dict(
path_base='/data6/obsdata/KHAO',
path_new='',
log=f'{path_log}/khao.log',
size=0,
core=2,
),
mdfts=dict(
path_base='/data6/obsdata/MDFTS',
path_new='',
log=f'{path_log}/mdfts.log',
size=0,
core=2,
),
kct_stx16803=dict(
path_base='/data6/obsdata/KCT_STX16803',
path_new='',
log=f'{path_log}/kct_stx16803.log',
size=0,
core=1,
),
rasa36=dict(
path_base='/data6/obsdata/RASA36',
path_new='',
log=f'{path_log}/rasa36.log',
size=0,
core=1,
),
)
obslist = ['LOAO', 'DOAO', 'SOAO', 'CBNUO', 'KHAO', 'KCT_STX16803', 'RASA36']
print('OBSERVATOR LIST :', end='')
print(obslist)
obs = input('obs:').upper()
delay = 10
ncore = input('# of cores (i.e. 8):')
print(f"{'='*60}\n\n[gpwatch/o_o] Watching new data for {obs} with {ncore} cores \n\n{'='*60}")
st = time.time()
while True:
try:
et = time.time()
delt = int(et - st)
h = delt // (60*60)
m = delt // 60
s = delt % 60
timer = '{:02d}:{:02d}:{:02d}'.format(h, m, s)
print(timer, end="\r")
log = obsdict[obs]['log']
path_base = f"{path_obsdata}/{obs}"
logtbl = ascii.read(log)
dirlist = os.listdir(path_base)
for f in dirlist:
path_new = f"{path_base}/{f}"
if (path_new not in logtbl['date']) & (f"{path_new}/" not in logtbl['date']) & (os.path.isdir(path_new)):
print()
channel = '#pipeline'
text = f'[gpwatch/{obs}] Detected New {os.path.basename(path_new)} Data'
param_slack = dict(
token = OAuth_Token,
channel = channel,
text = text,
)
slack_bot(**param_slack)
print(test)
init_size = get_size(path_new)
while True:
time.sleep(int(delay*2))
now_size = get_size(path_new)
if init_size != now_size:
print(f'Still uploading {os.path.basename(path_new)} : {init_size} --> {now_size}')
init_size = now_size
else:
if (obs == 'rasa36'):
com = f'python {path_check_rasa36} {path_new}'
print(com)
os.system(com)
if len(dirlist) == len(os.listdir(path_base)):
com = f"python {path_calib} {obs} {ncore}"
print(com)
os.system(com)
else:
break
else:
com = f"python {path_calib} {obs} {ncore}"
print(com)
os.system(com)
print(f"[gpwatch/{obs}] Process for {os.path.basename(path_new)} is done.")
print(f"{'='*60}\n\n[gpwatch/o_o] Watching new data for {obs} with {ncore} cores \n\n{'='*60}")
break
except Exception as e:
print(e)
channel = '#pipeline'
text = f'[gpwatch/{obs}] Error\n{e}'
param_slack = dict(
token = OAuth_Token,
channel = channel,
text = text,
)
slack_bot(**param_slack)
time.sleep(1)
| true
| true
|
1c4a210a93526f7ae8c560a788759b5ed53c5f6f
| 21,306
|
py
|
Python
|
src/briefcase/commands/base.py
|
chuckyQ/briefcase
|
06e84e7b1c3af016c828a5a640d277809de6644b
|
[
"BSD-3-Clause"
] | 3
|
2020-10-20T00:59:22.000Z
|
2021-02-22T09:22:00.000Z
|
src/briefcase/commands/base.py
|
chuckyQ/briefcase
|
06e84e7b1c3af016c828a5a640d277809de6644b
|
[
"BSD-3-Clause"
] | null | null | null |
src/briefcase/commands/base.py
|
chuckyQ/briefcase
|
06e84e7b1c3af016c828a5a640d277809de6644b
|
[
"BSD-3-Clause"
] | null | null | null |
import argparse
import importlib
import inspect
import os
import platform
import shutil
import sys
from abc import ABC, abstractmethod
from cgi import parse_header
from pathlib import Path
from urllib.parse import urlparse
import requests
import toml
from cookiecutter.main import cookiecutter
from cookiecutter.repository import is_repo_url
from briefcase import __version__, integrations
from briefcase.config import AppConfig, BaseConfig, GlobalConfig, parse_config
from briefcase.console import Console
from briefcase.exceptions import (
BadNetworkResourceError,
BriefcaseCommandError,
BriefcaseConfigError,
MissingNetworkResourceError
)
from briefcase.integrations.subprocess import Subprocess
class TemplateUnsupportedVersion(BriefcaseCommandError):
    """Raised when a template has no branch for the requested version tag."""
    def __init__(self, version_tag):
        self.version_tag = version_tag
        message = 'Template does not support {version_tag}'.format(version_tag=version_tag)
        super().__init__(msg=message)
def create_config(klass, config, msg):
    """Construct a configuration object of type ``klass`` from an options dict.

    :param klass: The configuration class to instantiate.
    :param config: A dict of keyword arguments for the constructor.
    :param msg: A human-readable label for this configuration, used in the
        error message when required options are missing.
    :returns: An instance of ``klass``.
    :raises BriefcaseConfigError: If ``config`` is missing any required
        constructor arguments.
    """
    try:
        return klass(**config)
    except TypeError:
        # Inspect the constructor to find which parameters are
        # required and don't have a default value.
        required_args = {
            name
            for name, param in inspect.signature(klass.__init__).parameters.items()
            # IMPROVED: use the public inspect.Parameter.empty sentinel
            # instead of the private inspect._empty.
            if param.default == inspect.Parameter.empty
            and name not in {'self', 'kwargs'}
        }
        missing_args = required_args - config.keys()
        missing = ', '.join(
            "'{arg}'".format(arg=arg)
            for arg in sorted(missing_args)
        )
        raise BriefcaseConfigError(
            "{msg} is incomplete (missing {missing})".format(
                msg=msg,
                missing=missing
            )
        )
def cookiecutter_cache_path(template):
    """
    Determine the cookiecutter template cache directory given a template URL.

    This will return a valid path, regardless of whether `template`
    has been cached before.

    :param template: The template to use. This can be a filesystem path or
        a URL.
    :returns: The path that cookiecutter would use for the given template name.
    """
    basename = template.rstrip('/').split('/')[-1]
    # Everything before the first '.git' in the name is the cache key.
    cache_name, _sep, _rest = basename.partition('.git')
    return Path.home() / '.cookiecutters' / cache_name
def full_options(state, options):
    """
    Merge command state with keyword arguments.

    Command state takes precedence over any keyword argument.

    :param state: The current command state. Can be ``None``.
    :param options: The base options.
    :returns: A dictionary containing all of ``options``, with any values
        provided in ``state`` overriding the base ``options`` values.
    """
    if state is None:
        # No state to merge; hand back the base options unchanged.
        return options
    merged = options.copy()
    merged.update(state)
    return merged
class BaseCommand(ABC):
cmd_line = "briefcase {command} {platform} {output_format}"
GLOBAL_CONFIG_CLASS = GlobalConfig
APP_CONFIG_CLASS = AppConfig
    def __init__(self, base_path, home_path=Path.home(), apps=None, input_enabled=True):
        """Initialize the command with its working paths and tool handles.

        :param base_path: The project base path.
        :param home_path: The user's home directory. NOTE(review): the
            default is evaluated once at class-definition time, so patching
            ``Path.home()`` later does not change it — confirm intentional.
        :param apps: An optional pre-built mapping of app name to app config.
        :param input_enabled: Whether console input prompts are allowed.
        """
        self.base_path = base_path
        self.home_path = home_path
        self.dot_briefcase_path = home_path / ".briefcase"
        self.tools_path = self.dot_briefcase_path / 'tools'
        self.global_config = None
        self.apps = {} if apps is None else apps
        # Per-app cache of the path index loaded from briefcase.toml.
        self._path_index = {}
        # Some details about the host machine
        self.host_arch = platform.machine()
        self.host_os = platform.system()
        # External service APIs.
        # These are abstracted to enable testing without patching.
        self.cookiecutter = cookiecutter
        self.requests = requests
        self.input = Console(enabled=input_enabled)
        self.os = os
        self.sys = sys
        self.shutil = shutil
        self.subprocess = Subprocess(self)
        # The internal Briefcase integrations API.
        self.integrations = integrations
@property
def create_command(self):
"Factory property; return an instance of a create command for the same format"
format_module = importlib.import_module(self.__module__)
command = format_module.create(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def update_command(self):
"Factory property; return an instance of an update command for the same format"
format_module = importlib.import_module(self.__module__)
command = format_module.update(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def build_command(self):
"Factory property; return an instance of a build command for the same format"
format_module = importlib.import_module(self.__module__)
command = format_module.build(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def run_command(self):
"Factory property; return an instance of a run command for the same format"
format_module = importlib.import_module(self.__module__)
command = format_module.run(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def package_command(self):
"Factory property; return an instance of a package command for the same format"
format_module = importlib.import_module(self.__module__)
command = format_module.package(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def publish_command(self):
"Factory property; return an instance of a publish command for the same format"
format_module = importlib.import_module(self.__module__)
command = format_module.publish(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def platform_path(self):
"""
The path for all applications for this command's platform
"""
return self.base_path / self.platform
def bundle_path(self, app):
"""
The path to the bundle for the app in the output format.
The bundle is the template-generated source form of the app.
The path will usually be a directory, the existence of which is
indicative that the template has been rolled out for an app.
:param app: The app config
"""
return self.platform_path / app.formal_name
@abstractmethod
def binary_path(self, app):
"""
The path to the executable artefact for the app in the output format.
This may be a binary file produced by compilation; however, if
the output format doesn't require compilation, it may be the same
as the bundle path (assuming the bundle path is inherently
"executable"), or a path that reasonably represents the thing that can
be executed.
:param app: The app config
"""
...
@abstractmethod
def distribution_path(self, app):
"""
The path to the distributable artefact for the app in the output format.
This is the single file that should be uploaded for distribution.
This may be the binary (if the binary is a self contained executable);
however, if the output format produces an installer, it will be the
path to the installer.
:param app: The app config
"""
...
def _load_path_index(self, app: BaseConfig):
"""
Load the path index from the index file provided by the app template
:param app: The config object for the app
:return: The contents of the application path index.
"""
with (self.bundle_path(app) / 'briefcase.toml').open() as f:
self._path_index[app] = toml.load(f)['paths']
return self._path_index[app]
def support_path(self, app: BaseConfig):
"""
Obtain the path into which the support package should be unpacked
:param app: The config object for the app
:return: The full path where the support package should be unpacked.
"""
# If the index file hasn't been loaded for this app, load it.
try:
path_index = self._path_index[app]
except KeyError:
path_index = self._load_path_index(app)
return self.bundle_path(app) / path_index['support_path']
def app_packages_path(self, app: BaseConfig):
"""
Obtain the path into which dependencies should be installed
:param app: The config object for the app
:return: The full path where application dependencies should be installed.
"""
# If the index file hasn't been loaded for this app, load it.
try:
path_index = self._path_index[app]
except KeyError:
path_index = self._load_path_index(app)
return self.bundle_path(app) / path_index['app_packages_path']
def app_path(self, app: BaseConfig):
"""
Obtain the path into which the application should be installed.
:param app: The config object for the app
:return: The full path where application code should be installed.
"""
# If the index file hasn't been loaded for this app, load it.
try:
path_index = self._path_index[app]
except KeyError:
path_index = self._load_path_index(app)
return self.bundle_path(app) / path_index['app_path']
def app_module_path(self, app):
"""
Find the path for the application module for an app.
:param app: The config object for the app
:returns: The Path to the dist-info folder.
"""
app_home = [
path.split('/')
for path in app.sources
if path.rsplit('/', 1)[-1] == app.module_name
]
try:
if len(app_home) == 1:
path = Path(str(self.base_path), *app_home[0])
else:
raise BriefcaseCommandError(
"Multiple paths in sources found for application '{app.app_name}'".format(app=app)
)
except IndexError:
raise BriefcaseCommandError(
"Unable to find code for application '{app.app_name}'".format(app=app)
)
return path
@property
def python_version_tag(self):
"""
The major.minor of the Python version in use, as a string.
This is used as a repository label/tag to identify the appropriate
templates, etc to use.
"""
return '{major}.{minor}'.format(
major=self.sys.version_info.major,
minor=self.sys.version_info.minor
)
def verify_tools(self):
"""
Verify that the tools needed to run this command exist
Raises MissingToolException if a required system tool is missing.
"""
pass
def parse_options(self, extra):
parser = argparse.ArgumentParser(
prog=self.cmd_line.format(
command=self.command,
platform=self.platform,
output_format=self.output_format
),
description=self.description,
)
self.add_default_options(parser)
self.add_options(parser)
# Parse the full set of command line options from the content
# remaining after the basic command/platform/output format
# has been extracted.
options = vars(parser.parse_args(extra))
# Extract the base default options onto the command
self.input.enabled = options.pop('input_enabled')
self.verbosity = options.pop('verbosity')
return options
def clone_options(self, command):
"""
Clone options from one command to this one.
:param command: The command whose options are to be cloned
"""
self.input.enabled = command.input.enabled
self.verbosity = command.verbosity
def add_default_options(self, parser):
"""
Add the default options that exist on *all* commands
:param parser: a stub argparse parser for the command.
"""
parser.add_argument(
'-v', '--verbosity',
action='count',
default=1,
help="set the verbosity of output"
)
parser.add_argument(
'-V', '--version',
action='version',
version=__version__
)
parser.add_argument(
'--no-input',
action='store_false',
default=True,
dest="input_enabled",
help="Don't ask for user input. If any action would be destructive, "
"an error will be raised; otherwise, default answers will be "
"assumed."
)
def add_options(self, parser):
"""
Add any options that this command needs to parse from the command line.
:param parser: a stub argparse parser for the command.
"""
pass
def parse_config(self, filename):
try:
with open(filename) as config_file:
# Parse the content of the pyproject.toml file, extracting
# any platform and output format configuration for each app,
# creating a single set of configuration options.
global_config, app_configs = parse_config(
config_file,
platform=self.platform,
output_format=self.output_format
)
self.global_config = create_config(
klass=self.GLOBAL_CONFIG_CLASS,
config=global_config,
msg="Global configuration"
)
for app_name, app_config in app_configs.items():
# Construct an AppConfig object with the final set of
# configuration options for the app.
self.apps[app_name] = create_config(
klass=self.APP_CONFIG_CLASS,
config=app_config,
msg="Configuration for '{app_name}'".format(
app_name=app_name
)
)
except FileNotFoundError:
raise BriefcaseConfigError('configuration file not found')
def download_url(self, url, download_path):
"""
Download a given URL, caching it. If it has already been downloaded,
return the value that has been cached.
This is a utility method used to obtain assets used by the
install process. The cached filename will be the filename portion of
the URL, appended to the download path.
:param url: The URL to download
:param download_path: The path to the download cache folder. This path
will be created if it doesn't exist.
:returns: The filename of the downloaded (or cached) file.
"""
download_path.mkdir(parents=True, exist_ok=True)
response = self.requests.get(url, stream=True)
if response.status_code == 404:
raise MissingNetworkResourceError(
url=url,
)
elif response.status_code != 200:
raise BadNetworkResourceError(
url=url,
status_code=response.status_code
)
# The initial URL might (read: will) go through URL redirects, so
# we need the *final* response. We look at either the `Content-Disposition`
# header, or the final URL, to extract the cache filename.
cache_full_name = urlparse(response.url).path
header_value = response.headers.get('Content-Disposition')
if header_value:
# See also https://tools.ietf.org/html/rfc6266
value, parameters = parse_header(header_value)
if (value.split(':', 1)[-1].strip().lower() == 'attachment' and parameters.get('filename')):
cache_full_name = parameters['filename']
cache_name = cache_full_name.split('/')[-1]
filename = download_path / cache_name
if not filename.exists():
# We have meaningful content, and it hasn't been cached previously,
# so save it in the requested location
print('Downloading {cache_name}...'.format(cache_name=cache_name))
with filename.open('wb') as f:
total = response.headers.get('content-length')
if total is None:
f.write(response.content)
else:
downloaded = 0
total = int(total)
for data in response.iter_content(chunk_size=1024 * 1024):
downloaded += len(data)
f.write(data)
done = int(50 * downloaded / total)
print('\r{}{} {}%'.format('#' * done, '.' * (50-done), 2*done), end='', flush=True)
print()
else:
print('{cache_name} already downloaded'.format(cache_name=cache_name))
return filename
def update_cookiecutter_cache(self, template: str, branch='master'):
"""
Ensure that we have a current checkout of a template path.
If the path is a local path, use the path as is.
If the path is a URL, look for a local cache; if one exists, update it,
including checking out the required branch.
:param template: The template URL or path.
:param branch: The template branch to use. Default: ``master``
:return: The path to the cached template. This may be the originally
provided path if the template was a file path.
"""
if is_repo_url(template):
# The app template is a repository URL.
#
# When in `no_input=True` mode, cookiecutter deletes and reclones
# a template directory, rather than updating the existing repo.
#
# Look for a cookiecutter cache of the template; if one exists,
# try to update it using git. If no cache exists, or if the cache
# directory isn't a git directory, or git fails for some reason,
# fall back to using the specified template directly.
try:
cached_template = cookiecutter_cache_path(template)
repo = self.git.Repo(cached_template)
try:
# Attempt to update the repository
remote = repo.remote(name='origin')
remote.fetch()
except self.git.exc.GitCommandError:
# We are offline, or otherwise unable to contact
# the origin git repo. It's OK to continue; but warn
# the user that the template may be stale.
print("***************************************************************************")
print("WARNING: Unable to update template (is your computer offline?)")
print("WARNING: Briefcase will use existing template without updating.")
print("***************************************************************************")
try:
# Check out the branch for the required version tag.
head = remote.refs[branch]
print("Using existing template (sha {hexsha}, updated {datestamp})".format(
hexsha=head.commit.hexsha,
datestamp=head.commit.committed_datetime.strftime("%c")
))
head.checkout()
except IndexError:
# No branch exists for the requested version.
raise TemplateUnsupportedVersion(branch)
except self.git.exc.NoSuchPathError:
# Template cache path doesn't exist.
# Just use the template directly, rather than attempting an update.
cached_template = template
except self.git.exc.InvalidGitRepositoryError:
# Template cache path exists, but isn't a git repository
# Just use the template directly, rather than attempting an update.
cached_template = template
else:
# If this isn't a repository URL, treat it as a local directory
cached_template = template
return cached_template
| 36.797927
| 107
| 0.597766
|
import argparse
import importlib
import inspect
import os
import platform
import shutil
import sys
from abc import ABC, abstractmethod
from cgi import parse_header
from pathlib import Path
from urllib.parse import urlparse
import requests
import toml
from cookiecutter.main import cookiecutter
from cookiecutter.repository import is_repo_url
from briefcase import __version__, integrations
from briefcase.config import AppConfig, BaseConfig, GlobalConfig, parse_config
from briefcase.console import Console
from briefcase.exceptions import (
BadNetworkResourceError,
BriefcaseCommandError,
BriefcaseConfigError,
MissingNetworkResourceError
)
from briefcase.integrations.subprocess import Subprocess
class TemplateUnsupportedVersion(BriefcaseCommandError):
def __init__(self, version_tag):
self.version_tag = version_tag
super().__init__(
msg='Template does not support {version_tag}'.format(
version_tag=version_tag
)
)
def create_config(klass, config, msg):
try:
return klass(**config)
except TypeError:
# value.
required_args = {
name
for name, param in inspect.signature(klass.__init__).parameters.items()
if param.default == inspect._empty
and name not in {'self', 'kwargs'}
}
missing_args = required_args - config.keys()
missing = ', '.join(
"'{arg}'".format(arg=arg)
for arg in sorted(missing_args)
)
raise BriefcaseConfigError(
"{msg} is incomplete (missing {missing})".format(
msg=msg,
missing=missing
)
)
def cookiecutter_cache_path(template):
template = template.rstrip('/')
tail = template.split('/')[-1]
cache_name = tail.rsplit('.git')[0]
return Path.home() / '.cookiecutters' / cache_name
def full_options(state, options):
if state is not None:
full = options.copy()
full.update(state)
else:
full = options
return full
class BaseCommand(ABC):
cmd_line = "briefcase {command} {platform} {output_format}"
GLOBAL_CONFIG_CLASS = GlobalConfig
APP_CONFIG_CLASS = AppConfig
def __init__(self, base_path, home_path=Path.home(), apps=None, input_enabled=True):
self.base_path = base_path
self.home_path = home_path
self.dot_briefcase_path = home_path / ".briefcase"
self.tools_path = self.dot_briefcase_path / 'tools'
self.global_config = None
self.apps = {} if apps is None else apps
self._path_index = {}
# Some details about the host machine
self.host_arch = platform.machine()
self.host_os = platform.system()
# External service APIs.
# These are abstracted to enable testing without patching.
self.cookiecutter = cookiecutter
self.requests = requests
self.input = Console(enabled=input_enabled)
self.os = os
self.sys = sys
self.shutil = shutil
self.subprocess = Subprocess(self)
# The internal Briefcase integrations API.
self.integrations = integrations
@property
def create_command(self):
format_module = importlib.import_module(self.__module__)
command = format_module.create(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def update_command(self):
format_module = importlib.import_module(self.__module__)
command = format_module.update(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def build_command(self):
format_module = importlib.import_module(self.__module__)
command = format_module.build(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def run_command(self):
format_module = importlib.import_module(self.__module__)
command = format_module.run(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def package_command(self):
format_module = importlib.import_module(self.__module__)
command = format_module.package(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def publish_command(self):
format_module = importlib.import_module(self.__module__)
command = format_module.publish(
base_path=self.base_path,
apps=self.apps,
input_enabled=self.input.enabled,
)
command.clone_options(self)
return command
@property
def platform_path(self):
return self.base_path / self.platform
def bundle_path(self, app):
return self.platform_path / app.formal_name
@abstractmethod
def binary_path(self, app):
...
@abstractmethod
def distribution_path(self, app):
...
def _load_path_index(self, app: BaseConfig):
with (self.bundle_path(app) / 'briefcase.toml').open() as f:
self._path_index[app] = toml.load(f)['paths']
return self._path_index[app]
def support_path(self, app: BaseConfig):
# If the index file hasn't been loaded for this app, load it.
try:
path_index = self._path_index[app]
except KeyError:
path_index = self._load_path_index(app)
return self.bundle_path(app) / path_index['support_path']
def app_packages_path(self, app: BaseConfig):
try:
path_index = self._path_index[app]
except KeyError:
path_index = self._load_path_index(app)
return self.bundle_path(app) / path_index['app_packages_path']
def app_path(self, app: BaseConfig):
# If the index file hasn't been loaded for this app, load it.
try:
path_index = self._path_index[app]
except KeyError:
path_index = self._load_path_index(app)
return self.bundle_path(app) / path_index['app_path']
def app_module_path(self, app):
app_home = [
path.split('/')
for path in app.sources
if path.rsplit('/', 1)[-1] == app.module_name
]
try:
if len(app_home) == 1:
path = Path(str(self.base_path), *app_home[0])
else:
raise BriefcaseCommandError(
"Multiple paths in sources found for application '{app.app_name}'".format(app=app)
)
except IndexError:
raise BriefcaseCommandError(
"Unable to find code for application '{app.app_name}'".format(app=app)
)
return path
@property
def python_version_tag(self):
return '{major}.{minor}'.format(
major=self.sys.version_info.major,
minor=self.sys.version_info.minor
)
    def verify_tools(self):
        """Hook for subclasses to check that required external tools exist; no-op by default."""
        pass
def parse_options(self, extra):
parser = argparse.ArgumentParser(
prog=self.cmd_line.format(
command=self.command,
platform=self.platform,
output_format=self.output_format
),
description=self.description,
)
self.add_default_options(parser)
self.add_options(parser)
options = vars(parser.parse_args(extra))
self.input.enabled = options.pop('input_enabled')
self.verbosity = options.pop('verbosity')
return options
    def clone_options(self, command):
        """Copy the shared option state (input mode, verbosity) from *command*."""
        self.input.enabled = command.input.enabled
        self.verbosity = command.verbosity
    def add_default_options(self, parser):
        """Register the options shared by every command (-v, -V, --no-input) on *parser*."""
        parser.add_argument(
            '-v', '--verbosity',
            action='count',
            default=1,
            help="set the verbosity of output"
        )
        parser.add_argument(
            '-V', '--version',
            action='version',
            version=__version__
        )
        parser.add_argument(
            '--no-input',
            action='store_false',
            default=True,
            dest="input_enabled",
            help="Don't ask for user input. If any action would be destructive, "
                 "an error will be raised; otherwise, default answers will be "
                 "assumed."
        )
    def add_options(self, parser):
        """Hook for subclasses to register command-specific options; no-op by default."""
        pass
    def parse_config(self, filename):
        """Load configuration from *filename* into self.global_config and self.apps.

        :raises BriefcaseConfigError: if the file does not exist (or, via
            create_config, if required configuration is invalid).
        """
        try:
            with open(filename) as config_file:
                # Parse the content of the pyproject.toml file, extracting
                # any platform and output format configuration for each app,
                # creating a single set of configuration options.
                # NOTE: this calls the module-level parse_config() helper,
                # not this method — the bare name resolves to the import.
                global_config, app_configs = parse_config(
                    config_file,
                    platform=self.platform,
                    output_format=self.output_format
                )
                self.global_config = create_config(
                    klass=self.GLOBAL_CONFIG_CLASS,
                    config=global_config,
                    msg="Global configuration"
                )
                for app_name, app_config in app_configs.items():
                    # Construct an AppConfig object with the final set of
                    # configuration options for the app.
                    self.apps[app_name] = create_config(
                        klass=self.APP_CONFIG_CLASS,
                        config=app_config,
                        msg="Configuration for '{app_name}'".format(
                            app_name=app_name
                        )
                    )
        except FileNotFoundError:
            raise BriefcaseConfigError('configuration file not found')
def download_url(self, url, download_path):
download_path.mkdir(parents=True, exist_ok=True)
response = self.requests.get(url, stream=True)
if response.status_code == 404:
raise MissingNetworkResourceError(
url=url,
)
elif response.status_code != 200:
raise BadNetworkResourceError(
url=url,
status_code=response.status_code
)
# The initial URL might (read: will) go through URL redirects, so
# we need the *final* response. We look at either the `Content-Disposition`
# header, or the final URL, to extract the cache filename.
cache_full_name = urlparse(response.url).path
header_value = response.headers.get('Content-Disposition')
if header_value:
# See also https://tools.ietf.org/html/rfc6266
value, parameters = parse_header(header_value)
if (value.split(':', 1)[-1].strip().lower() == 'attachment' and parameters.get('filename')):
cache_full_name = parameters['filename']
cache_name = cache_full_name.split('/')[-1]
filename = download_path / cache_name
if not filename.exists():
# We have meaningful content, and it hasn't been cached previously,
print('Downloading {cache_name}...'.format(cache_name=cache_name))
with filename.open('wb') as f:
total = response.headers.get('content-length')
if total is None:
f.write(response.content)
else:
downloaded = 0
total = int(total)
for data in response.iter_content(chunk_size=1024 * 1024):
downloaded += len(data)
f.write(data)
done = int(50 * downloaded / total)
print('\r{}{} {}%'.format('#' * done, '.' * (50-done), 2*done), end='', flush=True)
print()
else:
print('{cache_name} already downloaded'.format(cache_name=cache_name))
return filename
    def update_cookiecutter_cache(self, template: str, branch='master'):
        """Ensure the cookiecutter cache holds an up-to-date clone of *template*.

        For repository URLs, tries to fetch the cached clone and check out
        *branch*; if the cache is missing or not a git repo — or the template
        isn't a repo URL at all — the template string itself is returned for
        cookiecutter to resolve.

        :raises TemplateUnsupportedVersion: if *branch* does not exist in the
            cached repository.
        """
        if is_repo_url(template):
            # fall back to using the specified template directly.
            try:
                cached_template = cookiecutter_cache_path(template)
                repo = self.git.Repo(cached_template)
                try:
                    # Attempt to update the repository
                    remote = repo.remote(name='origin')
                    remote.fetch()
                except self.git.exc.GitCommandError:
                    # We are offline, or otherwise unable to contact
                    # the origin git repo. It's OK to continue; but warn
                    print("***************************************************************************")
                    print("WARNING: Unable to update template (is your computer offline?)")
                    print("WARNING: Briefcase will use existing template without updating.")
                    print("***************************************************************************")
                try:
                    head = remote.refs[branch]
                    print("Using existing template (sha {hexsha}, updated {datestamp})".format(
                        hexsha=head.commit.hexsha,
                        datestamp=head.commit.committed_datetime.strftime("%c")
                    ))
                    head.checkout()
                except IndexError:
                    # The requested branch isn't a known remote ref.
                    raise TemplateUnsupportedVersion(branch)
            except self.git.exc.NoSuchPathError:
                # Just use the template directly, rather than attempting an update.
                cached_template = template
            except self.git.exc.InvalidGitRepositoryError:
                # Template cache path exists, but isn't a git repository
                cached_template = template
        else:
            cached_template = template
        return cached_template
| true
| true
|
1c4a21aabd3ae6db6ff20f418f668c968e995202
| 1,966
|
py
|
Python
|
utils/setup.py
|
david8862/keras-CenterNet
|
e74b933f6dd5ffac04f2de3eb0d887742be8490f
|
[
"Apache-2.0"
] | null | null | null |
utils/setup.py
|
david8862/keras-CenterNet
|
e74b933f6dd5ffac04f2de3eb0d887742be8490f
|
[
"Apache-2.0"
] | null | null | null |
utils/setup.py
|
david8862/keras-CenterNet
|
e74b933f6dd5ffac04f2de3eb0d887742be8490f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import setuptools
from setuptools.extension import Extension
from distutils.command.build_ext import build_ext as DistUtilsBuildExt
class BuildExtension(setuptools.Command):
    """A build_ext command that delegates to setuptools' build_ext.

    numpy's include directory is appended at finalize time, so numpy does
    not need to be importable before setup() starts running.
    """
    # Mirror the distutils command's metadata so --help output matches.
    description = DistUtilsBuildExt.description
    user_options = DistUtilsBuildExt.user_options
    boolean_options = DistUtilsBuildExt.boolean_options
    help_options = DistUtilsBuildExt.help_options
    def __init__(self, *args, **kwargs):
        from setuptools.command.build_ext import build_ext as SetupToolsBuildExt
        # Bypass __setattr__ to avoid infinite recursion.
        self.__dict__['_command'] = SetupToolsBuildExt(*args, **kwargs)
    def __getattr__(self, name):
        # Delegate attribute reads to the wrapped command.
        return getattr(self._command, name)
    def __setattr__(self, name, value):
        # Delegate attribute writes to the wrapped command.
        setattr(self._command, name, value)
    def initialize_options(self, *args, **kwargs):
        return self._command.initialize_options(*args, **kwargs)
    def finalize_options(self, *args, **kwargs):
        ret = self._command.finalize_options(*args, **kwargs)
        # Import numpy lazily: it is only guaranteed to be installed by the
        # time options are finalized (see setup_requires below).
        import numpy
        self.include_dirs.append(numpy.get_include())
        return ret
    def run(self, *args, **kwargs):
        return self._command.run(*args, **kwargs)
# Cython extension modules compiled as part of the build.
extensions = [
    Extension(
        'compute_overlap',
        ['compute_overlap.pyx']
    ),
]
# Package metadata; the custom cmdclass injects numpy headers into build_ext.
setuptools.setup(
    name             = 'keras-CenterNet',
    version          = '0.0.1',
    description      = 'Keras implementation of CenterNet object detection.',
    url              = 'https://github.com/david8862/keras-CenterNet',
    author           = 'david8862',
    author_email     = 'david8862@gmail.com',
    maintainer       = 'david8862',
    maintainer_email = 'david8862@gmail.com',
    cmdclass         = {'build_ext': BuildExtension},
    packages         = setuptools.find_packages(),
    ext_modules      = extensions,
    setup_requires   = ["cython>=0.28", "numpy>=1.14.0"]
)
| 32.229508
| 80
| 0.660224
|
import setuptools
from setuptools.extension import Extension
from distutils.command.build_ext import build_ext as DistUtilsBuildExt
class BuildExtension(setuptools.Command):
description = DistUtilsBuildExt.description
user_options = DistUtilsBuildExt.user_options
boolean_options = DistUtilsBuildExt.boolean_options
help_options = DistUtilsBuildExt.help_options
def __init__(self, *args, **kwargs):
from setuptools.command.build_ext import build_ext as SetupToolsBuildExt
self.__dict__['_command'] = SetupToolsBuildExt(*args, **kwargs)
def __getattr__(self, name):
return getattr(self._command, name)
def __setattr__(self, name, value):
setattr(self._command, name, value)
def initialize_options(self, *args, **kwargs):
return self._command.initialize_options(*args, **kwargs)
def finalize_options(self, *args, **kwargs):
ret = self._command.finalize_options(*args, **kwargs)
import numpy
self.include_dirs.append(numpy.get_include())
return ret
def run(self, *args, **kwargs):
return self._command.run(*args, **kwargs)
extensions = [
Extension(
'compute_overlap',
['compute_overlap.pyx']
),
]
setuptools.setup(
name = 'keras-CenterNet',
version = '0.0.1',
description = 'Keras implementation of CenterNet object detection.',
url = 'https://github.com/david8862/keras-CenterNet',
author = 'david8862',
author_email = 'david8862@gmail.com',
maintainer = 'david8862',
maintainer_email = 'david8862@gmail.com',
cmdclass = {'build_ext': BuildExtension},
packages = setuptools.find_packages(),
ext_modules = extensions,
setup_requires = ["cython>=0.28", "numpy>=1.14.0"]
)
| true
| true
|
1c4a21e7724b1a0546fadafe66916377d6168f66
| 6,287
|
py
|
Python
|
engine/trainer/train.py
|
7eta/udk_labeler
|
8cd8a86bc1a78647c0aaf81ca78e6e518fb86ceb
|
[
"Apache-2.0"
] | 2
|
2021-03-08T02:29:09.000Z
|
2021-03-08T02:29:11.000Z
|
engine/trainer/train.py
|
7eta/udk_labeler
|
8cd8a86bc1a78647c0aaf81ca78e6e518fb86ceb
|
[
"Apache-2.0"
] | null | null | null |
engine/trainer/train.py
|
7eta/udk_labeler
|
8cd8a86bc1a78647c0aaf81ca78e6e518fb86ceb
|
[
"Apache-2.0"
] | null | null | null |
import os
import argparse
import numpy as np
import torch
import torch.optim as optim
from engine.dataloader import get_dataloader
from engine.retinanet import model
from engine.retinanet import coco_eval
from engine.log.saver import Saver
from tqdm import tqdm
from collections import deque
from engine.log import logger, summarise
# This code targets the torch 1.x API; fail fast on any other major version.
assert torch.__version__.split('.')[0] == '1'
print('CUDA available: {}'.format(torch.cuda.is_available()))
class Trainer(object):
    """RetinaNet trainer: wires up data loading, the network, optimization,
    COCO evaluation, TensorBoard logging and best-checkpoint saving.

    Requires CUDA; raises ValueError when no GPU is available.
    """
    def __init__(self, config, img_dir, coco_json):
        self.config = config
        # Define Saver
        self.saver = Saver(self.config)
        # Define Tensorboard
        if self.config.tensorboard:
            self.summary = summarise.TensorboardSummary(self.saver.directory)
            self.writer = self.summary.create_summary()
        # Define Logger
        self.getlogger = logger.get_logger(self.saver.directory)
        self.logger = self.getlogger
        # Define DataLoader
        self.train_loader, self.n_train_img,\
        self.val_set, self.val_loader, self.n_val_img, self.n_classes = get_dataloader(self.config, img_dir, coco_json)
        # Define Network (ResNet backbone depth selected by config)
        if self.config.depth == 18:
            self.retinanet = model.resnet18(num_classes=self.n_classes, pretrained=True)
        elif self.config.depth == 34:
            self.retinanet = model.resnet34(num_classes=self.n_classes, pretrained=True)
        elif self.config.depth == 50:
            self.retinanet = model.resnet50(num_classes=self.n_classes, pretrained=True)
        elif self.config.depth == 101:
            self.retinanet = model.resnet101(num_classes=self.n_classes, pretrained=True)
        elif self.config.depth == 152:
            self.retinanet = model.resnet152(num_classes=self.n_classes, pretrained=True)
        else:
            raise ValueError('Unsupported model depth, must be one of 18, 34, 50, 101, 152')
        # Define Optimizer
        self.optimizer = optim.Adam(self.retinanet.parameters(), lr=self.config.lr)
        # Define lr_scheduler: halve-on-plateau of the mean epoch loss
        self.scheduler = optim.lr_scheduler.ReduceLROnPlateau(self.optimizer, patience=3, verbose=True)
        # Running loss window for the progress bar
        self.loss_hist = deque(maxlen=500)
        # Define cuda (training is GPU-only)
        if torch.cuda.is_available():
            self.retinanet = torch.nn.DataParallel(self.retinanet).cuda()
        else:
            raise ValueError('=> Cuda is not available. Check cuda')
        # Define resume: optionally reload a whole serialized model
        self.best_f1_score = .0
        if self.config.resume is not None:
            self.retinanet = torch.load(self.config.resume)
            self.retinanet.cuda()
    def train(self, epoch):
        """Run one training epoch and step the LR scheduler on its mean loss."""
        self.retinanet.train()
        # Keep BatchNorm statistics frozen during fine-tuning.
        self.retinanet.module.freeze_bn()
        epoch_loss = []
        print(f'Num training images: {self.n_train_img}')
        with tqdm(self.train_loader) as tbar:
            for iter_num, data in enumerate(tbar):
                self.optimizer.zero_grad()
                img = data['img'].cuda().float()
                annot = data['annot']
                cls_loss, reg_loss = self.retinanet([img, annot])
                cls_loss = cls_loss.mean()
                reg_loss = reg_loss.mean()
                loss = cls_loss + reg_loss
                epoch_loss.append(float(loss))
                self.loss_hist.append(float(loss))
                # Skip backprop for batches that produced no loss.
                if bool(loss == 0):
                    continue
                loss.backward()
                torch.nn.utils.clip_grad_norm_(self.retinanet.parameters(), 0.1)
                self.optimizer.step()
                if self.config.tensorboard:
                    self.writer.add_scalar('Train_Loss/classification_loss',
                                           cls_loss,
                                           iter_num + epoch*(len(self.train_loader)))
                    self.writer.add_scalar('Train_Loss/regression_loss',
                                           reg_loss,
                                           iter_num + epoch*(len(self.train_loader)))
                    self.writer.add_scalar('Train_Loss/total_loss',
                                           np.mean(self.loss_hist),
                                           iter_num + epoch*(len(self.train_loader)))
                tbar.set_description(f'Epoch: {epoch} | '
                                     f'Cls loss: {cls_loss:1.5f} | '
                                     f'Reg loss: {reg_loss:1.5f} | '
                                     f'Running loss: {np.mean(self.loss_hist):1.5f}')
                del cls_loss, reg_loss
        self.scheduler.step(np.mean(epoch_loss))
    def validation(self, epoch):
        """Evaluate on the validation set; save a checkpoint when F1 improves."""
        print('Evaluating dataset')
        stats = coco_eval.evaluate_coco(self.val_set, self.retinanet, self.saver.directory)
        if stats is None:
            return
        # stats holds 12 COCO metrics (indices 0-11):
        # 0: mAP / 1: mAP@.5 / 2: mAP@.75 / 3: AP small / 4: AP medium / 5: AP large/
        # 6: AR Det1 / 7: AR Det10 / 8: AR Det100 / 9: AR small / 10: AR medium / 11: AR large
        if self.config.tensorboard:
            self.writer.add_scalar('Precision/mAP', stats[0], epoch)
            self.writer.add_scalar('Precision/mAP@50IOU', stats[1], epoch)
            self.writer.add_scalar('Precision/mAP@75IOU', stats[2], epoch)
            self.writer.add_scalar('Precision/mAP(samll)', stats[3], epoch)
            self.writer.add_scalar('Precision/mAP(medium)', stats[4], epoch)
            self.writer.add_scalar('Precision/mAP(large)', stats[5], epoch)
            self.writer.add_scalar('Recall/AR@1', stats[6], epoch)
            self.writer.add_scalar('Recall/AR@10', stats[7], epoch)
            self.writer.add_scalar('Recall/AR@100', stats[8], epoch)
            self.writer.add_scalar('Recall/AR@100(small)', stats[9], epoch)
            self.writer.add_scalar('Recall/AR@100(medium)', stats[10], epoch)
            self.writer.add_scalar('Recall/AR@100(large)', stats[11], epoch)
        mAP, AR = stats[0], stats[8]
        # F1 is the harmonic mean of mAP and AR. BUGFIX: guard the 0/0 case,
        # which previously raised ZeroDivisionError when both metrics are
        # zero (e.g. very early in training).
        if mAP + AR > 0:
            f1_score = 2 * (mAP * AR) / (mAP + AR)
        else:
            f1_score = 0.0
        if f1_score > self.best_f1_score:
            self.best_f1_score = f1_score
            self.saver.save_checkpoint(self.retinanet.module, f1_score)
| 41.091503
| 119
| 0.585335
|
import os
import argparse
import numpy as np
import torch
import torch.optim as optim
from engine.dataloader import get_dataloader
from engine.retinanet import model
from engine.retinanet import coco_eval
from engine.log.saver import Saver
from tqdm import tqdm
from collections import deque
from engine.log import logger, summarise
assert torch.__version__.split('.')[0] == '1'
print('CUDA available: {}'.format(torch.cuda.is_available()))
class Trainer(object):
def __init__(self, config, img_dir, coco_json):
self.config = config
self.saver = Saver(self.config)
if self.config.tensorboard:
self.summary = summarise.TensorboardSummary(self.saver.directory)
self.writer = self.summary.create_summary()
self.getlogger = logger.get_logger(self.saver.directory)
self.logger = self.getlogger
self.train_loader, self.n_train_img,\
self.val_set, self.val_loader, self.n_val_img, self.n_classes = get_dataloader(self.config, img_dir, coco_json)
if self.config.depth == 18:
self.retinanet = model.resnet18(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 34:
self.retinanet = model.resnet34(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 50:
self.retinanet = model.resnet50(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 101:
self.retinanet = model.resnet101(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 152:
self.retinanet = model.resnet152(num_classes=self.n_classes, pretrained=True)
else:
raise ValueError('Unsupported model depth, must be one of 18, 34, 50, 101, 152')
self.optimizer = optim.Adam(self.retinanet.parameters(), lr=self.config.lr)
self.scheduler = optim.lr_scheduler.ReduceLROnPlateau(self.optimizer, patience=3, verbose=True)
self.loss_hist = deque(maxlen=500)
if torch.cuda.is_available():
self.retinanet = torch.nn.DataParallel(self.retinanet).cuda()
else:
raise ValueError('=> Cuda is not available. Check cuda')
self.best_f1_score = .0
if self.config.resume is not None:
self.retinanet = torch.load(self.config.resume)
self.retinanet.cuda()
def train(self, epoch):
self.retinanet.train()
self.retinanet.module.freeze_bn()
epoch_loss = []
print(f'Num training images: {self.n_train_img}')
with tqdm(self.train_loader) as tbar:
for iter_num, data in enumerate(tbar):
self.optimizer.zero_grad()
img = data['img'].cuda().float()
annot = data['annot']
cls_loss, reg_loss = self.retinanet([img, annot])
cls_loss = cls_loss.mean()
reg_loss = reg_loss.mean()
loss = cls_loss + reg_loss
epoch_loss.append(float(loss))
self.loss_hist.append(float(loss))
if bool(loss == 0):
continue
loss.backward()
torch.nn.utils.clip_grad_norm_(self.retinanet.parameters(), 0.1)
self.optimizer.step()
if self.config.tensorboard:
self.writer.add_scalar('Train_Loss/classification_loss',
cls_loss,
iter_num + epoch*(len(self.train_loader)))
self.writer.add_scalar('Train_Loss/regression_loss',
reg_loss,
iter_num + epoch*(len(self.train_loader)))
self.writer.add_scalar('Train_Loss/total_loss',
np.mean(self.loss_hist),
iter_num + epoch*(len(self.train_loader)))
tbar.set_description(f'Epoch: {epoch} | '
f'Cls loss: {cls_loss:1.5f} | '
f'Reg loss: {reg_loss:1.5f} | '
f'Running loss: {np.mean(self.loss_hist):1.5f}')
del cls_loss, reg_loss
self.scheduler.step(np.mean(epoch_loss))
def validation(self, epoch):
print('Evaluating dataset')
stats = coco_eval.evaluate_coco(self.val_set, self.retinanet, self.saver.directory)
if stats is None:
return
if self.config.tensorboard:
self.writer.add_scalar('Precision/mAP', stats[0], epoch)
self.writer.add_scalar('Precision/mAP@50IOU', stats[1], epoch)
self.writer.add_scalar('Precision/mAP@75IOU', stats[2], epoch)
self.writer.add_scalar('Precision/mAP(samll)', stats[3], epoch)
self.writer.add_scalar('Precision/mAP(medium)', stats[4], epoch)
self.writer.add_scalar('Precision/mAP(large)', stats[5], epoch)
self.writer.add_scalar('Recall/AR@1', stats[6], epoch)
self.writer.add_scalar('Recall/AR@10', stats[7], epoch)
self.writer.add_scalar('Recall/AR@100', stats[8], epoch)
self.writer.add_scalar('Recall/AR@100(small)', stats[9], epoch)
self.writer.add_scalar('Recall/AR@100(medium)', stats[10], epoch)
self.writer.add_scalar('Recall/AR@100(large)', stats[11], epoch)
mAP, AR = stats[0], stats[8]
f1_score = 2 * (mAP * AR) / (mAP + AR)
if f1_score > self.best_f1_score:
self.best_f1_score = f1_score
self.saver.save_checkpoint(self.retinanet.module, f1_score)
| true
| true
|
1c4a245d2bb97a1c4a9d4c44fe206db1ec8eb500
| 1,617
|
py
|
Python
|
venv/Lib/site-packages/keystoneclient/tests/unit/v3/test_simple_cert.py
|
prasoon-uta/IBM-coud-storage
|
82a6876316715efbd0b492d0d467dde0ab26a56b
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/keystoneclient/tests/unit/v3/test_simple_cert.py
|
prasoon-uta/IBM-coud-storage
|
82a6876316715efbd0b492d0d467dde0ab26a56b
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/keystoneclient/tests/unit/v3/test_simple_cert.py
|
prasoon-uta/IBM-coud-storage
|
82a6876316715efbd0b492d0d467dde0ab26a56b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 IBM Corp.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testresources
from keystoneclient.tests.unit import client_fixtures
from keystoneclient.tests.unit.v3 import utils
class SimpleCertTests(utils.ClientTestCase, testresources.ResourcedTestCase):
    """Tests for the OS-SIMPLE-CERT extension: PEM certificate retrieval."""
    resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]
    def test_get_ca_certificate(self):
        # Stub the CA endpoint and check the PEM body is passed through.
        self.stub_url('GET', ['OS-SIMPLE-CERT', 'ca'],
                      headers={'Content-Type': 'application/x-pem-file'},
                      text=self.examples.SIGNING_CA)
        res = self.client.simple_cert.get_ca_certificates()
        self.assertEqual(self.examples.SIGNING_CA, res)
    def test_get_certificates(self):
        # Stub the signing-cert endpoint and check the PEM body is passed through.
        self.stub_url('GET', ['OS-SIMPLE-CERT', 'certificates'],
                      headers={'Content-Type': 'application/x-pem-file'},
                      text=self.examples.SIGNING_CERT)
        res = self.client.simple_cert.get_certificates()
        self.assertEqual(self.examples.SIGNING_CERT, res)
def load_tests(loader, tests, pattern):
    """unittest load_tests hook: share testresources fixtures across tests."""
    return testresources.OptimisingTestSuite(tests)
| 39.439024
| 77
| 0.713667
|
import testresources
from keystoneclient.tests.unit import client_fixtures
from keystoneclient.tests.unit.v3 import utils
class SimpleCertTests(utils.ClientTestCase, testresources.ResourcedTestCase):
resources = [('examples', client_fixtures.EXAMPLES_RESOURCE)]
def test_get_ca_certificate(self):
self.stub_url('GET', ['OS-SIMPLE-CERT', 'ca'],
headers={'Content-Type': 'application/x-pem-file'},
text=self.examples.SIGNING_CA)
res = self.client.simple_cert.get_ca_certificates()
self.assertEqual(self.examples.SIGNING_CA, res)
def test_get_certificates(self):
self.stub_url('GET', ['OS-SIMPLE-CERT', 'certificates'],
headers={'Content-Type': 'application/x-pem-file'},
text=self.examples.SIGNING_CERT)
res = self.client.simple_cert.get_certificates()
self.assertEqual(self.examples.SIGNING_CERT, res)
def load_tests(loader, tests, pattern):
return testresources.OptimisingTestSuite(tests)
| true
| true
|
1c4a24828c826c88606ac6a3b6cd095dfec7005a
| 15,703
|
py
|
Python
|
tests/chainer_tests/test_optimizer.py
|
toshihikoyanase/chainer
|
65b34a19d28f60f732c7069163ca23c710a309f4
|
[
"MIT"
] | null | null | null |
tests/chainer_tests/test_optimizer.py
|
toshihikoyanase/chainer
|
65b34a19d28f60f732c7069163ca23c710a309f4
|
[
"MIT"
] | 2
|
2018-01-09T23:05:30.000Z
|
2018-01-19T01:19:34.000Z
|
tests/chainer_tests/test_optimizer.py
|
bkvogel/chainer
|
894cd5d008f11eccdf6e1d7106f5b8bfff9ce005
|
[
"MIT"
] | null | null | null |
import copy
import unittest
import warnings
import mock
import numpy as np
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer import optimizer
from chainer import optimizers
from chainer import testing
from chainer.testing import attr
class TestHyperparameter(unittest.TestCase):
    """Tests for Hyperparameter: parent/child attribute chaining, repr, deepcopy."""
    def setUp(self):
        # Child overrides y and adds z; x is inherited from the parent.
        self.parent = optimizer.Hyperparameter()
        self.parent.x = 1
        self.parent.y = 2
        self.child = optimizer.Hyperparameter(self.parent)
        self.child.y = 3
        self.child.z = 4
    def test_getattr(self):
        self.assertTrue(hasattr(self.parent, 'x'))
        self.assertEqual(self.parent.x, 1)
        self.assertTrue(hasattr(self.parent, 'y'))
        self.assertEqual(self.parent.y, 2)
        self.assertFalse(hasattr(self.parent, 'z'))
        self.assertTrue(hasattr(self.child, 'x'))
        self.assertEqual(self.child.x, 1)
        self.assertTrue(hasattr(self.child, 'y'))
        self.assertEqual(self.child.y, 3)
        self.assertTrue(hasattr(self.child, 'z'))
        self.assertEqual(self.child.z, 4)
    def test_get_dict(self):
        self.assertEqual(self.parent.get_dict(), {'x': 1, 'y': 2})
        self.assertEqual(self.child.get_dict(), {'x': 1, 'y': 3, 'z': 4})
    def test_repr(self):
        self.assertEqual(repr(self.parent), 'Hyperparameter(x=1, y=2)')
        self.assertEqual(repr(self.child), 'Hyperparameter(x=1, y=3, z=4)')
    def test_deep_copy(self):
        # Deep-copying both together must preserve the parent link identity.
        parent_copy, child_copy = copy.deepcopy([self.parent, self.child])
        self.assertEqual(self.child.get_dict(), child_copy.get_dict())
        self.assertEqual(self.parent.get_dict(), parent_copy.get_dict())
        self.assertIs(child_copy.parent, parent_copy)
class TestUpdateRule(unittest.TestCase):
    """Tests for UpdateRule: CPU/GPU dispatch, hooks, enable flag, state transfer."""
    def setUp(self):
        self.data = np.ones((2, 3), np.float32)
        self.grad = np.ones_like(self.data)
        self.var = chainer.Variable(self.data, grad=self.grad)
        self.update_rule = optimizer.UpdateRule()
        # Mock both backends so dispatch can be observed via call counts.
        self.update_rule.update_core_cpu = mock.MagicMock()
        self.update_rule.update_core_gpu = mock.MagicMock()
    def test_update_cpu(self):
        self.update_rule.update(self.var)
        self.assertEqual(self.update_rule.update_core_cpu.call_count, 1)
        self.assertEqual(self.update_rule.update_core_gpu.call_count, 0)
    @attr.gpu
    def test_update_gpu(self):
        self.var.to_gpu()
        self.update_rule.update(self.var)
        self.assertEqual(self.update_rule.update_core_cpu.call_count, 0)
        self.assertEqual(self.update_rule.update_core_gpu.call_count, 1)
    def check_add_hook(self, hook):
        # A registered hook is called once with (update_rule, param).
        self.update_rule.update(self.var)
        self.assertEqual(hook.call_count, 1)
        args = hook.call_args_list[0][0]
        self.assertEqual(len(args), 2)
        self.assertIs(args[0], self.update_rule)
        self.assertIs(args[1], self.var)
    def test_add_hook(self):
        hook = mock.MagicMock()
        self.update_rule.add_hook(hook)
        self.check_add_hook(hook)
    def test_add_hook_with_name(self):
        hook = mock.MagicMock()
        self.update_rule.add_hook(hook, name='hook')
        self.check_add_hook(hook)
    def test_remove_hook(self):
        hook = mock.MagicMock()
        self.update_rule.add_hook(hook, name='hook')
        self.update_rule.remove_hook('hook')
        self.update_rule.update(self.var)
        self.assertEqual(hook.call_count, 0)
    def test_add_hook_with_function_name(self):
        # A plain function's __name__ serves as the default hook name.
        hook_body = mock.MagicMock()
        def foo(update_rule, data, grad):
            hook_body(update_rule, data, grad)
        self.update_rule.add_hook(foo)
        self.update_rule.remove_hook('foo')
        self.update_rule.update(self.var)
        self.assertEqual(hook_body.call_count, 0)
    def test_add_hook_no_name(self):
        # A callable with neither a name argument nor __name__ is rejected.
        class CallableWithoutName(object):
            def __call__(self, update_rule, param):
                pass
        with self.assertRaises(ValueError):
            self.update_rule.add_hook(CallableWithoutName())
    def test_add_hook_duplicated_name(self):
        self.update_rule.add_hook(mock.MagicMock(), name='foo')
        with self.assertRaises(ValueError):
            self.update_rule.add_hook(mock.MagicMock(), name='foo')
    def test_remove_hook_not_exist(self):
        with self.assertRaises(KeyError):
            self.update_rule.remove_hook('foo')
    def test_disabled_update_rule(self):
        # enabled=False suppresses update_core; re-enabling restores it.
        self.update_rule.update_core = mock.MagicMock()
        self.update_rule.enabled = False
        self.update_rule.update(self.var)
        self.assertEqual(self.update_rule.update_core.call_count, 0)
        self.update_rule.enabled = True
        self.update_rule.update(self.var)
        self.assertEqual(self.update_rule.update_core.call_count, 1)
    def setup_state(self):
        # Install an init_state that creates one scalar and one array entry.
        def init_state(data):
            state = self.update_rule.state
            state['a'] = 0
            state['b'] = np.array([1, 2, 3], dtype=np.float32)
        self.update_rule.init_state = init_state
    @attr.gpu
    def test_state_copy_to_gpu(self):
        self.setup_state()
        def update_core(param):
            # Arrays move to GPU with the param; plain ints stay ints.
            self.assertIsInstance(self.update_rule.state['a'], int)
            self.assertIsInstance(self.update_rule.state['b'], cuda.ndarray)
        self.update_rule.update_core = update_core
        self.var.to_gpu()
        self.update_rule.update(self.var)
    @attr.multi_gpu(2)
    def test_state_copy_to_another_gpu(self):
        self.setup_state()
        def update_core(param):
            self.assertIsInstance(self.update_rule.state['b'], cuda.ndarray)
            self.assertEqual(self.update_rule.state['b'].device.id, 1)
        # call update with arrays on GPU 0 (tested by another method)
        self.update_rule.update_core = lambda param: None
        self.update_rule.update(chainer.Variable(
            cuda.to_gpu(self.data, 0), grad=cuda.to_gpu(self.grad, 0)))
        # check if it copies the states correctly when arrays on another GPU
        # are passed
        self.update_rule.update_core = update_core
        self.update_rule.update(chainer.Variable(
            cuda.to_gpu(self.data, 1), grad=cuda.to_gpu(self.grad, 1)))
    @attr.gpu
    def test_state_copy_to_cpu(self):
        self.setup_state()
        def update_core(param):
            self.assertIsInstance(self.update_rule.state['a'], int)
            self.assertIsInstance(self.update_rule.state['b'], np.ndarray)
        # First update on GPU initializes state there; moving the param back
        # to CPU must bring the state arrays back as numpy.
        self.var.to_gpu()
        self.update_rule.update(self.var)
        self.var.to_cpu()
        self.update_rule.update_core = update_core
        self.update_rule.update(self.var)
class TestOptimizer(unittest.TestCase):
    """Tests for Optimizer.new_epoch and its auto/manual mode checks."""
    def setUp(self):
        self.optimizer = optimizer.Optimizer()
    def test_new_epoch(self):
        self.optimizer.new_epoch()
        self.assertEqual(1, self.optimizer.epoch)
    def test_invalid_new_epoch(self):
        # Manual new_epoch is forbidden when auto mode is on.
        self.optimizer.use_auto_new_epoch = True
        with self.assertRaises(RuntimeError):
            self.optimizer.new_epoch()
    def test_auto_new_epoch(self):
        self.optimizer.use_auto_new_epoch = True
        self.optimizer.new_epoch(auto=True)
        self.assertEqual(1, self.optimizer.epoch)
    def test_invalid_auto_new_epoch(self):
        # auto=True is forbidden when auto mode is off.
        with self.assertRaises(RuntimeError):
            self.optimizer.new_epoch(auto=True)
class TestOptimizerHook(unittest.TestCase):
    """Tests for optimizer-level hooks: registration, per-param dispatch, removal."""
    def setUp(self):
        self.optimizer = optimizer.Optimizer()
        self.target = SimpleLink(
            np.arange(6, dtype=np.float32).reshape(2, 3),
            np.arange(3, -3, -1, dtype=np.float32).reshape(2, 3))
    def test_add_hook(self):
        # call_for_each_param=False: hook is invoked once with the optimizer.
        h1 = mock.MagicMock(timing='pre')
        h1.call_for_each_param = False
        self.optimizer.setup(self.target)
        self.optimizer.add_hook(h1, 'h1')
        self.optimizer.call_hooks()
        h1.assert_called_with(self.optimizer)
    def test_add_hook_call_for_each_param(self):
        # call_for_each_param=True: hook receives (update_rule, param) instead.
        h1 = mock.MagicMock(timing='pre')
        h1.call_for_each_param = True
        self.optimizer.setup(self.target)
        self.optimizer.add_hook(h1, 'h1')
        self.optimizer.call_hooks()
        h1.assert_called_with(self.target.param.update_rule, self.target.param)
    def test_remove_hook(self):
        h1 = mock.MagicMock(timing='pre')
        self.optimizer.setup(self.target)
        self.optimizer.add_hook(h1, 'h1')
        self.optimizer.remove_hook('h1')
        self.optimizer.call_hooks()
        self.assertFalse(h1.called)
    def test_duplicated_hook(self):
        self.optimizer.setup(self.target)
        self.optimizer.add_hook(lambda s: None, 'h1', timing='pre')
        with self.assertRaises(KeyError):
            self.optimizer.add_hook(lambda s: None, 'h1', timing='pre')
    def test_invalid_hook(self):
        with self.assertRaises(TypeError):
            self.optimizer.add_hook(1)
    def test_add_hook_before_setup(self):
        # Hooks cannot be registered until a target link is set up.
        with self.assertRaises(RuntimeError):
            self.optimizer.add_hook(lambda s: None, 'h1')
class SimpleLink(chainer.Link):
    """A minimal link holding one parameter with preset data *w* and gradient *g*."""
    def __init__(self, w, g):
        super(SimpleLink, self).__init__()
        with self.init_scope():
            self.param = chainer.Parameter(w)
            self.param.grad = g
class TestGradientMethod(unittest.TestCase):
    """Tests for GradientMethod: update-rule creation and per-param update dispatch."""
    def setUp(self):
        self.optimizer = chainer.GradientMethod()
        self.target = chainer.ChainList(
            SimpleLink(np.arange(3).astype(np.float32),
                       np.arange(3).astype(np.float32)),
            SimpleLink(np.arange(3).astype(np.float32),
                       np.arange(3).astype(np.float32)))
        # Each created update rule is a MagicMock so calls can be asserted.
        self.optimizer.create_update_rule = mock.MagicMock
    def setup_cpu(self):
        self.optimizer.setup(self.target)
    def setup_gpu(self, device=None):
        self.target.to_gpu(device)
        self.optimizer.setup(self.target)
    def test_setup(self):
        # setup() creates one update rule per parameter, with no arguments.
        create_update_rule = mock.MagicMock()
        self.optimizer.create_update_rule = create_update_rule
        self.optimizer.setup(self.target)
        self.assertEqual(create_update_rule.call_count, 2)
        self.assertEqual(create_update_rule.call_args_list[0], [(), {}])
        self.assertEqual(create_update_rule.call_args_list[1], [(), {}])
    def check_update(self):
        # update() increments t and calls each param's rule exactly once.
        self.assertEqual(self.optimizer.t, 0)
        self.optimizer.update()
        self.assertEqual(self.optimizer.t, 1)
        self.target[0].param.update_rule.update.assert_called_once_with(
            self.target[0].param)
        self.target[1].param.update_rule.update.assert_called_once_with(
            self.target[1].param)
    def test_update_cpu(self):
        self.setup_cpu()
        self.check_update()
    @attr.gpu
    def test_update_gpu(self):
        self.setup_gpu()
        self.check_update()
@testing.parameterize(*testing.product({
'shape': [(4, 3, 2)],
'dtype': [np.float16, np.float32, np.float64],
'loss_scale': [None, 1, 10],
}))
class TestGradientMethodLossScale(unittest.TestCase):
def setUp(self):
param0_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
param0_grad = np.copy(param0_data)
param1_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
param1_grad = np.copy(param1_data)
self.target = chainer.ChainList(
SimpleLink(param0_data, param0_grad),
SimpleLink(param1_data, param1_grad))
lr = 1.0
if self.loss_scale is not None:
lr = self.loss_scale
for i in range(2):
self.target[i].param._loss_scale = self.loss_scale
self.optimizer = chainer.optimizers.SGD(lr)
def setup_cpu(self):
self.optimizer.setup(self.target)
def setup_gpu(self, device=None):
self.target.to_gpu(device)
self.optimizer.setup(self.target)
def check_update(self):
self.optimizer.update()
xp = backend.get_array_module(self.target[0].param)
expected_data = xp.zeros(self.shape, dtype=self.dtype)
rtol, atol = 1e-4, 1e-5
if self.dtype is np.float16:
rtol, atol = 1e-1, 1e-2
for i in range(2):
testing.assert_allclose(self.target[i].param.data, expected_data,
rtol=rtol, atol=atol)
def test_update_cpu(self):
self.setup_cpu()
self.check_update()
@attr.gpu
def test_update_gpu(self):
self.setup_gpu()
self.check_update()
class TestCleargradHook(unittest.TestCase):
def setUp(self):
self.target = SimpleLink(
np.arange(6, dtype=np.float32).reshape(2, 3),
np.arange(3, -3, -1, dtype=np.float32).reshape(2, 3))
def check_cleargrad(self):
opt = optimizers.SGD(lr=1)
opt.setup(self.target)
opt.add_hook(CleargradHook(self))
opt.add_hook(DummyHook(self))
opt.update()
def test_cleargrad_cpu(self):
self.check_cleargrad()
@attr.gpu
def test_cleargrad_gpu(self):
self.target.to_gpu()
self.check_cleargrad()
class DummyOptimizer(chainer.GradientMethod):
def __init__(self, test):
super(DummyOptimizer, self).__init__()
self.test = test
def create_update_rule(self):
return mock.MagicMock()
class DummyHook(object):
name = 'Dummy'
timing = 'pre'
def __init__(self, test):
self.test = test
def __call__(self, opt):
for param in opt.target.params():
# Confirm all grads are not None
self.test.assertIsNotNone(param.grad)
class CleargradHook(object):
name = 'Cleargrad'
timing = 'pre'
def __init__(self, _):
pass
def __call__(self, opt):
for param in opt.target.params():
# Clear all grads
param.cleargrad()
class TestGradientMethodClearGrads(unittest.TestCase):
def setUp(self):
self.optimizer = DummyOptimizer(self)
self.target = SimpleLink(
np.arange(3).astype(np.float32),
np.arange(3).astype(np.float32))
self.optimizer.setup(self.target)
self.optimizer.add_hook(DummyHook(self))
def test_update(self):
self.target.cleargrads()
self.optimizer.update()
class TestDeprecatedOptimizerHooksEmitsWarning(unittest.TestCase):
def setUp(self):
self.context = warnings.catch_warnings(record=True)
self.warnings = self.context.__enter__()
warnings.filterwarnings(action='always', category=DeprecationWarning)
def tearDown(self):
self.context.__exit__()
def test_gradient_clipping(self):
chainer.optimizer.GradientClipping(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_gradient_hard_clipping(self):
chainer.optimizer.GradientHardClipping(1., 2.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_gradient_noise(self):
chainer.optimizer.GradientNoise(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_lasso(self):
chainer.optimizer.Lasso(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_weight_decay(self):
chainer.optimizer.WeightDecay(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
testing.run_module(__name__, __file__)
| 32.244353
| 79
| 0.650513
|
import copy
import unittest
import warnings
import mock
import numpy as np
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer import optimizer
from chainer import optimizers
from chainer import testing
from chainer.testing import attr
class TestHyperparameter(unittest.TestCase):
def setUp(self):
self.parent = optimizer.Hyperparameter()
self.parent.x = 1
self.parent.y = 2
self.child = optimizer.Hyperparameter(self.parent)
self.child.y = 3
self.child.z = 4
def test_getattr(self):
self.assertTrue(hasattr(self.parent, 'x'))
self.assertEqual(self.parent.x, 1)
self.assertTrue(hasattr(self.parent, 'y'))
self.assertEqual(self.parent.y, 2)
self.assertFalse(hasattr(self.parent, 'z'))
self.assertTrue(hasattr(self.child, 'x'))
self.assertEqual(self.child.x, 1)
self.assertTrue(hasattr(self.child, 'y'))
self.assertEqual(self.child.y, 3)
self.assertTrue(hasattr(self.child, 'z'))
self.assertEqual(self.child.z, 4)
def test_get_dict(self):
self.assertEqual(self.parent.get_dict(), {'x': 1, 'y': 2})
self.assertEqual(self.child.get_dict(), {'x': 1, 'y': 3, 'z': 4})
def test_repr(self):
self.assertEqual(repr(self.parent), 'Hyperparameter(x=1, y=2)')
self.assertEqual(repr(self.child), 'Hyperparameter(x=1, y=3, z=4)')
def test_deep_copy(self):
parent_copy, child_copy = copy.deepcopy([self.parent, self.child])
self.assertEqual(self.child.get_dict(), child_copy.get_dict())
self.assertEqual(self.parent.get_dict(), parent_copy.get_dict())
self.assertIs(child_copy.parent, parent_copy)
class TestUpdateRule(unittest.TestCase):
def setUp(self):
self.data = np.ones((2, 3), np.float32)
self.grad = np.ones_like(self.data)
self.var = chainer.Variable(self.data, grad=self.grad)
self.update_rule = optimizer.UpdateRule()
self.update_rule.update_core_cpu = mock.MagicMock()
self.update_rule.update_core_gpu = mock.MagicMock()
def test_update_cpu(self):
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core_cpu.call_count, 1)
self.assertEqual(self.update_rule.update_core_gpu.call_count, 0)
@attr.gpu
def test_update_gpu(self):
self.var.to_gpu()
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core_cpu.call_count, 0)
self.assertEqual(self.update_rule.update_core_gpu.call_count, 1)
def check_add_hook(self, hook):
self.update_rule.update(self.var)
self.assertEqual(hook.call_count, 1)
args = hook.call_args_list[0][0]
self.assertEqual(len(args), 2)
self.assertIs(args[0], self.update_rule)
self.assertIs(args[1], self.var)
def test_add_hook(self):
hook = mock.MagicMock()
self.update_rule.add_hook(hook)
self.check_add_hook(hook)
def test_add_hook_with_name(self):
hook = mock.MagicMock()
self.update_rule.add_hook(hook, name='hook')
self.check_add_hook(hook)
def test_remove_hook(self):
hook = mock.MagicMock()
self.update_rule.add_hook(hook, name='hook')
self.update_rule.remove_hook('hook')
self.update_rule.update(self.var)
self.assertEqual(hook.call_count, 0)
def test_add_hook_with_function_name(self):
hook_body = mock.MagicMock()
def foo(update_rule, data, grad):
hook_body(update_rule, data, grad)
self.update_rule.add_hook(foo)
self.update_rule.remove_hook('foo')
self.update_rule.update(self.var)
self.assertEqual(hook_body.call_count, 0)
def test_add_hook_no_name(self):
class CallableWithoutName(object):
def __call__(self, update_rule, param):
pass
with self.assertRaises(ValueError):
self.update_rule.add_hook(CallableWithoutName())
def test_add_hook_duplicated_name(self):
self.update_rule.add_hook(mock.MagicMock(), name='foo')
with self.assertRaises(ValueError):
self.update_rule.add_hook(mock.MagicMock(), name='foo')
def test_remove_hook_not_exist(self):
with self.assertRaises(KeyError):
self.update_rule.remove_hook('foo')
def test_disabled_update_rule(self):
self.update_rule.update_core = mock.MagicMock()
self.update_rule.enabled = False
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core.call_count, 0)
self.update_rule.enabled = True
self.update_rule.update(self.var)
self.assertEqual(self.update_rule.update_core.call_count, 1)
def setup_state(self):
def init_state(data):
state = self.update_rule.state
state['a'] = 0
state['b'] = np.array([1, 2, 3], dtype=np.float32)
self.update_rule.init_state = init_state
@attr.gpu
def test_state_copy_to_gpu(self):
self.setup_state()
def update_core(param):
self.assertIsInstance(self.update_rule.state['a'], int)
self.assertIsInstance(self.update_rule.state['b'], cuda.ndarray)
self.update_rule.update_core = update_core
self.var.to_gpu()
self.update_rule.update(self.var)
@attr.multi_gpu(2)
def test_state_copy_to_another_gpu(self):
self.setup_state()
def update_core(param):
self.assertIsInstance(self.update_rule.state['b'], cuda.ndarray)
self.assertEqual(self.update_rule.state['b'].device.id, 1)
self.update_rule.update_core = lambda param: None
self.update_rule.update(chainer.Variable(
cuda.to_gpu(self.data, 0), grad=cuda.to_gpu(self.grad, 0)))
self.update_rule.update_core = update_core
self.update_rule.update(chainer.Variable(
cuda.to_gpu(self.data, 1), grad=cuda.to_gpu(self.grad, 1)))
@attr.gpu
def test_state_copy_to_cpu(self):
self.setup_state()
def update_core(param):
self.assertIsInstance(self.update_rule.state['a'], int)
self.assertIsInstance(self.update_rule.state['b'], np.ndarray)
self.var.to_gpu()
self.update_rule.update(self.var)
self.var.to_cpu()
self.update_rule.update_core = update_core
self.update_rule.update(self.var)
class TestOptimizer(unittest.TestCase):
def setUp(self):
self.optimizer = optimizer.Optimizer()
def test_new_epoch(self):
self.optimizer.new_epoch()
self.assertEqual(1, self.optimizer.epoch)
def test_invalid_new_epoch(self):
self.optimizer.use_auto_new_epoch = True
with self.assertRaises(RuntimeError):
self.optimizer.new_epoch()
def test_auto_new_epoch(self):
self.optimizer.use_auto_new_epoch = True
self.optimizer.new_epoch(auto=True)
self.assertEqual(1, self.optimizer.epoch)
def test_invalid_auto_new_epoch(self):
with self.assertRaises(RuntimeError):
self.optimizer.new_epoch(auto=True)
class TestOptimizerHook(unittest.TestCase):
def setUp(self):
self.optimizer = optimizer.Optimizer()
self.target = SimpleLink(
np.arange(6, dtype=np.float32).reshape(2, 3),
np.arange(3, -3, -1, dtype=np.float32).reshape(2, 3))
def test_add_hook(self):
h1 = mock.MagicMock(timing='pre')
h1.call_for_each_param = False
self.optimizer.setup(self.target)
self.optimizer.add_hook(h1, 'h1')
self.optimizer.call_hooks()
h1.assert_called_with(self.optimizer)
def test_add_hook_call_for_each_param(self):
h1 = mock.MagicMock(timing='pre')
h1.call_for_each_param = True
self.optimizer.setup(self.target)
self.optimizer.add_hook(h1, 'h1')
self.optimizer.call_hooks()
h1.assert_called_with(self.target.param.update_rule, self.target.param)
def test_remove_hook(self):
h1 = mock.MagicMock(timing='pre')
self.optimizer.setup(self.target)
self.optimizer.add_hook(h1, 'h1')
self.optimizer.remove_hook('h1')
self.optimizer.call_hooks()
self.assertFalse(h1.called)
def test_duplicated_hook(self):
self.optimizer.setup(self.target)
self.optimizer.add_hook(lambda s: None, 'h1', timing='pre')
with self.assertRaises(KeyError):
self.optimizer.add_hook(lambda s: None, 'h1', timing='pre')
def test_invalid_hook(self):
with self.assertRaises(TypeError):
self.optimizer.add_hook(1)
def test_add_hook_before_setup(self):
with self.assertRaises(RuntimeError):
self.optimizer.add_hook(lambda s: None, 'h1')
class SimpleLink(chainer.Link):
def __init__(self, w, g):
super(SimpleLink, self).__init__()
with self.init_scope():
self.param = chainer.Parameter(w)
self.param.grad = g
class TestGradientMethod(unittest.TestCase):
def setUp(self):
self.optimizer = chainer.GradientMethod()
self.target = chainer.ChainList(
SimpleLink(np.arange(3).astype(np.float32),
np.arange(3).astype(np.float32)),
SimpleLink(np.arange(3).astype(np.float32),
np.arange(3).astype(np.float32)))
self.optimizer.create_update_rule = mock.MagicMock
def setup_cpu(self):
self.optimizer.setup(self.target)
def setup_gpu(self, device=None):
self.target.to_gpu(device)
self.optimizer.setup(self.target)
def test_setup(self):
create_update_rule = mock.MagicMock()
self.optimizer.create_update_rule = create_update_rule
self.optimizer.setup(self.target)
self.assertEqual(create_update_rule.call_count, 2)
self.assertEqual(create_update_rule.call_args_list[0], [(), {}])
self.assertEqual(create_update_rule.call_args_list[1], [(), {}])
def check_update(self):
self.assertEqual(self.optimizer.t, 0)
self.optimizer.update()
self.assertEqual(self.optimizer.t, 1)
self.target[0].param.update_rule.update.assert_called_once_with(
self.target[0].param)
self.target[1].param.update_rule.update.assert_called_once_with(
self.target[1].param)
def test_update_cpu(self):
self.setup_cpu()
self.check_update()
@attr.gpu
def test_update_gpu(self):
self.setup_gpu()
self.check_update()
@testing.parameterize(*testing.product({
'shape': [(4, 3, 2)],
'dtype': [np.float16, np.float32, np.float64],
'loss_scale': [None, 1, 10],
}))
class TestGradientMethodLossScale(unittest.TestCase):
def setUp(self):
param0_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
param0_grad = np.copy(param0_data)
param1_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
param1_grad = np.copy(param1_data)
self.target = chainer.ChainList(
SimpleLink(param0_data, param0_grad),
SimpleLink(param1_data, param1_grad))
lr = 1.0
if self.loss_scale is not None:
lr = self.loss_scale
for i in range(2):
self.target[i].param._loss_scale = self.loss_scale
self.optimizer = chainer.optimizers.SGD(lr)
def setup_cpu(self):
self.optimizer.setup(self.target)
def setup_gpu(self, device=None):
self.target.to_gpu(device)
self.optimizer.setup(self.target)
def check_update(self):
self.optimizer.update()
xp = backend.get_array_module(self.target[0].param)
expected_data = xp.zeros(self.shape, dtype=self.dtype)
rtol, atol = 1e-4, 1e-5
if self.dtype is np.float16:
rtol, atol = 1e-1, 1e-2
for i in range(2):
testing.assert_allclose(self.target[i].param.data, expected_data,
rtol=rtol, atol=atol)
def test_update_cpu(self):
self.setup_cpu()
self.check_update()
@attr.gpu
def test_update_gpu(self):
self.setup_gpu()
self.check_update()
class TestCleargradHook(unittest.TestCase):
def setUp(self):
self.target = SimpleLink(
np.arange(6, dtype=np.float32).reshape(2, 3),
np.arange(3, -3, -1, dtype=np.float32).reshape(2, 3))
def check_cleargrad(self):
opt = optimizers.SGD(lr=1)
opt.setup(self.target)
opt.add_hook(CleargradHook(self))
opt.add_hook(DummyHook(self))
opt.update()
def test_cleargrad_cpu(self):
self.check_cleargrad()
@attr.gpu
def test_cleargrad_gpu(self):
self.target.to_gpu()
self.check_cleargrad()
class DummyOptimizer(chainer.GradientMethod):
def __init__(self, test):
super(DummyOptimizer, self).__init__()
self.test = test
def create_update_rule(self):
return mock.MagicMock()
class DummyHook(object):
name = 'Dummy'
timing = 'pre'
def __init__(self, test):
self.test = test
def __call__(self, opt):
for param in opt.target.params():
self.test.assertIsNotNone(param.grad)
class CleargradHook(object):
name = 'Cleargrad'
timing = 'pre'
def __init__(self, _):
pass
def __call__(self, opt):
for param in opt.target.params():
param.cleargrad()
class TestGradientMethodClearGrads(unittest.TestCase):
def setUp(self):
self.optimizer = DummyOptimizer(self)
self.target = SimpleLink(
np.arange(3).astype(np.float32),
np.arange(3).astype(np.float32))
self.optimizer.setup(self.target)
self.optimizer.add_hook(DummyHook(self))
def test_update(self):
self.target.cleargrads()
self.optimizer.update()
class TestDeprecatedOptimizerHooksEmitsWarning(unittest.TestCase):
def setUp(self):
self.context = warnings.catch_warnings(record=True)
self.warnings = self.context.__enter__()
warnings.filterwarnings(action='always', category=DeprecationWarning)
def tearDown(self):
self.context.__exit__()
def test_gradient_clipping(self):
chainer.optimizer.GradientClipping(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_gradient_hard_clipping(self):
chainer.optimizer.GradientHardClipping(1., 2.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_gradient_noise(self):
chainer.optimizer.GradientNoise(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_lasso(self):
chainer.optimizer.Lasso(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
def test_weight_decay(self):
chainer.optimizer.WeightDecay(1.)
self.assertEqual(len(self.warnings), 1)
self.assertIs(self.warnings[-1].category, DeprecationWarning)
testing.run_module(__name__, __file__)
| true
| true
|
1c4a24cc773c14d0bd1e71136945beaa0f74f406
| 880
|
py
|
Python
|
algorithms/sorting/bubble_sort.py
|
FatiahBalo/python-ds
|
9eb88425822b6da4d7bd673a124c13fbe6f17523
|
[
"MIT"
] | 1,723
|
2019-07-30T07:06:22.000Z
|
2022-03-31T15:22:22.000Z
|
algorithms/sorting/bubble_sort.py
|
FatiahBalo/python-ds
|
9eb88425822b6da4d7bd673a124c13fbe6f17523
|
[
"MIT"
] | 213
|
2019-10-06T08:07:47.000Z
|
2021-10-04T15:38:36.000Z
|
algorithms/sorting/bubble_sort.py
|
FatiahBalo/python-ds
|
9eb88425822b6da4d7bd673a124c13fbe6f17523
|
[
"MIT"
] | 628
|
2019-10-06T10:26:25.000Z
|
2022-03-31T01:41:00.000Z
|
"""
Bubble Sort worst time complexity occurs when array is reverse sorted - O(n^2)
Best time scenario is when array is already sorted - O(n)
"""
def bubble_sort(array):
n = len(array)
for i in range(n):
for j in range(0, n-i-1):
if array[j] > array[j+1]:
array[j], array[j+1] = array[j+1], array[j]
return array
def bubble_sort_optimized(array):
"""
Optimizes on bubble sort by taking care of already swapped cases
Reference - https://github.com/prabhupant/python-ds/pull/346
"""
has_swapped = True
num_of_iterations = 0
while has_swapped:
has_swapped = False
for i in range(len(array) - num_of_iterations - 1):
if array[i] > array[i + 1]:
array[i], array[i + 1] = array[i + 1], array[i]
has_swapped = True
num_of_iterations += 1
| 28.387097
| 78
| 0.590909
|
def bubble_sort(array):
n = len(array)
for i in range(n):
for j in range(0, n-i-1):
if array[j] > array[j+1]:
array[j], array[j+1] = array[j+1], array[j]
return array
def bubble_sort_optimized(array):
has_swapped = True
num_of_iterations = 0
while has_swapped:
has_swapped = False
for i in range(len(array) - num_of_iterations - 1):
if array[i] > array[i + 1]:
array[i], array[i + 1] = array[i + 1], array[i]
has_swapped = True
num_of_iterations += 1
| true
| true
|
1c4a25bdb5c01fe4151a35fa9ee19f84f4205240
| 59,338
|
py
|
Python
|
sdks/python/apache_beam/runners/common.py
|
rehmanmuradali/beam
|
de8ff705145cbbc41bea7750a0a5d3553924ab3a
|
[
"Apache-2.0"
] | 1
|
2022-01-24T22:07:52.000Z
|
2022-01-24T22:07:52.000Z
|
sdks/python/apache_beam/runners/common.py
|
rehmanmuradali/beam
|
de8ff705145cbbc41bea7750a0a5d3553924ab3a
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/apache_beam/runners/common.py
|
rehmanmuradali/beam
|
de8ff705145cbbc41bea7750a0a5d3553924ab3a
|
[
"Apache-2.0"
] | 1
|
2019-05-21T11:30:31.000Z
|
2019-05-21T11:30:31.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# cython: profile=True
"""Worker operations executor.
For internal use only; no backwards-compatibility guarantees.
"""
# pytype: skip-file
from __future__ import absolute_import
from __future__ import division
import threading
import traceback
from builtins import next
from builtins import object
from builtins import round
from builtins import zip
from typing import TYPE_CHECKING
from typing import Any
from typing import Dict
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
from future.utils import raise_with_traceback
from past.builtins import unicode
from apache_beam.coders import TupleCoder
from apache_beam.internal import util
from apache_beam.options.value_provider import RuntimeValueProvider
from apache_beam.pvalue import TaggedOutput
from apache_beam.runners.sdf_utils import NoOpWatermarkEstimatorProvider
from apache_beam.runners.sdf_utils import RestrictionTrackerView
from apache_beam.runners.sdf_utils import SplitResultPrimary
from apache_beam.runners.sdf_utils import SplitResultResidual
from apache_beam.runners.sdf_utils import ThreadsafeRestrictionTracker
from apache_beam.runners.sdf_utils import ThreadsafeWatermarkEstimator
from apache_beam.transforms import DoFn
from apache_beam.transforms import core
from apache_beam.transforms import userstate
from apache_beam.transforms.core import RestrictionProvider
from apache_beam.transforms.core import WatermarkEstimatorProvider
from apache_beam.transforms.window import GlobalWindow
from apache_beam.transforms.window import TimestampedValue
from apache_beam.transforms.window import WindowFn
from apache_beam.utils.counters import Counter
from apache_beam.utils.counters import CounterName
from apache_beam.utils.timestamp import Timestamp
from apache_beam.utils.windowed_value import WindowedValue
if TYPE_CHECKING:
from apache_beam.transforms import sideinputs
from apache_beam.transforms.core import TimerSpec
from apache_beam.io.iobase import RestrictionProgress
from apache_beam.iobase import RestrictionTracker
from apache_beam.iobase import WatermarkEstimator
class NameContext(object):
"""Holds the name information for a step."""
def __init__(self, step_name, transform_id=None):
# type: (str, Optional[str]) -> None
"""Creates a new step NameContext.
Args:
step_name: The name of the step.
"""
self.step_name = step_name
self.transform_id = transform_id
def __eq__(self, other):
return self.step_name == other.step_name
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
def __repr__(self):
return 'NameContext(%s)' % self.__dict__
def __hash__(self):
return hash(self.step_name)
def metrics_name(self):
"""Returns the step name used for metrics reporting."""
return self.step_name
def logging_name(self):
"""Returns the step name used for logging."""
return self.step_name
# TODO(BEAM-4028): Move DataflowNameContext to Dataflow internal code.
class DataflowNameContext(NameContext):
"""Holds the name information for a step in Dataflow.
This includes a step_name (e.g. s2), a user_name (e.g. Foo/Bar/ParDo(Fab)),
and a system_name (e.g. s2-shuffle-read34)."""
def __init__(self, step_name, user_name, system_name):
"""Creates a new step NameContext.
Args:
step_name: The internal name of the step (e.g. s2).
user_name: The full user-given name of the step (e.g. Foo/Bar/ParDo(Far)).
system_name: The step name in the optimized graph (e.g. s2-1).
"""
super(DataflowNameContext, self).__init__(step_name)
self.user_name = user_name
self.system_name = system_name
def __eq__(self, other):
return (
self.step_name == other.step_name and
self.user_name == other.user_name and
self.system_name == other.system_name)
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
def __hash__(self):
return hash((self.step_name, self.user_name, self.system_name))
def __repr__(self):
return 'DataflowNameContext(%s)' % self.__dict__
def logging_name(self):
"""Stackdriver logging relies on user-given step names (e.g. Foo/Bar)."""
return self.user_name
class Receiver(object):
"""For internal use only; no backwards-compatibility guarantees.
An object that consumes a WindowedValue.
This class can be efficiently used to pass values between the
sdk and worker harnesses.
"""
def receive(self, windowed_value):
# type: (WindowedValue) -> None
raise NotImplementedError
class MethodWrapper(object):
"""For internal use only; no backwards-compatibility guarantees.
Represents a method that can be invoked by `DoFnInvoker`."""
def __init__(self, obj_to_invoke, method_name):
"""
Initiates a ``MethodWrapper``.
Args:
obj_to_invoke: the object that contains the method. Has to either be a
`DoFn` object or a `RestrictionProvider` object.
method_name: name of the method as a string.
"""
if not isinstance(obj_to_invoke,
(DoFn, RestrictionProvider, WatermarkEstimatorProvider)):
raise ValueError(
'\'obj_to_invoke\' has to be either a \'DoFn\' or '
'a \'RestrictionProvider\'. Received %r instead.' % obj_to_invoke)
self.args, self.defaults = core.get_function_arguments(obj_to_invoke,
method_name)
# TODO(BEAM-5878) support kwonlyargs on Python 3.
self.method_value = getattr(obj_to_invoke, method_name)
self.has_userstate_arguments = False
self.state_args_to_replace = {} # type: Dict[str, core.StateSpec]
self.timer_args_to_replace = {} # type: Dict[str, core.TimerSpec]
self.timestamp_arg_name = None # type: Optional[str]
self.window_arg_name = None # type: Optional[str]
self.key_arg_name = None # type: Optional[str]
self.restriction_provider = None
self.restriction_provider_arg_name = None
self.watermark_estimator_provider = None
self.watermark_estimator_provider_arg_name = None
if hasattr(self.method_value, 'unbounded_per_element'):
self.unbounded_per_element = True
else:
self.unbounded_per_element = False
for kw, v in zip(self.args[-len(self.defaults):], self.defaults):
if isinstance(v, core.DoFn.StateParam):
self.state_args_to_replace[kw] = v.state_spec
self.has_userstate_arguments = True
elif isinstance(v, core.DoFn.TimerParam):
self.timer_args_to_replace[kw] = v.timer_spec
self.has_userstate_arguments = True
elif core.DoFn.TimestampParam == v:
self.timestamp_arg_name = kw
elif core.DoFn.WindowParam == v:
self.window_arg_name = kw
elif core.DoFn.KeyParam == v:
self.key_arg_name = kw
elif isinstance(v, core.DoFn.RestrictionParam):
self.restriction_provider = v.restriction_provider
self.restriction_provider_arg_name = kw
elif isinstance(v, core.DoFn.WatermarkEstimatorParam):
self.watermark_estimator_provider = v.watermark_estimator_provider
self.watermark_estimator_provider_arg_name = kw
# Create NoOpWatermarkEstimatorProvider if there is no
# WatermarkEstimatorParam provided.
if self.watermark_estimator_provider is None:
self.watermark_estimator_provider = NoOpWatermarkEstimatorProvider()
def invoke_timer_callback(
self, user_state_context, key, window, timestamp, pane_info):
# TODO(ccy): support side inputs.
kwargs = {}
if self.has_userstate_arguments:
for kw, state_spec in self.state_args_to_replace.items():
kwargs[kw] = user_state_context.get_state(state_spec, key, window)
for kw, timer_spec in self.timer_args_to_replace.items():
kwargs[kw] = user_state_context.get_timer(
timer_spec, key, window, timestamp, pane_info)
if self.timestamp_arg_name:
kwargs[self.timestamp_arg_name] = Timestamp.of(timestamp)
if self.window_arg_name:
kwargs[self.window_arg_name] = window
if self.key_arg_name:
kwargs[self.key_arg_name] = key
if kwargs:
return self.method_value(**kwargs)
else:
return self.method_value()
class DoFnSignature(object):
"""Represents the signature of a given ``DoFn`` object.
Signature of a ``DoFn`` provides a view of the properties of a given ``DoFn``.
Among other things, this will give an extensible way for for (1) accessing the
structure of the ``DoFn`` including methods and method parameters
(2) identifying features that a given ``DoFn`` support, for example, whether
a given ``DoFn`` is a Splittable ``DoFn`` (
https://s.apache.org/splittable-do-fn) (3) validating a ``DoFn`` based on the
feature set offered by it.
"""
def __init__(self, do_fn):
# type: (core.DoFn) -> None
# We add a property here for all methods defined by Beam DoFn features.
assert isinstance(do_fn, core.DoFn)
self.do_fn = do_fn
self.process_method = MethodWrapper(do_fn, 'process')
self.start_bundle_method = MethodWrapper(do_fn, 'start_bundle')
self.finish_bundle_method = MethodWrapper(do_fn, 'finish_bundle')
self.setup_lifecycle_method = MethodWrapper(do_fn, 'setup')
self.teardown_lifecycle_method = MethodWrapper(do_fn, 'teardown')
restriction_provider = self.get_restriction_provider()
watermark_estimator_provider = self.get_watermark_estimator_provider()
self.create_watermark_estimator_method = (
MethodWrapper(
watermark_estimator_provider, 'create_watermark_estimator'))
self.initial_restriction_method = (
MethodWrapper(restriction_provider, 'initial_restriction')
if restriction_provider else None)
self.create_tracker_method = (
MethodWrapper(restriction_provider, 'create_tracker')
if restriction_provider else None)
self.split_method = (
MethodWrapper(restriction_provider, 'split')
if restriction_provider else None)
self._validate()
# Handle stateful DoFns.
self._is_stateful_dofn = userstate.is_stateful_dofn(do_fn)
self.timer_methods = {} # type: Dict[TimerSpec, MethodWrapper]
if self._is_stateful_dofn:
# Populate timer firing methods, keyed by TimerSpec.
_, all_timer_specs = userstate.get_dofn_specs(do_fn)
for timer_spec in all_timer_specs:
method = timer_spec._attached_callback
self.timer_methods[timer_spec] = MethodWrapper(do_fn, method.__name__)
def get_restriction_provider(self):
# type: () -> RestrictionProvider
return self.process_method.restriction_provider
def get_watermark_estimator_provider(self):
# type: () -> WatermarkEstimatorProvider
return self.process_method.watermark_estimator_provider
def is_unbounded_per_element(self):
return self.process_method.unbounded_per_element
def _validate(self):
# type: () -> None
self._validate_process()
self._validate_bundle_method(self.start_bundle_method)
self._validate_bundle_method(self.finish_bundle_method)
self._validate_stateful_dofn()
def _validate_process(self):
# type: () -> None
"""Validate that none of the DoFnParameters are repeated in the function
"""
param_ids = [
d.param_id for d in self.process_method.defaults
if isinstance(d, core._DoFnParam)
]
if len(param_ids) != len(set(param_ids)):
raise ValueError(
'DoFn %r has duplicate process method parameters: %s.' %
(self.do_fn, param_ids))
def _validate_bundle_method(self, method_wrapper):
"""Validate that none of the DoFnParameters are used in the function
"""
for param in core.DoFn.DoFnProcessParams:
if param in method_wrapper.defaults:
raise ValueError(
'DoFn.process() method-only parameter %s cannot be used in %s.' %
(param, method_wrapper))
def _validate_stateful_dofn(self):
# type: () -> None
userstate.validate_stateful_dofn(self.do_fn)
def is_splittable_dofn(self):
# type: () -> bool
return self.get_restriction_provider() is not None
def get_restriction_coder(self):
# type: () -> Optional[TupleCoder]
"""Get coder for a restriction when processing an SDF. """
if self.is_splittable_dofn():
return TupleCoder([
(self.get_restriction_provider().restriction_coder()),
(self.get_watermark_estimator_provider().estimator_state_coder())
])
else:
return None
def is_stateful_dofn(self):
# type: () -> bool
return self._is_stateful_dofn
def has_timers(self):
# type: () -> bool
_, all_timer_specs = userstate.get_dofn_specs(self.do_fn)
return bool(all_timer_specs)
def has_bundle_finalization(self):
for sig in (self.start_bundle_method,
self.process_method,
self.finish_bundle_method):
for d in sig.defaults:
try:
if d == DoFn.BundleFinalizerParam:
return True
except Exception: # pylint: disable=broad-except
# Default value might be incomparable.
pass
return False
class DoFnInvoker(object):
  """An abstraction that can be used to execute DoFn methods.

  A DoFnInvoker describes a particular way for invoking methods of a DoFn
  represented by a given DoFnSignature."""

  def __init__(self,
               output_processor,  # type: OutputProcessor
               signature  # type: DoFnSignature
              ):
    # type: (...) -> None
    """
    Initializes `DoFnInvoker`

    :param output_processor: an OutputProcessor for receiving elements produced
                             by invoking functions of the DoFn.
    :param signature: a DoFnSignature for the DoFn being invoked
    """
    self.output_processor = output_processor
    self.signature = signature
    # Populated by subclasses / create_invoker when the DoFn is stateful.
    self.user_state_context = None  # type: Optional[userstate.UserStateContext]
    self.bundle_finalizer_param = None  # type: Optional[core._BundleFinalizerParam]

  @staticmethod
  def create_invoker(
      signature,  # type: DoFnSignature
      output_processor,  # type: _OutputProcessor
      context=None,  # type: Optional[DoFnContext]
      side_inputs=None,  # type: Optional[List[sideinputs.SideInputMap]]
      input_args=None, input_kwargs=None,
      process_invocation=True,
      user_state_context=None,  # type: Optional[userstate.UserStateContext]
      bundle_finalizer_param=None  # type: Optional[core._BundleFinalizerParam]
  ):
    # type: (...) -> DoFnInvoker
    """ Creates a new DoFnInvoker based on given arguments.

    Args:
        output_processor: an OutputProcessor for receiving elements produced by
                          invoking functions of the DoFn.
        signature: a DoFnSignature for the DoFn being invoked.
        context: Context to be used when invoking the DoFn (deprecated).
        side_inputs: side inputs to be used when invoking th process method.
        input_args: arguments to be used when invoking the process method. Some
                    of the arguments given here might be placeholders (for
                    example for side inputs) that get filled before invoking the
                    process method.
        input_kwargs: keyword arguments to be used when invoking the process
                      method. Some of the keyword arguments given here might be
                      placeholders (for example for side inputs) that get filled
                      before invoking the process method.
        process_invocation: If True, this function may return an invoker that
                            performs extra optimizations for invoking process()
                            method efficiently.
        user_state_context: The UserStateContext instance for the current
                            Stateful DoFn.
        bundle_finalizer_param: The param that passed to a process method, which
                                allows a callback to be registered.
    """
    side_inputs = side_inputs or []
    default_arg_values = signature.process_method.defaults
    # The fast-path SimpleInvoker is only usable when process() takes nothing
    # beyond the element: no side inputs, extra args, DoFn params or state.
    use_simple_invoker = not process_invocation or (
        not side_inputs and not input_args and not input_kwargs and
        not default_arg_values and not signature.is_stateful_dofn())
    if use_simple_invoker:
      return SimpleInvoker(output_processor, signature)
    else:
      if context is None:
        raise TypeError("Must provide context when not using SimpleInvoker")
      return PerWindowInvoker(
          output_processor,
          signature,
          context,
          side_inputs,
          input_args,
          input_kwargs,
          user_state_context,
          bundle_finalizer_param)

  def invoke_process(self,
                     windowed_value,  # type: WindowedValue
                     restriction=None,
                     watermark_estimator_state=None,
                     additional_args=None,
                     additional_kwargs=None
                    ):
    # type: (...) -> Iterable[SplitResultResidual]
    """Invokes the DoFn.process() function.

    Args:
      windowed_value: a WindowedValue object that gives the element for which
                      process() method should be invoked along with the window
                      the element belongs to.
      restriction: The restriction to use when executing this splittable DoFn.
                   Should only be specified for splittable DoFns.
      watermark_estimator_state: The watermark estimator state to use when
                                 executing this splittable DoFn. Should only
                                 be specified for splittable DoFns.
      additional_args: additional arguments to be passed to the current
                      `DoFn.process()` invocation, usually as side inputs.
      additional_kwargs: additional keyword arguments to be passed to the
                         current `DoFn.process()` invocation.
    """
    raise NotImplementedError

  def invoke_setup(self):
    # type: () -> None
    """Invokes the DoFn.setup() method
    """
    self.signature.setup_lifecycle_method.method_value()

  def invoke_start_bundle(self):
    # type: () -> None
    """Invokes the DoFn.start_bundle() method.
    """
    # start_bundle() may itself emit values; route them through the processor.
    self.output_processor.start_bundle_outputs(
        self.signature.start_bundle_method.method_value())

  def invoke_finish_bundle(self):
    # type: () -> None
    """Invokes the DoFn.finish_bundle() method.
    """
    self.output_processor.finish_bundle_outputs(
        self.signature.finish_bundle_method.method_value())

  def invoke_teardown(self):
    # type: () -> None
    """Invokes the DoFn.teardown() method
    """
    self.signature.teardown_lifecycle_method.method_value()

  def invoke_user_timer(self, timer_spec, key, window, timestamp, pane_info):
    """Fires a user timer callback, emitting its outputs for (window, key)."""
    # self.output_processor is Optional, but in practice it won't be None here
    self.output_processor.process_outputs(
        WindowedValue(None, timestamp, (window, )),
        self.signature.timer_methods[timer_spec].invoke_timer_callback(
            self.user_state_context, key, window, timestamp, pane_info))

  def invoke_create_watermark_estimator(self, estimator_state):
    """Creates a WatermarkEstimator from previously persisted state (SDF)."""
    return self.signature.create_watermark_estimator_method.method_value(
        estimator_state)

  def invoke_split(self, element, restriction):
    """Invokes the RestrictionProvider's split() for the given element."""
    return self.signature.split_method.method_value(element, restriction)

  def invoke_initial_restriction(self, element):
    """Invokes the RestrictionProvider's initial_restriction() method."""
    return self.signature.initial_restriction_method.method_value(element)

  def invoke_create_tracker(self, restriction):
    """Invokes the RestrictionProvider's create_tracker() method."""
    return self.signature.create_tracker_method.method_value(restriction)
class SimpleInvoker(DoFnInvoker):
  """An invoker that processes elements ignoring windowing information."""

  def __init__(self,
               output_processor,  # type: OutputProcessor
               signature  # type: DoFnSignature
              ):
    # type: (...) -> None
    """Caches the bare process callable for fast per-element dispatch."""
    super(SimpleInvoker, self).__init__(output_processor, signature)
    self.process_method = signature.process_method.method_value

  def invoke_process(self,
                     windowed_value,  # type: WindowedValue
                     restriction=None,
                     watermark_estimator_state=None,
                     additional_args=None,
                     additional_kwargs=None
                    ):
    # type: (...) -> None
    """Calls process() on the raw value; windowing/SDF arguments are unused."""
    results = self.process_method(windowed_value.value)
    self.output_processor.process_outputs(windowed_value, results)
class PerWindowInvoker(DoFnInvoker):
  """An invoker that processes elements considering windowing information."""

  def __init__(self,
               output_processor,  # type: _OutputProcessor
               signature,  # type: DoFnSignature
               context,  # type: DoFnContext
               side_inputs,  # type: Iterable[sideinputs.SideInputMap]
               input_args,
               input_kwargs,
               user_state_context,  # type: Optional[userstate.UserStateContext]
               bundle_finalizer_param  # type: Optional[core._BundleFinalizerParam]
              ):
    """Precomputes the process() argument list with placeholders.

    Placeholders mark positions that must be filled per element/window
    (element, key, window, timestamp, state, timers, ...); everything else is
    filled in once here.
    """
    super(PerWindowInvoker, self).__init__(output_processor, signature)
    self.side_inputs = side_inputs
    self.context = context
    self.process_method = signature.process_method.method_value
    default_arg_values = signature.process_method.defaults
    # Windows must be exploded (processed one at a time) when any side input
    # is not globally windowed, when process() asks for the window, or when
    # the DoFn is stateful (state/timers are per window and key).
    self.has_windowed_inputs = (
        not all(si.is_globally_windowed() for si in side_inputs) or
        (core.DoFn.WindowParam in default_arg_values) or
        signature.is_stateful_dofn())
    self.user_state_context = user_state_context
    self.is_splittable = signature.is_splittable_dofn()
    self.threadsafe_restriction_tracker = None  # type: Optional[ThreadsafeRestrictionTracker]
    self.threadsafe_watermark_estimator = None  # type: Optional[ThreadsafeWatermarkEstimator]
    self.current_windowed_value = None  # type: Optional[WindowedValue]
    self.bundle_finalizer_param = bundle_finalizer_param
    self.is_key_param_required = False
    if self.is_splittable:
      # splitting_lock guards the mutable split-related fields below, which a
      # runner-driven try_split() may touch from another thread.
      self.splitting_lock = threading.Lock()
      self.current_window_index = None
      self.stop_window_index = None

    # Try to prepare all the arguments that can just be filled in
    # without any additional work. in the process function.
    # Also cache all the placeholders needed in the process function.

    # Flag to cache additional arguments on the first element if all
    # inputs are within the global window.
    self.cache_globally_windowed_args = not self.has_windowed_inputs

    input_args = input_args if input_args else []
    input_kwargs = input_kwargs if input_kwargs else {}

    arg_names = signature.process_method.args

    # Create placeholder for element parameter of DoFn.process() method.
    # Not to be confused with ArgumentPlaceHolder, which may be passed in
    # input_args and is a placeholder for side-inputs.
    class ArgPlaceholder(object):
      def __init__(self, placeholder):
        self.placeholder = placeholder

    if core.DoFn.ElementParam not in default_arg_values:
      # TODO(BEAM-7867): Handle cases in which len(arg_names) ==
      # len(default_arg_values).
      args_to_pick = len(arg_names) - len(default_arg_values) - 1
      # Positional argument values for process(), with placeholders for special
      # values such as the element, timestamp, etc.
      args_with_placeholders = ([ArgPlaceholder(core.DoFn.ElementParam)] +
                                input_args[:args_to_pick])
    else:
      args_to_pick = len(arg_names) - len(default_arg_values)
      args_with_placeholders = input_args[:args_to_pick]

    # Fill the OtherPlaceholders for context, key, window or timestamp
    remaining_args_iter = iter(input_args[args_to_pick:])
    for a, d in zip(arg_names[-len(default_arg_values):], default_arg_values):
      if core.DoFn.ElementParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.KeyParam == d:
        self.is_key_param_required = True
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.WindowParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.TimestampParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.PaneInfoParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.SideInputParam == d:
        # If no more args are present then the value must be passed via kwarg
        try:
          args_with_placeholders.append(next(remaining_args_iter))
        except StopIteration:
          if a not in input_kwargs:
            raise ValueError("Value for sideinput %s not provided" % a)
      elif isinstance(d, core.DoFn.StateParam):
        args_with_placeholders.append(ArgPlaceholder(d))
      elif isinstance(d, core.DoFn.TimerParam):
        args_with_placeholders.append(ArgPlaceholder(d))
      elif isinstance(d, type) and core.DoFn.BundleFinalizerParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      else:
        # If no more args are present then the value must be passed via kwarg
        try:
          args_with_placeholders.append(next(remaining_args_iter))
        except StopIteration:
          pass
    args_with_placeholders.extend(list(remaining_args_iter))

    # Stash the list of placeholder positions for performance
    self.placeholders = [(i, x.placeholder)
                         for (i, x) in enumerate(args_with_placeholders)
                         if isinstance(x, ArgPlaceholder)]

    self.args_for_process = args_with_placeholders
    self.kwargs_for_process = input_kwargs

  def invoke_process(self,
                     windowed_value,  # type: WindowedValue
                     restriction=None,
                     watermark_estimator_state=None,
                     additional_args=None,
                     additional_kwargs=None
                    ):
    # type: (...) -> Iterable[SplitResultResidual]
    """Invokes process() once per window (or once overall), collecting any
    SDF residuals produced by deferred work or runner-initiated splits."""
    if not additional_args:
      additional_args = []
    if not additional_kwargs:
      additional_kwargs = {}

    self.context.set_element(windowed_value)
    # Call for the process function for each window if has windowed side inputs
    # or if the process accesses the window parameter. We can just call it once
    # otherwise as none of the arguments are changing
    residuals = []
    if self.is_splittable:
      with self.splitting_lock:
        # Publish the in-flight element so a concurrent try_split() can see it.
        self.current_windowed_value = windowed_value
        self.restriction = restriction
        self.watermark_estimator_state = watermark_estimator_state
      try:
        if self.has_windowed_inputs and len(windowed_value.windows) > 1:
          for i, w in enumerate(windowed_value.windows):
            if not self._should_process_window_for_sdf(
                windowed_value, additional_kwargs, i):
              break
            residual = self._invoke_process_per_window(
                WindowedValue(
                    windowed_value.value, windowed_value.timestamp, (w, )),
                additional_args,
                additional_kwargs)
            if residual:
              residuals.append(residual)
        else:
          if self._should_process_window_for_sdf(windowed_value,
                                                 additional_kwargs):
            residual = self._invoke_process_per_window(
                windowed_value, additional_args, additional_kwargs)
            if residual:
              residuals.append(residual)
      finally:
        with self.splitting_lock:
          # Clear all split state so a late try_split() returns None.
          self.current_windowed_value = None
          self.restriction = None
          self.watermark_estimator_state = None
          self.current_window_index = None
          self.threadsafe_restriction_tracker = None
          self.threadsafe_watermark_estimator = None
    elif self.has_windowed_inputs and len(windowed_value.windows) != 1:
      for w in windowed_value.windows:
        self._invoke_process_per_window(
            WindowedValue(
                windowed_value.value, windowed_value.timestamp, (w, )),
            additional_args,
            additional_kwargs)
    else:
      self._invoke_process_per_window(
          windowed_value, additional_args, additional_kwargs)
    return residuals

  def _should_process_window_for_sdf(
      self,
      windowed_value,  # type: WindowedValue
      additional_kwargs,
      window_index=None,  # type: Optional[int]
  ):
    """Sets up per-window SDF tracking; returns False once the (possibly
    split-reduced) stop window has been reached."""
    restriction_tracker = self.invoke_create_tracker(self.restriction)
    watermark_estimator = self.invoke_create_watermark_estimator(
        self.watermark_estimator_state)
    with self.splitting_lock:
      # NOTE(review): `if window_index:` is falsy for index 0, so the inner
      # `window_index == 0` branch below is unreachable and window 0 never
      # records its index / stop bound — presumably `is not None` was
      # intended here; confirm against upstream.
      if window_index:
        self.current_window_index = window_index
        if window_index == 0:
          self.stop_window_index = len(windowed_value.windows)
        if window_index == self.stop_window_index:
          return False
      self.threadsafe_restriction_tracker = ThreadsafeRestrictionTracker(
          restriction_tracker)
      self.threadsafe_watermark_estimator = (
          ThreadsafeWatermarkEstimator(watermark_estimator))
    restriction_tracker_param = (
        self.signature.process_method.restriction_provider_arg_name)
    if not restriction_tracker_param:
      raise ValueError(
          'DoFn is splittable but DoFn does not have a '
          'RestrictionTrackerParam defined')
    additional_kwargs[restriction_tracker_param] = (
        RestrictionTrackerView(self.threadsafe_restriction_tracker))
    watermark_param = (
        self.signature.process_method.watermark_estimator_provider_arg_name)
    # When the watermark_estimator is a NoOpWatermarkEstimator, the system
    # will not add watermark_param into the DoFn param list.
    if watermark_param is not None:
      additional_kwargs[watermark_param] = self.threadsafe_watermark_estimator
    return True

  def _invoke_process_per_window(self,
                                 windowed_value,  # type: WindowedValue
                                 additional_args,
                                 additional_kwargs,
                                ):
    # type: (...) -> Optional[SplitResultResidual]
    """Fills placeholder args for one window, calls process(), and returns a
    residual if the SDF deferred the remainder of its restriction."""
    if self.has_windowed_inputs:
      window, = windowed_value.windows
      side_inputs = [si[window] for si in self.side_inputs]
      side_inputs.extend(additional_args)
      args_for_process, kwargs_for_process = util.insert_values_in_args(
          self.args_for_process, self.kwargs_for_process,
          side_inputs)
    elif self.cache_globally_windowed_args:
      # Attempt to cache additional args if all inputs are globally
      # windowed inputs when processing the first element.
      self.cache_globally_windowed_args = False

      # Fill in sideInputs if they are globally windowed
      global_window = GlobalWindow()
      self.args_for_process, self.kwargs_for_process = (
          util.insert_values_in_args(
              self.args_for_process, self.kwargs_for_process,
              [si[global_window] for si in self.side_inputs]))
      args_for_process, kwargs_for_process = (
          self.args_for_process, self.kwargs_for_process)
    else:
      args_for_process, kwargs_for_process = (
          self.args_for_process, self.kwargs_for_process)

    # Extract key in the case of a stateful DoFn. Note that in the case of a
    # stateful DoFn, we set during __init__ self.has_windowed_inputs to be
    # True. Therefore, windows will be exploded coming into this method, and
    # we can rely on the window variable being set above.
    if self.user_state_context or self.is_key_param_required:
      try:
        key, unused_value = windowed_value.value
      except (TypeError, ValueError):
        raise ValueError((
            'Input value to a stateful DoFn or KeyParam must be a KV tuple; '
            'instead, got \'%s\'.') % (windowed_value.value, ))

    for i, p in self.placeholders:
      if core.DoFn.ElementParam == p:
        args_for_process[i] = windowed_value.value
      elif core.DoFn.KeyParam == p:
        args_for_process[i] = key
      elif core.DoFn.WindowParam == p:
        args_for_process[i] = window
      elif core.DoFn.TimestampParam == p:
        args_for_process[i] = windowed_value.timestamp
      elif core.DoFn.PaneInfoParam == p:
        args_for_process[i] = windowed_value.pane_info
      elif isinstance(p, core.DoFn.StateParam):
        assert self.user_state_context is not None
        args_for_process[i] = (
            self.user_state_context.get_state(p.state_spec, key, window))
      elif isinstance(p, core.DoFn.TimerParam):
        assert self.user_state_context is not None
        args_for_process[i] = (
            self.user_state_context.get_timer(
                p.timer_spec,
                key,
                window,
                windowed_value.timestamp,
                windowed_value.pane_info))
      elif core.DoFn.BundleFinalizerParam == p:
        args_for_process[i] = self.bundle_finalizer_param

    if additional_kwargs:
      if kwargs_for_process is None:
        kwargs_for_process = additional_kwargs
      else:
        # NOTE(review): this loop variable shadows the stateful-DoFn `key`
        # extracted above; harmless today since `key` is not read afterwards,
        # but a rename would be safer.
        for key in additional_kwargs:
          kwargs_for_process[key] = additional_kwargs[key]

    if kwargs_for_process:
      self.output_processor.process_outputs(
          windowed_value,
          self.process_method(*args_for_process, **kwargs_for_process),
          self.threadsafe_watermark_estimator)
    else:
      self.output_processor.process_outputs(
          windowed_value,
          self.process_method(*args_for_process),
          self.threadsafe_watermark_estimator)

    if self.is_splittable:
      assert self.threadsafe_restriction_tracker is not None
      self.threadsafe_restriction_tracker.check_done()
      deferred_status = self.threadsafe_restriction_tracker.deferred_status()
      if deferred_status:
        # The SDF deferred the rest of its restriction: package the unfinished
        # work (element + restriction + estimator state) as a residual.
        deferred_restriction, deferred_timestamp = deferred_status
        element = windowed_value.value
        size = self.signature.get_restriction_provider().restriction_size(
            element, deferred_restriction)
        current_watermark = (
            self.threadsafe_watermark_estimator.current_watermark())
        estimator_state = (
            self.threadsafe_watermark_estimator.get_estimator_state())
        residual_value = ((element, (deferred_restriction, estimator_state)),
                          size)
        return SplitResultResidual(
            residual_value=windowed_value.with_value(residual_value),
            current_watermark=current_watermark,
            deferred_timestamp=deferred_timestamp)
    return None

  @staticmethod
  def _try_split(fraction,
                 window_index,  # type: Optional[int]
                 stop_window_index,  # type: Optional[int]
                 windowed_value,  # type: WindowedValue
                 restriction,
                 watermark_estimator_state,
                 restriction_provider,  # type: RestrictionProvider
                 restriction_tracker,  # type: RestrictionTracker
                 watermark_estimator,  # type: WatermarkEstimator
                ):
    # type: (...) -> Optional[Tuple[Iterable[SplitResultPrimary], Iterable[SplitResultResidual], Optional[int]]]
    """Try to split returning a primaries, residuals and a new stop index.

    For non-window observing splittable DoFns we split the current restriction
    and assign the primary and residual to all the windows.

    For window observing splittable DoFns, we:
    1) return a split at a window boundary if the fraction lies outside of the
       current window.
    2) attempt to split the current restriction, if successful then return
       the primary and residual for the current window and an additional
       primary and residual for any fully processed and fully unprocessed
       windows.
    3) fall back to returning a split at the window boundary if possible

    Args:
      window_index: the current index of the window being processed or None
        if the splittable DoFn is not window observing.
      stop_window_index: the current index to stop processing at or None
        if the splittable DoFn is not window observing.
      windowed_value: the current windowed value
      restriction: the initial restriction when processing was started.
      watermark_estimator_state: the initial watermark estimator state when
        processing was started.
      restriction_provider: the DoFn's restriction provider
      restriction_tracker: the current restriction tracker
      watermark_estimator: the current watermark estimator

    Returns:
      A tuple containing (primaries, residuals, new_stop_index) or None if
      splitting was not possible. new_stop_index will only be set if the
      splittable DoFn is window observing otherwise it will be None.
    """
    def compute_whole_window_split(to_index, from_index):
      # Builds a (primary, residual) pair covering whole windows only:
      # primary keeps windows [0, to_index), residual keeps
      # [from_index, stop_window_index) — either side may be None if empty.
      restriction_size = restriction_provider.restriction_size(
          windowed_value, restriction)
      # The primary and residual both share the same value only differing
      # by the set of windows they are in.
      value = ((windowed_value.value, (restriction, watermark_estimator_state)),
               restriction_size)
      primary_restriction = SplitResultPrimary(
          primary_value=WindowedValue(
              value,
              windowed_value.timestamp,
              windowed_value.windows[:to_index])) if to_index > 0 else None
      # Don't report any updated watermarks for the residual since they have
      # not processed any part of the restriction.
      residual_restriction = SplitResultResidual(
          residual_value=WindowedValue(
              value,
              windowed_value.timestamp,
              windowed_value.windows[from_index:stop_window_index]),
          current_watermark=None,
          deferred_timestamp=None) if from_index < stop_window_index else None
      return (primary_restriction, residual_restriction)

    primary_restrictions = []
    residual_restrictions = []

    window_observing = window_index is not None
    # If we are processing each window separately and we aren't on the last
    # window then compute whether the split lies within the current window
    # or a future window.
    if window_observing and window_index != stop_window_index - 1:
      progress = restriction_tracker.current_progress()
      if not progress:
        # Assume no work has been completed for the current window if progress
        # is unavailable.
        from apache_beam.io.iobase import RestrictionProgress
        progress = RestrictionProgress(completed=0, remaining=1)

      scaled_progress = PerWindowInvoker._scale_progress(
          progress, window_index, stop_window_index)
      # Compute the fraction of the remainder relative to the scaled progress.
      # If the value is greater than or equal to progress.remaining_work then we
      # should split at the closest window boundary.
      fraction_of_remainder = scaled_progress.remaining_work * fraction
      if fraction_of_remainder >= progress.remaining_work:
        # The fraction is outside of the current window and hence we will
        # split at the closest window boundary. Favor a split and return the
        # last window if we would have rounded up to the end of the window
        # based upon the fraction.
        new_stop_window_index = min(
            stop_window_index - 1,
            window_index + max(
                1,
                int(
                    round((
                        progress.completed_work +
                        scaled_progress.remaining_work * fraction) /
                          progress.total_work))))
        primary, residual = compute_whole_window_split(
            new_stop_window_index, new_stop_window_index)
        assert primary is not None
        assert residual is not None
        return ([primary], [residual], new_stop_window_index)
      else:
        # The fraction is within the current window being processed so compute
        # the updated fraction based upon the number of windows being processed.
        new_stop_window_index = window_index + 1
        fraction = fraction_of_remainder / progress.remaining_work
        # Attempt to split below, if we can't then we'll compute a split
        # using only window boundaries
    else:
      # We aren't splitting within multiple windows so we don't change our
      # stop index.
      new_stop_window_index = stop_window_index

    # Temporary workaround for [BEAM-7473]: get current_watermark before
    # split, in case watermark gets advanced before getting split results.
    # In worst case, current_watermark is always stale, which is ok.
    current_watermark = (watermark_estimator.current_watermark())
    current_estimator_state = (watermark_estimator.get_estimator_state())
    split = restriction_tracker.try_split(fraction)
    if split:
      primary, residual = split
      element = windowed_value.value
      primary_size = restriction_provider.restriction_size(
          windowed_value.value, primary)
      residual_size = restriction_provider.restriction_size(
          windowed_value.value, residual)
      # We use the watermark estimator state for the original process call
      # for the primary and the updated watermark estimator state for the
      # residual for the split.
      primary_split_value = ((element, (primary, watermark_estimator_state)),
                             primary_size)
      residual_split_value = ((element, (residual, current_estimator_state)),
                              residual_size)
      windows = (
          windowed_value.windows[window_index],
      ) if window_observing else windowed_value.windows
      primary_restrictions.append(
          SplitResultPrimary(
              primary_value=WindowedValue(
                  primary_split_value, windowed_value.timestamp, windows)))
      residual_restrictions.append(
          SplitResultResidual(
              residual_value=WindowedValue(
                  residual_split_value, windowed_value.timestamp, windows),
              current_watermark=current_watermark,
              deferred_timestamp=None))
      if window_observing:
        assert new_stop_window_index == window_index + 1
        primary, residual = compute_whole_window_split(
            window_index, window_index + 1)
        if primary:
          primary_restrictions.append(primary)
        if residual:
          residual_restrictions.append(residual)
      return (
          primary_restrictions, residual_restrictions, new_stop_window_index)
    elif new_stop_window_index and new_stop_window_index != stop_window_index:
      # If we failed to split but have a new stop index then return a split
      # at the window boundary.
      primary, residual = compute_whole_window_split(
          new_stop_window_index, new_stop_window_index)
      assert primary is not None
      assert residual is not None
      return ([primary], [residual], new_stop_window_index)
    else:
      return None

  def try_split(self, fraction):
    # type: (...) -> Optional[Tuple[Iterable[SplitResultPrimary], Iterable[SplitResultResidual]]]
    """Attempts a runner-initiated split of the in-flight element."""
    if not self.is_splittable:
      return None

    with self.splitting_lock:
      if not self.threadsafe_restriction_tracker:
        return None

      # Make a local reference to member variables that change references during
      # processing under lock before attempting to split so we have a consistent
      # view of all the references.
      result = PerWindowInvoker._try_split(
          fraction,
          self.current_window_index,
          self.stop_window_index,
          self.current_windowed_value,
          self.restriction,
          self.watermark_estimator_state,
          self.signature.get_restriction_provider(),
          self.threadsafe_restriction_tracker,
          self.threadsafe_watermark_estimator)
      if not result:
        return None

      # NOTE(review): _try_split returns (primaries, residuals, stop_index),
      # so these local names are swapped; the tuple returned below is still
      # (primaries, residuals) as the type comment declares. Renaming the
      # locals would make this clearer.
      residuals, primaries, self.stop_window_index = result
      return (residuals, primaries)

  @staticmethod
  def _scale_progress(progress, window_index, stop_window_index):
    """Scales single-window progress to cover all windows being processed."""
    # We scale progress based upon the amount of work we will do for one
    # window and have it apply for all windows.
    completed = window_index * progress.total_work + progress.completed_work
    remaining = (
        stop_window_index -
        (window_index + 1)) * progress.total_work + progress.remaining_work
    from apache_beam.io.iobase import RestrictionProgress
    return RestrictionProgress(completed=completed, remaining=remaining)

  def current_element_progress(self):
    # type: () -> Optional[RestrictionProgress]
    """Returns progress for the in-flight element, scaled across windows."""
    if not self.is_splittable:
      return None

    with self.splitting_lock:
      current_window_index = self.current_window_index
      stop_window_index = self.stop_window_index
      threadsafe_restriction_tracker = self.threadsafe_restriction_tracker
    if not threadsafe_restriction_tracker:
      return None
    progress = threadsafe_restriction_tracker.current_progress()
    if not current_window_index or not progress:
      return progress
    # stop_window_index should always be set if current_window_index is set,
    # it is an error otherwise.
    assert stop_window_index
    return PerWindowInvoker._scale_progress(
        progress, current_window_index, stop_window_index)
class DoFnRunner:
  """For internal use only; no backwards-compatibility guarantees.

  A helper class for executing ParDo operations.
  """

  def __init__(self,
               fn,  # type: core.DoFn
               args,
               kwargs,
               side_inputs,  # type: Iterable[sideinputs.SideInputMap]
               windowing,
               tagged_receivers,  # type: Mapping[Optional[str], Receiver]
               step_name=None,  # type: Optional[str]
               logging_context=None,
               state=None,
               scoped_metrics_container=None,
               operation_name=None,
               user_state_context=None  # type: Optional[userstate.UserStateContext]
              ):
    """Initializes a DoFnRunner.

    Args:
      fn: user DoFn to invoke
      args: positional side input arguments (static and placeholder), if any
      kwargs: keyword side input arguments (static and placeholder), if any
      side_inputs: list of sideinput.SideInputMaps for deferred side inputs
      windowing: windowing properties of the output PCollection(s)
      tagged_receivers: a dict of tag name to Receiver objects
      step_name: the name of this step
      logging_context: DEPRECATED [BEAM-4728]
      state: handle for accessing DoFn state
      scoped_metrics_container: DEPRECATED
      operation_name: The system name assigned by the runner for this operation.
      user_state_context: The UserStateContext instance for the current
                          Stateful DoFn.
    """
    # Need to support multiple iterations.
    side_inputs = list(side_inputs)

    self.step_name = step_name
    self.context = DoFnContext(step_name, state=state)
    self.bundle_finalizer_param = DoFn.BundleFinalizerParam()
    do_fn_signature = DoFnSignature(fn)

    # Optimize for the common case.
    main_receivers = tagged_receivers[None]

    # TODO(BEAM-3937): Remove if block after output counter released.
    if 'outputs_per_element_counter' in RuntimeValueProvider.experiments:
      # TODO(BEAM-3955): Make step_name and operation_name less confused.
      output_counter_name = (
          CounterName('per-element-output-count', step_name=operation_name))
      per_element_output_counter = state._counter_factory.get_counter(
          output_counter_name, Counter.DATAFLOW_DISTRIBUTION).accumulator
    else:
      per_element_output_counter = None

    output_processor = _OutputProcessor(
        windowing.windowfn,
        main_receivers,
        tagged_receivers,
        per_element_output_counter)

    if do_fn_signature.is_stateful_dofn() and not user_state_context:
      raise Exception(
          'Requested execution of a stateful DoFn, but no user state context '
          'is available. This likely means that the current runner does not '
          'support the execution of stateful DoFns.')

    self.do_fn_invoker = DoFnInvoker.create_invoker(
        do_fn_signature,
        output_processor,
        self.context,
        side_inputs,
        args,
        kwargs,
        user_state_context=user_state_context,
        bundle_finalizer_param=self.bundle_finalizer_param)

  def process(self, windowed_value):
    # type: (WindowedValue) -> Iterable[SplitResultResidual]
    """Processes one element, re-raising failures tagged with the step name."""
    try:
      return self.do_fn_invoker.invoke_process(windowed_value)
    except BaseException as exn:
      self._reraise_augmented(exn)
      # Unreachable at runtime (_reraise_augmented always raises); keeps the
      # declared return type satisfied for static analysis.
      return []

  def process_with_sized_restriction(self, windowed_value):
    # type: (WindowedValue) -> Iterable[SplitResultResidual]
    """Processes an SDF element encoded as ((element, (restriction,
    estimator_state)), size)."""
    (element, (restriction, estimator_state)), _ = windowed_value.value
    return self.do_fn_invoker.invoke_process(
        windowed_value.with_value(element),
        restriction=restriction,
        watermark_estimator_state=estimator_state)

  def try_split(self, fraction):
    # type: (...) -> Optional[Tuple[Iterable[SplitResultPrimary], Iterable[SplitResultResidual]]]
    """Forwards a runner split request; only valid for PerWindowInvoker."""
    assert isinstance(self.do_fn_invoker, PerWindowInvoker)
    return self.do_fn_invoker.try_split(fraction)

  def current_element_progress(self):
    # type: () -> Optional[RestrictionProgress]
    """Reports SDF progress; only valid for PerWindowInvoker."""
    assert isinstance(self.do_fn_invoker, PerWindowInvoker)
    return self.do_fn_invoker.current_element_progress()

  def process_user_timer(self, timer_spec, key, window, timestamp, pane_info):
    """Fires a user timer, re-raising failures tagged with the step name."""
    try:
      self.do_fn_invoker.invoke_user_timer(
          timer_spec, key, window, timestamp, pane_info)
    except BaseException as exn:
      self._reraise_augmented(exn)

  def _invoke_bundle_method(self, bundle_method):
    # Clears the current element before bundle boundaries.
    try:
      self.context.set_element(None)
      bundle_method()
    except BaseException as exn:
      self._reraise_augmented(exn)

  def _invoke_lifecycle_method(self, lifecycle_method):
    try:
      self.context.set_element(None)
      lifecycle_method()
    except BaseException as exn:
      self._reraise_augmented(exn)

  def setup(self):
    # type: () -> None
    self._invoke_lifecycle_method(self.do_fn_invoker.invoke_setup)

  def start(self):
    # type: () -> None
    self._invoke_bundle_method(self.do_fn_invoker.invoke_start_bundle)

  def finish(self):
    # type: () -> None
    self._invoke_bundle_method(self.do_fn_invoker.invoke_finish_bundle)

  def teardown(self):
    # type: () -> None
    self._invoke_lifecycle_method(self.do_fn_invoker.invoke_teardown)

  def finalize(self):
    # type: () -> None
    self.bundle_finalizer_param.finalize_bundle()

  def _reraise_augmented(self, exn):
    """Re-raises ``exn`` with the step name appended to its message (once).

    Must be called from within an ``except`` block: the bare ``raise`` below
    re-raises the active exception.
    """
    if getattr(exn, '_tagged_with_step', False) or not self.step_name:
      raise
    step_annotation = " [while running '%s']" % self.step_name
    # To emulate exception chaining (not available in Python 2).
    try:
      # Attempt to construct the same kind of exception
      # with an augmented message.
      new_exn = type(exn)(exn.args[0] + step_annotation, *exn.args[1:])
      new_exn._tagged_with_step = True  # Could raise attribute error.
    except:  # pylint: disable=bare-except
      # If anything goes wrong, construct a RuntimeError whose message
      # records the original exception's type and message.
      new_exn = RuntimeError(
          traceback.format_exception_only(type(exn), exn)[-1].strip() +
          step_annotation)
      new_exn._tagged_with_step = True
    raise_with_traceback(new_exn)
class OutputProcessor(object):
  """Interface for dispatching values produced by DoFn method invocations."""
  def process_outputs(
      self, windowed_input_element, results, watermark_estimator=None):
    # type: (WindowedValue, Iterable[Any], Optional[WatermarkEstimator]) -> None
    """Dispatches the iterable of results produced for one input element."""
    raise NotImplementedError
class _OutputProcessor(OutputProcessor):
  """Processes output produced by DoFn method invocations."""
  def __init__(self,
               window_fn,
               main_receivers,  # type: Receiver
               tagged_receivers,  # type: Mapping[Optional[str], Receiver]
               per_element_output_counter):
    """Initializes ``_OutputProcessor``.

    Args:
      window_fn: a windowing function (WindowFn).
      main_receivers: main receiver object.
      tagged_receivers: a dict of tag name to Receiver objects.
      per_element_output_counter: per_element_output_counter of one work_item.
        Could be None if the experimental flag is turned off.
    """
    self.window_fn = window_fn
    self.main_receivers = main_receivers
    self.tagged_receivers = tagged_receivers
    self.per_element_output_counter = per_element_output_counter
  def process_outputs(
      self, windowed_input_element, results, watermark_estimator=None):
    # type: (WindowedValue, Iterable[Any], Optional[WatermarkEstimator]) -> None
    """Dispatch the result of process computation to the appropriate receivers.

    A value wrapped in a TaggedOutput object will be unwrapped and
    then dispatched to the appropriate indexed output.
    """
    if results is None:
      # TODO(BEAM-3937): Remove if block after output counter released.
      # Only enable per_element_output_counter when counter cythonized.
      if (self.per_element_output_counter is not None and
          self.per_element_output_counter.is_cythonized):
        self.per_element_output_counter.add_input(0)
      return
    output_element_count = 0
    for result in results:
      # results here may be a generator, which cannot call len on it.
      output_element_count += 1
      tag = None
      if isinstance(result, TaggedOutput):
        tag = result.tag
        if not isinstance(tag, (str, unicode)):
          raise TypeError('In %s, tag %s is not a string' % (self, tag))
        result = result.value
      if isinstance(result, WindowedValue):
        windowed_value = result
        # Replicate the output into every window of the input element
        # (mutates the output's windows in place).
        if (windowed_input_element is not None and
            len(windowed_input_element.windows) != 1):
          windowed_value.windows *= len(windowed_input_element.windows)
      elif isinstance(result, TimestampedValue):
        # Re-window the value at its new timestamp using the window_fn.
        assign_context = WindowFn.AssignContext(result.timestamp, result.value)
        windowed_value = WindowedValue(
            result.value,
            result.timestamp,
            self.window_fn.assign(assign_context))
        if len(windowed_input_element.windows) != 1:
          windowed_value.windows *= len(windowed_input_element.windows)
      else:
        # Plain value: inherit timestamp and windows from the input element.
        windowed_value = windowed_input_element.with_value(result)
      if watermark_estimator is not None:
        watermark_estimator.observe_timestamp(windowed_value.timestamp)
      if tag is None:
        self.main_receivers.receive(windowed_value)
      else:
        self.tagged_receivers[tag].receive(windowed_value)
    # TODO(BEAM-3937): Remove if block after output counter released.
    # Only enable per_element_output_counter when counter cythonized
    if (self.per_element_output_counter is not None and
        self.per_element_output_counter.is_cythonized):
      self.per_element_output_counter.add_input(output_element_count)
  def start_bundle_outputs(self, results):
    """Validate that start_bundle does not output any elements."""
    if results is None:
      return
    raise RuntimeError(
        'Start Bundle should not output any elements but got %s' % results)
  def finish_bundle_outputs(self, results):
    """Dispatch the result of finish_bundle to the appropriate receivers.

    A value wrapped in a TaggedOutput object will be unwrapped and
    then dispatched to the appropriate indexed output.
    """
    if results is None:
      return
    for result in results:
      tag = None
      if isinstance(result, TaggedOutput):
        tag = result.tag
        if not isinstance(tag, (str, unicode)):
          raise TypeError('In %s, tag %s is not a string' % (self, tag))
        result = result.value
      if isinstance(result, WindowedValue):
        windowed_value = result
      else:
        raise RuntimeError('Finish Bundle should only output WindowedValue ' +\
                           'type but got %s' % type(result))
      if tag is None:
        self.main_receivers.receive(windowed_value)
      else:
        self.tagged_receivers[tag].receive(windowed_value)
class _NoContext(WindowFn.AssignContext):
  """An uninspectable WindowFn.AssignContext."""

  # Sentinel marking "no timestamp supplied"; compared by identity below.
  NO_VALUE = object()

  def __init__(self, value, timestamp=NO_VALUE):
    self.value = value
    self._timestamp = timestamp

  @property
  def timestamp(self):
    ts = self._timestamp
    if ts is self.NO_VALUE:
      raise ValueError('No timestamp in this context.')
    return ts

  @property
  def existing_windows(self):
    raise ValueError('No existing_windows in this context.')
class DoFnState(object):
  """For internal use only; no backwards-compatibility guarantees.

  Keeps track of state that DoFns want, currently, user counters.
  """

  def __init__(self, counter_factory):
    # The runner fills in step_name later; counters are keyed on it.
    self.step_name = ''
    self._counter_factory = counter_factory

  def counter_for(self, aggregator):
    """Looks up the counter for this aggregator, creating one if necessary."""
    factory = self._counter_factory
    return factory.get_aggregator_counter(self.step_name, aggregator)
# TODO(robertwb): Replace core.DoFnContext with this.
class DoFnContext(object):
  """For internal use only; no backwards-compatibility guarantees."""

  def __init__(self, label, element=None, state=None):
    self.label = label
    self.state = state
    # NB: self.windowed_value deliberately stays unset until set_element
    # is called; accessing the properties below before then raises
    # AttributeError for the missing attribute.
    if element is not None:
      self.set_element(element)

  def set_element(self, windowed_value):
    # type: (Optional[WindowedValue]) -> None
    self.windowed_value = windowed_value

  @property
  def element(self):
    wv = self.windowed_value
    if wv is None:
      raise AttributeError('element not accessible in this context')
    return wv.value

  @property
  def timestamp(self):
    wv = self.windowed_value
    if wv is None:
      raise AttributeError('timestamp not accessible in this context')
    return wv.timestamp

  @property
  def windows(self):
    wv = self.windowed_value
    if wv is None:
      raise AttributeError('windows not accessible in this context')
    return wv.windows
# NOTE(review): removed stray dataset-metadata cells that were embedded here
# ("| 39.985175", "| 112", "| 0.695473", "|") -- they were not valid Python.
from __future__ import absolute_import
from __future__ import division
import threading
import traceback
from builtins import next
from builtins import object
from builtins import round
from builtins import zip
from typing import TYPE_CHECKING
from typing import Any
from typing import Dict
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
from future.utils import raise_with_traceback
from past.builtins import unicode
from apache_beam.coders import TupleCoder
from apache_beam.internal import util
from apache_beam.options.value_provider import RuntimeValueProvider
from apache_beam.pvalue import TaggedOutput
from apache_beam.runners.sdf_utils import NoOpWatermarkEstimatorProvider
from apache_beam.runners.sdf_utils import RestrictionTrackerView
from apache_beam.runners.sdf_utils import SplitResultPrimary
from apache_beam.runners.sdf_utils import SplitResultResidual
from apache_beam.runners.sdf_utils import ThreadsafeRestrictionTracker
from apache_beam.runners.sdf_utils import ThreadsafeWatermarkEstimator
from apache_beam.transforms import DoFn
from apache_beam.transforms import core
from apache_beam.transforms import userstate
from apache_beam.transforms.core import RestrictionProvider
from apache_beam.transforms.core import WatermarkEstimatorProvider
from apache_beam.transforms.window import GlobalWindow
from apache_beam.transforms.window import TimestampedValue
from apache_beam.transforms.window import WindowFn
from apache_beam.utils.counters import Counter
from apache_beam.utils.counters import CounterName
from apache_beam.utils.timestamp import Timestamp
from apache_beam.utils.windowed_value import WindowedValue
if TYPE_CHECKING:
from apache_beam.transforms import sideinputs
from apache_beam.transforms.core import TimerSpec
from apache_beam.io.iobase import RestrictionProgress
from apache_beam.iobase import RestrictionTracker
from apache_beam.iobase import WatermarkEstimator
class NameContext(object):
  """Holds the name information for a step."""

  def __init__(self, step_name, transform_id=None):
    self.step_name = step_name
    self.transform_id = transform_id

  def __eq__(self, other):
    # Only the step name participates in equality and hashing.
    return self.step_name == other.step_name

  def __ne__(self, other):
    return not self == other

  def __hash__(self):
    return hash(self.step_name)

  def __repr__(self):
    return 'NameContext(%s)' % self.__dict__

  def metrics_name(self):
    """Name to use for metrics/monitoring."""
    return self.step_name

  def logging_name(self):
    """Name to use in log statements."""
    return self.step_name
class DataflowNameContext(NameContext):
  """A NameContext that also carries user- and system-assigned step names."""

  def __init__(self, step_name, user_name, system_name):
    super(DataflowNameContext, self).__init__(step_name)
    self.user_name = user_name
    self.system_name = system_name

  def __eq__(self, other):
    # Compared field by field, short-circuiting in declaration order.
    return (
        self.step_name == other.step_name and
        self.user_name == other.user_name and
        self.system_name == other.system_name)

  def __ne__(self, other):
    return not self == other

  def __hash__(self):
    return hash((self.step_name, self.user_name, self.system_name))

  def __repr__(self):
    return 'DataflowNameContext(%s)' % self.__dict__

  def logging_name(self):
    """Logging uses the user-visible step name."""
    return self.user_name
class Receiver(object):
  """An object that consumes a WindowedValue."""
  def receive(self, windowed_value):
    # type: (WindowedValue) -> None
    raise NotImplementedError
class MethodWrapper(object):
  """For internal use only; no backwards-compatibility guarantees.

  Wraps a DoFn/RestrictionProvider/WatermarkEstimatorProvider method,
  recording which of its defaulted parameters are special DoFn params
  (state, timers, timestamp, window, key, restriction, watermark estimator)
  so invokers can substitute real values at call time.
  """
  def __init__(self, obj_to_invoke, method_name):
    """Initializes a ``MethodWrapper``.

    Args:
      obj_to_invoke: the object that contains the method; must be a DoFn,
        RestrictionProvider, or WatermarkEstimatorProvider.
      method_name: name of the method as a string.

    Raises:
      ValueError: if ``obj_to_invoke`` is not one of the accepted types.
    """
    if not isinstance(obj_to_invoke,
                      (DoFn, RestrictionProvider, WatermarkEstimatorProvider)):
      raise ValueError(
          '\'obj_to_invoke\' has to be either a \'DoFn\' or '
          'a \'RestrictionProvider\'. Received %r instead.' % obj_to_invoke)
    self.args, self.defaults = core.get_function_arguments(obj_to_invoke,
                                                           method_name)
    self.method_value = getattr(obj_to_invoke, method_name)
    self.has_userstate_arguments = False
    # Map of kwarg name -> StateSpec / TimerSpec to materialize at call time.
    self.state_args_to_replace = {}
    self.timer_args_to_replace = {}
    # Names of parameters bound to well-known DoFn params, if present.
    self.timestamp_arg_name = None
    self.window_arg_name = None
    self.key_arg_name = None
    self.restriction_provider = None
    self.restriction_provider_arg_name = None
    self.watermark_estimator_provider = None
    self.watermark_estimator_provider_arg_name = None
    if hasattr(self.method_value, 'unbounded_per_element'):
      self.unbounded_per_element = True
    else:
      self.unbounded_per_element = False
    # Inspect defaulted parameters; when there are no defaults the zip
    # yields nothing and all the fields above keep their initial values.
    for kw, v in zip(self.args[-len(self.defaults):], self.defaults):
      if isinstance(v, core.DoFn.StateParam):
        self.state_args_to_replace[kw] = v.state_spec
        self.has_userstate_arguments = True
      elif isinstance(v, core.DoFn.TimerParam):
        self.timer_args_to_replace[kw] = v.timer_spec
        self.has_userstate_arguments = True
      elif core.DoFn.TimestampParam == v:
        self.timestamp_arg_name = kw
      elif core.DoFn.WindowParam == v:
        self.window_arg_name = kw
      elif core.DoFn.KeyParam == v:
        self.key_arg_name = kw
      elif isinstance(v, core.DoFn.RestrictionParam):
        self.restriction_provider = v.restriction_provider
        self.restriction_provider_arg_name = kw
      elif isinstance(v, core.DoFn.WatermarkEstimatorParam):
        self.watermark_estimator_provider = v.watermark_estimator_provider
        self.watermark_estimator_provider_arg_name = kw
    # Fall back to a no-op watermark estimator when none was requested.
    if self.watermark_estimator_provider is None:
      self.watermark_estimator_provider = NoOpWatermarkEstimatorProvider()
  def invoke_timer_callback(
      self, user_state_context, key, window, timestamp, pane_info):
    """Invokes the wrapped method as a timer-firing callback.

    Materializes any requested state/timer/timestamp/window/key kwargs from
    ``user_state_context`` before calling.
    """
    kwargs = {}
    if self.has_userstate_arguments:
      for kw, state_spec in self.state_args_to_replace.items():
        kwargs[kw] = user_state_context.get_state(state_spec, key, window)
      for kw, timer_spec in self.timer_args_to_replace.items():
        kwargs[kw] = user_state_context.get_timer(
            timer_spec, key, window, timestamp, pane_info)
    if self.timestamp_arg_name:
      kwargs[self.timestamp_arg_name] = Timestamp.of(timestamp)
    if self.window_arg_name:
      kwargs[self.window_arg_name] = window
    if self.key_arg_name:
      kwargs[self.key_arg_name] = key
    if kwargs:
      return self.method_value(**kwargs)
    else:
      return self.method_value()
class DoFnSignature(object):
  """Represents the signature of a given ``DoFn`` object.

  Wraps the DoFn's methods as ``MethodWrapper``s and exposes derived
  properties such as whether the DoFn is splittable or stateful.
  """
  def __init__(self, do_fn):
    assert isinstance(do_fn, core.DoFn)
    self.do_fn = do_fn
    self.process_method = MethodWrapper(do_fn, 'process')
    self.start_bundle_method = MethodWrapper(do_fn, 'start_bundle')
    self.finish_bundle_method = MethodWrapper(do_fn, 'finish_bundle')
    self.setup_lifecycle_method = MethodWrapper(do_fn, 'setup')
    self.teardown_lifecycle_method = MethodWrapper(do_fn, 'teardown')
    restriction_provider = self.get_restriction_provider()
    watermark_estimator_provider = self.get_watermark_estimator_provider()
    self.create_watermark_estimator_method = (
        MethodWrapper(
            watermark_estimator_provider, 'create_watermark_estimator'))
    # Restriction-related methods only exist for splittable DoFns.
    self.initial_restriction_method = (
        MethodWrapper(restriction_provider, 'initial_restriction')
        if restriction_provider else None)
    self.create_tracker_method = (
        MethodWrapper(restriction_provider, 'create_tracker')
        if restriction_provider else None)
    self.split_method = (
        MethodWrapper(restriction_provider, 'split')
        if restriction_provider else None)
    self._validate()
    # Handle stateful DoFns: collect timer firing methods keyed by TimerSpec.
    self._is_stateful_dofn = userstate.is_stateful_dofn(do_fn)
    self.timer_methods = {}
    if self._is_stateful_dofn:
      _, all_timer_specs = userstate.get_dofn_specs(do_fn)
      for timer_spec in all_timer_specs:
        method = timer_spec._attached_callback
        self.timer_methods[timer_spec] = MethodWrapper(do_fn, method.__name__)
  def get_restriction_provider(self):
    """Returns the RestrictionProvider bound to process(), if any."""
    return self.process_method.restriction_provider
  def get_watermark_estimator_provider(self):
    """Returns the WatermarkEstimatorProvider bound to process()."""
    return self.process_method.watermark_estimator_provider
  def is_unbounded_per_element(self):
    return self.process_method.unbounded_per_element
  def _validate(self):
    self._validate_process()
    self._validate_bundle_method(self.start_bundle_method)
    self._validate_bundle_method(self.finish_bundle_method)
    self._validate_stateful_dofn()
  def _validate_process(self):
    """Validates that no DoFn parameter is repeated in process()."""
    param_ids = [
        d.param_id for d in self.process_method.defaults
        if isinstance(d, core._DoFnParam)
    ]
    if len(param_ids) != len(set(param_ids)):
      raise ValueError(
          'DoFn %r has duplicate process method parameters: %s.' %
          (self.do_fn, param_ids))
  def _validate_bundle_method(self, method_wrapper):
    """Validates that process()-only DoFn params are not used here."""
    for param in core.DoFn.DoFnProcessParams:
      if param in method_wrapper.defaults:
        raise ValueError(
            'DoFn.process() method-only parameter %s cannot be used in %s.' %
            (param, method_wrapper))
  def _validate_stateful_dofn(self):
    userstate.validate_stateful_dofn(self.do_fn)
  def is_splittable_dofn(self):
    return self.get_restriction_provider() is not None
  def get_restriction_coder(self):
    """Returns a TupleCoder of (restriction coder, estimator state coder),
    or None for a non-splittable DoFn.
    """
    if self.is_splittable_dofn():
      return TupleCoder([
          (self.get_restriction_provider().restriction_coder()),
          (self.get_watermark_estimator_provider().estimator_state_coder())
      ])
    else:
      return None
  def is_stateful_dofn(self):
    return self._is_stateful_dofn
  def has_timers(self):
    _, all_timer_specs = userstate.get_dofn_specs(self.do_fn)
    return bool(all_timer_specs)
  def has_bundle_finalization(self):
    """Returns True if any of start_bundle/process/finish_bundle requests
    DoFn.BundleFinalizerParam.
    """
    for sig in (self.start_bundle_method,
                self.process_method,
                self.finish_bundle_method):
      for d in sig.defaults:
        try:
          if d == DoFn.BundleFinalizerParam:
            return True
        except Exception:  # pylint: disable=broad-except
          # Defaults may be arbitrary objects whose __eq__ can raise.
          pass
    return False
class DoFnInvoker(object):
  """An abstraction that can be used to execute DoFn methods.

  A DoFnInvoker describes a particular way of invoking the methods of a DoFn
  represented by a given DoFnSignature.
  """
  def __init__(self,
               output_processor,
               signature
              ):
    self.output_processor = output_processor
    self.signature = signature
    self.user_state_context = None
    self.bundle_finalizer_param = None
  @staticmethod
  def create_invoker(
      signature,
      output_processor,
      context=None,
      side_inputs=None,
      input_args=None, input_kwargs=None,
      process_invocation=True,
      user_state_context=None,
      bundle_finalizer_param=None
  ):
    """Creates a new DoFnInvoker based on the given arguments.

    Returns a SimpleInvoker when there are no side inputs, extra args/kwargs,
    defaulted process() parameters, or user state (or when
    ``process_invocation`` is False); otherwise returns a PerWindowInvoker,
    which requires ``context``.
    """
    side_inputs = side_inputs or []
    default_arg_values = signature.process_method.defaults
    use_simple_invoker = not process_invocation or (
        not side_inputs and not input_args and not input_kwargs and
        not default_arg_values and not signature.is_stateful_dofn())
    if use_simple_invoker:
      return SimpleInvoker(output_processor, signature)
    else:
      if context is None:
        raise TypeError("Must provide context when not using SimpleInvoker")
      return PerWindowInvoker(
          output_processor,
          signature,
          context,
          side_inputs,
          input_args,
          input_kwargs,
          user_state_context,
          bundle_finalizer_param)
  def invoke_process(self,
                     windowed_value,
                     restriction=None,
                     watermark_estimator_state=None,
                     additional_args=None,
                     additional_kwargs=None
                    ):
    """Invokes the DoFn.process() method for one windowed value."""
    raise NotImplementedError
  def invoke_setup(self):
    """Invokes the DoFn.setup() lifecycle method."""
    self.signature.setup_lifecycle_method.method_value()
  def invoke_start_bundle(self):
    """Invokes DoFn.start_bundle() and validates/forwards its outputs."""
    self.output_processor.start_bundle_outputs(
        self.signature.start_bundle_method.method_value())
  def invoke_finish_bundle(self):
    """Invokes DoFn.finish_bundle() and dispatches its outputs."""
    self.output_processor.finish_bundle_outputs(
        self.signature.finish_bundle_method.method_value())
  def invoke_teardown(self):
    """Invokes the DoFn.teardown() lifecycle method."""
    self.signature.teardown_lifecycle_method.method_value()
  def invoke_user_timer(self, timer_spec, key, window, timestamp, pane_info):
    # Fire the timer callback and dispatch any outputs it produces; the
    # outputs inherit the timer's timestamp and window.
    self.output_processor.process_outputs(
        WindowedValue(None, timestamp, (window, )),
        self.signature.timer_methods[timer_spec].invoke_timer_callback(
            self.user_state_context, key, window, timestamp, pane_info))
  def invoke_create_watermark_estimator(self, estimator_state):
    return self.signature.create_watermark_estimator_method.method_value(
        estimator_state)
  def invoke_split(self, element, restriction):
    return self.signature.split_method.method_value(element, restriction)
  def invoke_initial_restriction(self, element):
    return self.signature.initial_restriction_method.method_value(element)
  def invoke_create_tracker(self, restriction):
    return self.signature.create_tracker_method.method_value(restriction)
class SimpleInvoker(DoFnInvoker):
  """An invoker that processes elements ignoring windowing information."""

  def __init__(self,
               output_processor,  # type: OutputProcessor
               signature  # type: DoFnSignature
              ):
    # type: (...) -> None
    super(SimpleInvoker, self).__init__(output_processor, signature)
    # Cache the bound process() method to avoid repeated attribute lookups.
    self.process_method = signature.process_method.method_value

  def invoke_process(self,
                     windowed_value,  # type: WindowedValue
                     restriction=None,
                     watermark_estimator_state=None,
                     additional_args=None,
                     additional_kwargs=None
                    ):
    # type: (...) -> None
    """Calls process() on the raw value and forwards all of its outputs."""
    results = self.process_method(windowed_value.value)
    self.output_processor.process_outputs(windowed_value, results)
class PerWindowInvoker(DoFnInvoker):
  def __init__(self,
               output_processor,  # type: _OutputProcessor
               signature,  # type: DoFnSignature
               context,  # type: DoFnContext
               side_inputs,  # type: Iterable[sideinputs.SideInputMap]
               input_args,
               input_kwargs,
               user_state_context,  # type: Optional[userstate.UserStateContext]
               bundle_finalizer_param  # type: Optional[core._BundleFinalizerParam]
              ):
    """Precomputes the positional argument layout for DoFn.process().

    Builds ``args_for_process``/``kwargs_for_process`` with placeholders for
    element/key/window/timestamp/state/timer params so each invocation only
    has to fill the placeholder slots.
    """
    super(PerWindowInvoker, self).__init__(output_processor, signature)
    self.side_inputs = side_inputs
    self.context = context
    self.process_method = signature.process_method.method_value
    default_arg_values = signature.process_method.defaults
    # Windows must be exploded per element when any side input is windowed,
    # the process method asks for the window, or the DoFn is stateful.
    self.has_windowed_inputs = (
        not all(si.is_globally_windowed() for si in side_inputs) or
        (core.DoFn.WindowParam in default_arg_values) or
        signature.is_stateful_dofn())
    self.user_state_context = user_state_context
    self.is_splittable = signature.is_splittable_dofn()
    self.threadsafe_restriction_tracker = None  # type: Optional[ThreadsafeRestrictionTracker]
    self.threadsafe_watermark_estimator = None  # type: Optional[ThreadsafeWatermarkEstimator]
    self.current_windowed_value = None  # type: Optional[WindowedValue]
    self.bundle_finalizer_param = bundle_finalizer_param
    self.is_key_param_required = False
    if self.is_splittable:
      # Guards the split state mutated by invoke_process/try_split.
      self.splitting_lock = threading.Lock()
      self.current_window_index = None
      self.stop_window_index = None
    # Try to prepare all the arguments that can just be filled in
    # without any additional work. in the process function.
    # Also cache all the placeholders needed in the process function.
    # Flag to cache additional arguments on the first element if all
    # inputs are within the global window.
    self.cache_globally_windowed_args = not self.has_windowed_inputs
    input_args = input_args if input_args else []
    input_kwargs = input_kwargs if input_kwargs else {}
    arg_names = signature.process_method.args
    # Create placeholder for element parameter of DoFn.process() method.
    # Not to be confused with ArgumentPlaceHolder, which may be passed in
    # input_args and is a placeholder for side-inputs.
    class ArgPlaceholder(object):
      def __init__(self, placeholder):
        self.placeholder = placeholder
    if core.DoFn.ElementParam not in default_arg_values:
      # TODO(BEAM-7867): Handle cases in which len(arg_names) ==
      # len(default_arg_values).
      args_to_pick = len(arg_names) - len(default_arg_values) - 1
      # Positional argument values for process(), with placeholders for special
      # values such as the element, timestamp, etc.
      args_with_placeholders = ([ArgPlaceholder(core.DoFn.ElementParam)] +
                                input_args[:args_to_pick])
    else:
      args_to_pick = len(arg_names) - len(default_arg_values)
      args_with_placeholders = input_args[:args_to_pick]
    # Fill the OtherPlaceholders for context, key, window or timestamp
    remaining_args_iter = iter(input_args[args_to_pick:])
    for a, d in zip(arg_names[-len(default_arg_values):], default_arg_values):
      if core.DoFn.ElementParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.KeyParam == d:
        self.is_key_param_required = True
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.WindowParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.TimestampParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.PaneInfoParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      elif core.DoFn.SideInputParam == d:
        # If no more args are present then the value must be passed via kwarg
        try:
          args_with_placeholders.append(next(remaining_args_iter))
        except StopIteration:
          if a not in input_kwargs:
            raise ValueError("Value for sideinput %s not provided" % a)
      elif isinstance(d, core.DoFn.StateParam):
        args_with_placeholders.append(ArgPlaceholder(d))
      elif isinstance(d, core.DoFn.TimerParam):
        args_with_placeholders.append(ArgPlaceholder(d))
      elif isinstance(d, type) and core.DoFn.BundleFinalizerParam == d:
        args_with_placeholders.append(ArgPlaceholder(d))
      else:
        # If no more args are present then the value must be passed via kwarg
        try:
          args_with_placeholders.append(next(remaining_args_iter))
        except StopIteration:
          pass
    args_with_placeholders.extend(list(remaining_args_iter))
    # Stash the list of placeholder positions for performance
    self.placeholders = [(i, x.placeholder)
                         for (i, x) in enumerate(args_with_placeholders)
                         if isinstance(x, ArgPlaceholder)]
    self.args_for_process = args_with_placeholders
    self.kwargs_for_process = input_kwargs
  def invoke_process(self,
                     windowed_value,  # type: WindowedValue
                     restriction=None,
                     watermark_estimator_state=None,
                     additional_args=None,
                     additional_kwargs=None
                    ):
    # type: (...) -> Iterable[SplitResultResidual]
    """Invokes process() for an element, exploding windows when required.

    For splittable DoFns, split state is published under ``splitting_lock``
    and any residual restrictions produced by deferrals/splits are returned;
    otherwise an empty list is returned.
    """
    if not additional_args:
      additional_args = []
    if not additional_kwargs:
      additional_kwargs = {}
    self.context.set_element(windowed_value)
    # Call for the process function for each window if has windowed side inputs
    # or if the process accesses the window parameter. We can just call it once
    # otherwise as none of the arguments are changing
    residuals = []
    if self.is_splittable:
      with self.splitting_lock:
        self.current_windowed_value = windowed_value
        self.restriction = restriction
        self.watermark_estimator_state = watermark_estimator_state
      try:
        if self.has_windowed_inputs and len(windowed_value.windows) > 1:
          # Process one window at a time so a split can land on a window
          # boundary (_should_process_window_for_sdf signals when to stop).
          for i, w in enumerate(windowed_value.windows):
            if not self._should_process_window_for_sdf(
                windowed_value, additional_kwargs, i):
              break
            residual = self._invoke_process_per_window(
                WindowedValue(
                    windowed_value.value, windowed_value.timestamp, (w, )),
                additional_args,
                additional_kwargs)
            if residual:
              residuals.append(residual)
        else:
          if self._should_process_window_for_sdf(windowed_value,
                                                 additional_kwargs):
            residual = self._invoke_process_per_window(
                windowed_value, additional_args, additional_kwargs)
            if residual:
              residuals.append(residual)
      finally:
        # Always clear split state so a concurrent try_split cannot observe
        # a stale element.
        with self.splitting_lock:
          self.current_windowed_value = None
          self.restriction = None
          self.watermark_estimator_state = None
          self.current_window_index = None
          self.threadsafe_restriction_tracker = None
          self.threadsafe_watermark_estimator = None
    elif self.has_windowed_inputs and len(windowed_value.windows) != 1:
      for w in windowed_value.windows:
        self._invoke_process_per_window(
            WindowedValue(
                windowed_value.value, windowed_value.timestamp, (w, )),
            additional_args,
            additional_kwargs)
    else:
      self._invoke_process_per_window(
          windowed_value, additional_args, additional_kwargs)
    return residuals
def _should_process_window_for_sdf(
self,
windowed_value, # type: WindowedValue
additional_kwargs,
window_index=None, # type: Optional[int]
):
restriction_tracker = self.invoke_create_tracker(self.restriction)
watermark_estimator = self.invoke_create_watermark_estimator(
self.watermark_estimator_state)
with self.splitting_lock:
if window_index:
self.current_window_index = window_index
if window_index == 0:
self.stop_window_index = len(windowed_value.windows)
if window_index == self.stop_window_index:
return False
self.threadsafe_restriction_tracker = ThreadsafeRestrictionTracker(
restriction_tracker)
self.threadsafe_watermark_estimator = (
ThreadsafeWatermarkEstimator(watermark_estimator))
restriction_tracker_param = (
self.signature.process_method.restriction_provider_arg_name)
if not restriction_tracker_param:
raise ValueError(
'DoFn is splittable but DoFn does not have a '
'RestrictionTrackerParam defined')
additional_kwargs[restriction_tracker_param] = (
RestrictionTrackerView(self.threadsafe_restriction_tracker))
watermark_param = (
self.signature.process_method.watermark_estimator_provider_arg_name)
# When the watermark_estimator is a NoOpWatermarkEstimator, the system
# will not add watermark_param into the DoFn param list.
if watermark_param is not None:
additional_kwargs[watermark_param] = self.threadsafe_watermark_estimator
return True
  def _invoke_process_per_window(self,
                                 windowed_value,  # type: WindowedValue
                                 additional_args,
                                 additional_kwargs,
                                ):
    # type: (...) -> Optional[SplitResultResidual]
    """Runs process() for one (windowed) value and dispatches its outputs.

    Returns a SplitResultResidual when a splittable DoFn deferred the
    remainder of its restriction, else None.
    """
    if self.has_windowed_inputs:
      window, = windowed_value.windows
      side_inputs = [si[window] for si in self.side_inputs]
      side_inputs.extend(additional_args)
      args_for_process, kwargs_for_process = util.insert_values_in_args(
          self.args_for_process, self.kwargs_for_process,
          side_inputs)
    elif self.cache_globally_windowed_args:
      # Attempt to cache additional args if all inputs are globally
      # windowed inputs when processing the first element.
      self.cache_globally_windowed_args = False
      # Fill in sideInputs if they are globally windowed
      global_window = GlobalWindow()
      self.args_for_process, self.kwargs_for_process = (
          util.insert_values_in_args(
              self.args_for_process, self.kwargs_for_process,
              [si[global_window] for si in self.side_inputs]))
      args_for_process, kwargs_for_process = (
          self.args_for_process, self.kwargs_for_process)
    else:
      args_for_process, kwargs_for_process = (
          self.args_for_process, self.kwargs_for_process)
    # Extract key in the case of a stateful DoFn. Note that in the case of a
    # stateful DoFn, we set during __init__ self.has_windowed_inputs to be
    # True. Therefore, windows will be exploded coming into this method, and
    # we can rely on the window variable being set above.
    if self.user_state_context or self.is_key_param_required:
      try:
        key, unused_value = windowed_value.value
      except (TypeError, ValueError):
        raise ValueError((
            'Input value to a stateful DoFn or KeyParam must be a KV tuple; '
            'instead, got \'%s\'.') % (windowed_value.value, ))
    # Fill every placeholder slot with the value for this invocation.
    for i, p in self.placeholders:
      if core.DoFn.ElementParam == p:
        args_for_process[i] = windowed_value.value
      elif core.DoFn.KeyParam == p:
        args_for_process[i] = key
      elif core.DoFn.WindowParam == p:
        args_for_process[i] = window
      elif core.DoFn.TimestampParam == p:
        args_for_process[i] = windowed_value.timestamp
      elif core.DoFn.PaneInfoParam == p:
        args_for_process[i] = windowed_value.pane_info
      elif isinstance(p, core.DoFn.StateParam):
        assert self.user_state_context is not None
        args_for_process[i] = (
            self.user_state_context.get_state(p.state_spec, key, window))
      elif isinstance(p, core.DoFn.TimerParam):
        assert self.user_state_context is not None
        args_for_process[i] = (
            self.user_state_context.get_timer(
                p.timer_spec,
                key,
                window,
                windowed_value.timestamp,
                windowed_value.pane_info))
      elif core.DoFn.BundleFinalizerParam == p:
        args_for_process[i] = self.bundle_finalizer_param
    if additional_kwargs:
      if kwargs_for_process is None:
        kwargs_for_process = additional_kwargs
      else:
        # NB: the loop variable shadows the stateful `key` above; harmless
        # since all placeholders have already been filled at this point.
        for key in additional_kwargs:
          kwargs_for_process[key] = additional_kwargs[key]
    if kwargs_for_process:
      self.output_processor.process_outputs(
          windowed_value,
          self.process_method(*args_for_process, **kwargs_for_process),
          self.threadsafe_watermark_estimator)
    else:
      self.output_processor.process_outputs(
          windowed_value,
          self.process_method(*args_for_process),
          self.threadsafe_watermark_estimator)
    if self.is_splittable:
      assert self.threadsafe_restriction_tracker is not None
      self.threadsafe_restriction_tracker.check_done()
      deferred_status = self.threadsafe_restriction_tracker.deferred_status()
      if deferred_status:
        # The DoFn deferred the rest of its restriction: package it (with
        # the estimator state and size) as a residual to resume later.
        deferred_restriction, deferred_timestamp = deferred_status
        element = windowed_value.value
        size = self.signature.get_restriction_provider().restriction_size(
            element, deferred_restriction)
        current_watermark = (
            self.threadsafe_watermark_estimator.current_watermark())
        estimator_state = (
            self.threadsafe_watermark_estimator.get_estimator_state())
        residual_value = ((element, (deferred_restriction, estimator_state)),
                          size)
        return SplitResultResidual(
            residual_value=windowed_value.with_value(residual_value),
            current_watermark=current_watermark,
            deferred_timestamp=deferred_timestamp)
    return None
  @staticmethod
  def _try_split(fraction,
      window_index, # type: Optional[int]
      stop_window_index, # type: Optional[int]
      windowed_value, # type: WindowedValue
      restriction,
      watermark_estimator_state,
      restriction_provider, # type: RestrictionProvider
      restriction_tracker, # type: RestrictionTracker
      watermark_estimator, # type: WatermarkEstimator
  ):
    # type: (...) -> Optional[Tuple[Iterable[SplitResultPrimary], Iterable[SplitResultResidual], Optional[int]]]
    """Computes a split of the restriction for the element in progress.

    Returns a 3-tuple (primaries, residuals, new stop window index), or None
    if no split could be performed. ``fraction`` is the fraction of
    *remaining* work at which to split. When processing windows individually
    (``window_index`` is not None), the split may land either inside the
    window currently being processed or on a later window boundary.
    """
    def compute_whole_window_split(to_index, from_index):
      # Splits purely on window boundaries: windows [0, to_index) stay in the
      # primary, windows [from_index, stop_window_index) become the residual.
      # Either half may be None when its window slice would be empty.
      restriction_size = restriction_provider.restriction_size(
          windowed_value, restriction)
      # The primary and residual both share the same value only differing
      # by the set of windows they are in.
      value = ((windowed_value.value, (restriction, watermark_estimator_state)),
               restriction_size)
      primary_restriction = SplitResultPrimary(
          primary_value=WindowedValue(
              value,
              windowed_value.timestamp,
              windowed_value.windows[:to_index])) if to_index > 0 else None
      # Don't report any updated watermarks for the residual since they have
      # not processed any part of the restriction yet.
      residual_restriction = SplitResultResidual(
          residual_value=WindowedValue(
              value,
              windowed_value.timestamp,
              windowed_value.windows[from_index:stop_window_index]),
          current_watermark=None,
          deferred_timestamp=None) if from_index < stop_window_index else None
      return (primary_restriction, residual_restriction)

    primary_restrictions = []
    residual_restrictions = []

    window_observing = window_index is not None
    # If we are processing windows one at a time and have more than one
    # window then compute whether the split lies within the current window
    # or a future window.
    if window_observing and window_index != stop_window_index - 1:
      progress = restriction_tracker.current_progress()
      if not progress:
        # Assume no work has been completed for the current window if progress
        # is unavailable.
        from apache_beam.io.iobase import RestrictionProgress
        progress = RestrictionProgress(completed=0, remaining=1)

      scaled_progress = PerWindowInvoker._scale_progress(
          progress, window_index, stop_window_index)
      # Compute the fraction of the remainder relative to the scaled progress.
      # If the value is greater than or equal to progress.remaining_work then we
      # should split at the closest window boundary.
      fraction_of_remainder = scaled_progress.remaining_work * fraction
      if fraction_of_remainder >= progress.remaining_work:
        # The fraction is outside of the current window and hence we will
        # split at the closest window boundary. Favor a split and return the
        # last window if we would have rounded up to the end of the window
        # based upon the fraction.
        new_stop_window_index = min(
            stop_window_index - 1,
            window_index + max(
                1,
                int(
                    round((
                        progress.completed_work +
                        scaled_progress.remaining_work * fraction) /
                          progress.total_work))))
        primary, residual = compute_whole_window_split(
            new_stop_window_index, new_stop_window_index)
        assert primary is not None
        assert residual is not None
        return ([primary], [residual], new_stop_window_index)
      else:
        # The fraction is within the current window being processed so compute
        # the updated fraction based upon the number of windows being processed.
        new_stop_window_index = window_index + 1
        fraction = fraction_of_remainder / progress.remaining_work
        # Attempt to split below, if we can't then we'll compute a split
        # using only window boundaries
    else:
      # We aren't splitting within multiple windows so we don't change our
      # stop index.
      new_stop_window_index = stop_window_index

    # Temporary workaround for [BEAM-7473]: get current_watermark before
    # split, in case watermark gets advanced before getting split results.
    # In worst case, current_watermark is always stale, which is ok.
    current_watermark = (watermark_estimator.current_watermark())
    current_estimator_state = (watermark_estimator.get_estimator_state())
    split = restriction_tracker.try_split(fraction)
    if split:
      primary, residual = split
      element = windowed_value.value
      primary_size = restriction_provider.restriction_size(
          windowed_value.value, primary)
      residual_size = restriction_provider.restriction_size(
          windowed_value.value, residual)
      # We use the watermark estimator state for the original process call
      # for the primary and the updated watermark estimator state for the
      # residual for the split.
      primary_split_value = ((element, (primary, watermark_estimator_state)),
                             primary_size)
      residual_split_value = ((element, (residual, current_estimator_state)),
                              residual_size)
      # When window observing, both split halves live only in the window
      # currently being processed; otherwise they keep all windows.
      windows = (
          windowed_value.windows[window_index],
      ) if window_observing else windowed_value.windows
      primary_restrictions.append(
          SplitResultPrimary(
              primary_value=WindowedValue(
                  primary_split_value, windowed_value.timestamp, windows)))
      residual_restrictions.append(
          SplitResultResidual(
              residual_value=WindowedValue(
                  residual_split_value, windowed_value.timestamp, windows),
              current_watermark=current_watermark,
              deferred_timestamp=None))

      if window_observing:
        assert new_stop_window_index == window_index + 1
        # Any remaining windows (before/after the current one) are split
        # wholesale on the window boundary.
        primary, residual = compute_whole_window_split(
            window_index, window_index + 1)
        if primary:
          primary_restrictions.append(primary)
        if residual:
          residual_restrictions.append(residual)
      return (
          primary_restrictions, residual_restrictions, new_stop_window_index)
    elif new_stop_window_index and new_stop_window_index != stop_window_index:
      # If we failed to split but have a new stop index then return a split
      # at the window boundary.
      primary, residual = compute_whole_window_split(
          new_stop_window_index, new_stop_window_index)
      assert primary is not None
      assert residual is not None
      return ([primary], [residual], new_stop_window_index)
    else:
      return None
def try_split(self, fraction):
# type: (...) -> Optional[Tuple[Iterable[SplitResultPrimary], Iterable[SplitResultResidual]]]
if not self.is_splittable:
return None
with self.splitting_lock:
if not self.threadsafe_restriction_tracker:
return None
# Make a local reference to member variables that change references during
# processing under lock before attempting to split so we have a consistent
# view of all the references.
result = PerWindowInvoker._try_split(
fraction,
self.current_window_index,
self.stop_window_index,
self.current_windowed_value,
self.restriction,
self.watermark_estimator_state,
self.signature.get_restriction_provider(),
self.threadsafe_restriction_tracker,
self.threadsafe_watermark_estimator)
if not result:
return None
residuals, primaries, self.stop_window_index = result
return (residuals, primaries)
@staticmethod
def _scale_progress(progress, window_index, stop_window_index):
# We scale progress based upon the amount of work we will do for one
# window and have it apply for all windows.
completed = window_index * progress.total_work + progress.completed_work
remaining = (
stop_window_index -
(window_index + 1)) * progress.total_work + progress.remaining_work
from apache_beam.io.iobase import RestrictionProgress
return RestrictionProgress(completed=completed, remaining=remaining)
  def current_element_progress(self):
    # type: () -> Optional[RestrictionProgress]
    """Returns the progress of the restriction for the current element.

    When windows are processed individually the per-window progress is scaled
    via _scale_progress so it covers all of the element's windows. Returns
    None if the DoFn is not splittable or no element is being processed.
    """
    if not self.is_splittable:
      return None
    with self.splitting_lock:
      # Snapshot mutable state under the lock so we work from a consistent
      # view even if processing advances concurrently.
      current_window_index = self.current_window_index
      stop_window_index = self.stop_window_index
      threadsafe_restriction_tracker = self.threadsafe_restriction_tracker
    if not threadsafe_restriction_tracker:
      return None
    progress = threadsafe_restriction_tracker.current_progress()
    if not current_window_index or not progress:
      return progress
    # stop_window_index should always be set if current_window_index is set,
    # it is an error otherwise.
    assert stop_window_index
    return PerWindowInvoker._scale_progress(
        progress, current_window_index, stop_window_index)
class DoFnRunner:
  """Executes a DoFn via a DoFnInvoker and routes its outputs to receivers.

  Any exception raised while invoking the DoFn is re-raised with the step
  name appended (see _reraise_augmented) to simplify debugging.
  """

  def __init__(self,
      fn, # type: core.DoFn
      args,
      kwargs,
      side_inputs, # type: Iterable[sideinputs.SideInputMap]
      windowing,
      tagged_receivers, # type: Mapping[Optional[str], Receiver]
      step_name=None, # type: Optional[str]
      logging_context=None,
      state=None,
      scoped_metrics_container=None,
      operation_name=None,
      user_state_context=None # type: Optional[userstate.UserStateContext]
  ):
    # Need to support multiple iterations.
    side_inputs = list(side_inputs)

    self.step_name = step_name
    self.context = DoFnContext(step_name, state=state)
    self.bundle_finalizer_param = DoFn.BundleFinalizerParam()

    do_fn_signature = DoFnSignature(fn)

    # Optimize for the common case: untagged output goes to the main receiver.
    main_receivers = tagged_receivers[None]

    # TODO(BEAM-3937): Remove if block after output counter released.
    if 'outputs_per_element_counter' in RuntimeValueProvider.experiments:
      # TODO(BEAM-3955): Make step_name and operation_name less confused.
      output_counter_name = (
          CounterName('per-element-output-count', step_name=operation_name))
      per_element_output_counter = state._counter_factory.get_counter(
          output_counter_name, Counter.DATAFLOW_DISTRIBUTION).accumulator
    else:
      per_element_output_counter = None

    output_processor = _OutputProcessor(
        windowing.windowfn,
        main_receivers,
        tagged_receivers,
        per_element_output_counter)

    if do_fn_signature.is_stateful_dofn() and not user_state_context:
      raise Exception(
          'Requested execution of a stateful DoFn, but no user state context '
          'is available. This likely means that the current runner does not '
          'support the execution of stateful DoFns.')

    self.do_fn_invoker = DoFnInvoker.create_invoker(
        do_fn_signature,
        output_processor,
        self.context,
        side_inputs,
        args,
        kwargs,
        user_state_context=user_state_context,
        bundle_finalizer_param=self.bundle_finalizer_param)

  def process(self, windowed_value):
    # type: (WindowedValue) -> Iterable[SplitResultResidual]
    """Invokes the DoFn's process method on one element.

    Returns any residuals produced by splittable-DoFn splits.
    """
    try:
      return self.do_fn_invoker.invoke_process(windowed_value)
    except BaseException as exn:
      self._reraise_augmented(exn)
      # Unreachable in practice (_reraise_augmented always raises), but keeps
      # the declared Iterable return type.
      return []

  def process_with_sized_restriction(self, windowed_value):
    # type: (WindowedValue) -> Iterable[SplitResultResidual]
    # The element arrives encoded as ((element, (restriction,
    # estimator_state)), size); unwrap it and pass the restriction and
    # watermark estimator state explicitly to the invoker.
    (element, (restriction, estimator_state)), _ = windowed_value.value
    return self.do_fn_invoker.invoke_process(
        windowed_value.with_value(element),
        restriction=restriction,
        watermark_estimator_state=estimator_state)

  def try_split(self, fraction):
    # type: (...) -> Optional[Tuple[Iterable[SplitResultPrimary], Iterable[SplitResultResidual]]]
    # Splitting is only supported for splittable DoFns (PerWindowInvoker).
    assert isinstance(self.do_fn_invoker, PerWindowInvoker)
    return self.do_fn_invoker.try_split(fraction)

  def current_element_progress(self):
    # type: () -> Optional[RestrictionProgress]
    assert isinstance(self.do_fn_invoker, PerWindowInvoker)
    return self.do_fn_invoker.current_element_progress()

  def process_user_timer(self, timer_spec, key, window, timestamp, pane_info):
    """Fires a user timer, augmenting any raised exception with the step."""
    try:
      self.do_fn_invoker.invoke_user_timer(
          timer_spec, key, window, timestamp, pane_info)
    except BaseException as exn:
      self._reraise_augmented(exn)

  def _invoke_bundle_method(self, bundle_method):
    # Bundle methods run with no current element.
    try:
      self.context.set_element(None)
      bundle_method()
    except BaseException as exn:
      self._reraise_augmented(exn)

  def _invoke_lifecycle_method(self, lifecycle_method):
    # Lifecycle methods (setup/teardown) run with no current element.
    try:
      self.context.set_element(None)
      lifecycle_method()
    except BaseException as exn:
      self._reraise_augmented(exn)

  def setup(self):
    # type: () -> None
    self._invoke_lifecycle_method(self.do_fn_invoker.invoke_setup)

  def start(self):
    # type: () -> None
    self._invoke_bundle_method(self.do_fn_invoker.invoke_start_bundle)

  def finish(self):
    # type: () -> None
    self._invoke_bundle_method(self.do_fn_invoker.invoke_finish_bundle)

  def teardown(self):
    # type: () -> None
    self._invoke_lifecycle_method(self.do_fn_invoker.invoke_teardown)

  def finalize(self):
    # type: () -> None
    self.bundle_finalizer_param.finalize_bundle()

  def _reraise_augmented(self, exn):
    """Re-raises exn with "[while running '<step>']" appended to its message.

    Exceptions are tagged so nested runners append the step name only once.
    """
    if getattr(exn, '_tagged_with_step', False) or not self.step_name:
      # Bare raise re-raises the exception currently being handled.
      raise
    step_annotation = " [while running '%s']" % self.step_name
    # To emulate exception chaining (not available in Python 2).
    try:
      # Attempt to construct the same kind of exception
      # with an augmented message.
      new_exn = type(exn)(exn.args[0] + step_annotation, *exn.args[1:])
      new_exn._tagged_with_step = True  # Could raise attribute error.
    except:  # pylint: disable=bare-except
      # If anything goes wrong, construct a RuntimeError whose message
      # records the original exception's type and message.
      new_exn = RuntimeError(
          traceback.format_exception_only(type(exn), exn)[-1].strip() +
          step_annotation)
      new_exn._tagged_with_step = True
    raise_with_traceback(new_exn)
class OutputProcessor(object):
  """Interface for dispatching the results of a DoFn method invocation."""

  def process_outputs(
      self, windowed_input_element, results, watermark_estimator=None):
    """Dispatches ``results`` produced for ``windowed_input_element``.

    Must be overridden by subclasses (see _OutputProcessor).
    """
    raise NotImplementedError
class _OutputProcessor(OutputProcessor):
  """Windows each DoFn result and routes it to the main or tagged receiver."""

  def __init__(self,
               window_fn,
               main_receivers,
               tagged_receivers,
               per_element_output_counter):
    """Initializes _OutputProcessor.

    Args:
      window_fn: windowing function used to assign windows to
        TimestampedValue results.
      main_receivers: Receiver for outputs with no tag.
      tagged_receivers: mapping of tag name to Receiver.
      per_element_output_counter: optional counter of outputs per element
        (None when the experiment is disabled).
    """
    self.window_fn = window_fn
    self.main_receivers = main_receivers
    self.tagged_receivers = tagged_receivers
    self.per_element_output_counter = per_element_output_counter

  def process_outputs(
      self, windowed_input_element, results, watermark_estimator=None):
    """Dispatches each result to the appropriate receiver.

    TaggedOutput values are unwrapped and routed to the receiver registered
    for their tag; WindowedValue results are passed through; TimestampedValue
    results are windowed with window_fn; any other value inherits the input
    element's timestamp and windows.
    """
    if results is None:
      # Record zero outputs for this element (cythonized counters only).
      if (self.per_element_output_counter is not None and
          self.per_element_output_counter.is_cythonized):
        self.per_element_output_counter.add_input(0)
      return

    # Count while streaming: results may be a generator, so len() is not an
    # option.
    output_element_count = 0
    for result in results:
      output_element_count += 1
      tag = None
      if isinstance(result, TaggedOutput):
        tag = result.tag
        # NOTE: `unicode` implies Python 2 compatibility in this code path.
        if not isinstance(tag, (str, unicode)):
          raise TypeError('In %s, tag %s is not a string' % (self, tag))
        result = result.value
      if isinstance(result, WindowedValue):
        windowed_value = result
        if (windowed_input_element is not None and
            len(windowed_input_element.windows) != 1):
          windowed_value.windows *= len(windowed_input_element.windows)
      elif isinstance(result, TimestampedValue):
        assign_context = WindowFn.AssignContext(result.timestamp, result.value)
        windowed_value = WindowedValue(
            result.value,
            result.timestamp,
            self.window_fn.assign(assign_context))
        if len(windowed_input_element.windows) != 1:
          windowed_value.windows *= len(windowed_input_element.windows)
      else:
        # Plain value: inherit timestamp and windows from the input element.
        windowed_value = windowed_input_element.with_value(result)
      if watermark_estimator is not None:
        watermark_estimator.observe_timestamp(windowed_value.timestamp)
      if tag is None:
        self.main_receivers.receive(windowed_value)
      else:
        self.tagged_receivers[tag].receive(windowed_value)
    # Record the number of outputs for this element (cythonized counters
    # only).
    if (self.per_element_output_counter is not None and
        self.per_element_output_counter.is_cythonized):
      self.per_element_output_counter.add_input(output_element_count)

  def start_bundle_outputs(self, results):
    """Validates that start_bundle produced no output elements."""
    if results is None:
      return
    raise RuntimeError(
        'Start Bundle should not output any elements but got %s' % results)

  def finish_bundle_outputs(self, results):
    """Dispatches finish_bundle outputs, which must all be WindowedValues."""
    if results is None:
      return

    for result in results:
      tag = None
      if isinstance(result, TaggedOutput):
        tag = result.tag
        if not isinstance(tag, (str, unicode)):
          raise TypeError('In %s, tag %s is not a string' % (self, tag))
        result = result.value

      if isinstance(result, WindowedValue):
        windowed_value = result
      else:
        raise RuntimeError('Finish Bundle should only output WindowedValue ' +\
                           'type but got %s' % type(result))

      if tag is None:
        self.main_receivers.receive(windowed_value)
      else:
        self.tagged_receivers[tag].receive(windowed_value)
class _NoContext(WindowFn.AssignContext):
  """An assignment context carrying only a value (and optionally a timestamp).

  Accessing anything that was not supplied raises ValueError.
  """

  # Sentinel distinguishing "no timestamp given" from a timestamp of None.
  NO_VALUE = object()

  def __init__(self, value, timestamp=NO_VALUE):
    self.value = value
    self._timestamp = timestamp

  @property
  def timestamp(self):
    if self._timestamp is self.NO_VALUE:
      raise ValueError('No timestamp in this context.')
    return self._timestamp

  @property
  def existing_windows(self):
    raise ValueError('No existing_windows in this context.')
class DoFnState(object):
  """Tracks per-step state a DoFn needs: the step name and its counters."""

  def __init__(self, counter_factory):
    self._counter_factory = counter_factory
    # Runner fills this in before counters are requested.
    self.step_name = ''

  def counter_for(self, aggregator):
    """Returns the counter for ``aggregator`` within the current step."""
    factory = self._counter_factory
    return factory.get_aggregator_counter(self.step_name, aggregator)
class DoFnContext(object):
  """Context exposing the element currently being processed to a DoFn."""

  def __init__(self, label, element=None, state=None):
    self.label = label
    self.state = state
    # BUGFIX: initialize to None so the properties below raise their
    # descriptive AttributeError messages when no element has been set,
    # instead of a raw "'DoFnContext' object has no attribute
    # 'windowed_value'" error.
    self.windowed_value = None
    if element is not None:
      self.set_element(element)

  def set_element(self, windowed_value):
    """Sets (or clears, with None) the element currently being processed."""
    self.windowed_value = windowed_value

  @property
  def element(self):
    """The value of the current element; raises if no element is set."""
    if self.windowed_value is None:
      raise AttributeError('element not accessible in this context')
    else:
      return self.windowed_value.value

  @property
  def timestamp(self):
    """The timestamp of the current element; raises if no element is set."""
    if self.windowed_value is None:
      raise AttributeError('timestamp not accessible in this context')
    else:
      return self.windowed_value.timestamp

  @property
  def windows(self):
    """The windows of the current element; raises if no element is set."""
    if self.windowed_value is None:
      raise AttributeError('windows not accessible in this context')
    else:
      return self.windowed_value.windows
| true
| true
|
1c4a26ec5b0edc44879d37cfc3b85a407c83e08a
| 9,618
|
py
|
Python
|
src/python/pants/reporting/plaintext_reporter.py
|
qma/pants
|
604f58a366b66bc5cfa83e7250cb8af8130832cf
|
[
"Apache-2.0"
] | null | null | null |
src/python/pants/reporting/plaintext_reporter.py
|
qma/pants
|
604f58a366b66bc5cfa83e7250cb8af8130832cf
|
[
"Apache-2.0"
] | null | null | null |
src/python/pants/reporting/plaintext_reporter.py
|
qma/pants
|
604f58a366b66bc5cfa83e7250cb8af8130832cf
|
[
"Apache-2.0"
] | 1
|
2019-06-10T17:24:34.000Z
|
2019-06-10T17:24:34.000Z
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from collections import namedtuple
import six
from colors import cyan, green, red, yellow
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.reporting.plaintext_reporter_base import PlainTextReporterBase
from pants.reporting.report import Report
from pants.reporting.reporter import Reporter
from pants.util.memo import memoized_method
class ToolOutputFormat(object):
  """Configuration item for displaying Tool Output to the console."""

  # Do not display output from the workunit unless its outcome != SUCCESS.
  SUPPRESS = 'SUPPRESS'
  # Indent the output to line up with the indentation of the other log output.
  INDENT = 'INDENT'
  # Display the output raw, with no leading indentation.
  UNINDENTED = 'UNINDENTED'

  @classmethod
  @memoized_method
  def keys(cls):
    """Returns the names of all public upper-case format constants."""
    return [attr for attr in dir(cls)
            if attr.isupper() and not attr.startswith('_')]
class LabelFormat(object):
  """Configuration item for displaying a workunit label to the console."""

  # Don't show the label at all.
  SUPPRESS = 'SUPPRESS'
  # Just output a single '.' with no newline.
  DOT = 'DOT'
  # Show the timestamp and label.
  FULL = 'FULL'
  # Suppress labels for all children of this node.
  CHILD_SUPPRESS = 'CHILD_SUPPRESS'
  # Display a dot for all children of this node.
  CHILD_DOT = 'CHILD_DOT'

  @classmethod
  @memoized_method
  def keys(cls):
    """Returns the names of all public upper-case format constants."""
    return [attr for attr in dir(cls)
            if attr.isupper() and not attr.startswith('_')]
class PlainTextReporter(PlainTextReporterBase):
  """Plain-text reporting to stdout.

  We only report progress for things under the default work root. It gets too
  confusing to try and show progress for background work too.

  NOTE(review): output is handled as bytes throughout; b''.format(...) only
  exists on Python 2 (where bytes is str), so this reporter targets Python 2.
  """

  # Console reporting settings.
  #   outfile: Write to this file-like object.
  #   color: use ANSI colors in output.
  #   indent: Whether to indent the reporting to reflect the nesting of workunits.
  #   timing: Show timing report at the end of the run.
  #   cache_stats: Show artifact cache report at the end of the run.
  Settings = namedtuple('Settings',
                        Reporter.Settings._fields + ('outfile', 'color', 'indent', 'timing',
                                                     'cache_stats', 'label_format',
                                                     'tool_output_format'))

  # Maps a log level to the ANSI color function used to render it.
  _COLOR_BY_LEVEL = {
    Report.FATAL: red,
    Report.ERROR: red,
    Report.WARN: yellow,
    Report.INFO: green,
    Report.DEBUG: cyan
  }

  # Format the std output from these workunit types as specified. If no format is specified, the
  # default is ToolOutputFormat.SUPPRESS
  TOOL_OUTPUT_FORMATTING = {
    WorkUnitLabel.MULTITOOL: ToolOutputFormat.SUPPRESS,
    WorkUnitLabel.BOOTSTRAP: ToolOutputFormat.SUPPRESS,
    WorkUnitLabel.COMPILER : ToolOutputFormat.INDENT,
    WorkUnitLabel.TEST : ToolOutputFormat.INDENT,
    WorkUnitLabel.REPL : ToolOutputFormat.UNINDENTED,
    WorkUnitLabel.RUN : ToolOutputFormat.UNINDENTED
  }

  # Format the labels from these workunit types as specified. If no format is specified, the
  # default is LabelFormat.FULL
  LABEL_FORMATTING = {
    WorkUnitLabel.MULTITOOL: LabelFormat.CHILD_DOT,
    WorkUnitLabel.BOOTSTRAP: LabelFormat.CHILD_SUPPRESS,
  }

  def __init__(self, run_tracker, settings):
    super(PlainTextReporter, self).__init__(run_tracker, settings)
    # Warn (rather than fail) on unrecognized format configuration so a typo
    # in the options doesn't abort the run.
    for key, value in settings.label_format.items():
      if key not in WorkUnitLabel.keys():
        self.emit('*** Got invalid key {} for --reporting-console-label-format. Expected one of {}\n'
                  .format(key, WorkUnitLabel.keys()))
      if value not in LabelFormat.keys():
        self.emit('*** Got invalid value {} for --reporting-console-label-format. Expected one of {}\n'
                  .format(value, LabelFormat.keys()))
    for key, value in settings.tool_output_format.items():
      if key not in WorkUnitLabel.keys():
        self.emit('*** Got invalid key {} for --reporting-console-tool-output-format. Expected one of {}\n'
                  .format(key, WorkUnitLabel.keys()))
      if value not in ToolOutputFormat.keys():
        self.emit('*** Got invalid value {} for --reporting-console-tool-output-format. Expected one of {}\n'
                  .format(value, ToolOutputFormat.keys()))

    # Mix in the new settings with the defaults.
    # NOTE(review): this updates the class-level dicts, so the settings are
    # shared by all instances — presumably intentional for a singleton
    # reporter; confirm if multiple reporters are ever created.
    self.LABEL_FORMATTING.update(settings.label_format.items())
    self.TOOL_OUTPUT_FORMATTING.update(settings.tool_output_format.items())

  def open(self):
    """Implementation of Reporter callback."""
    pass

  def close(self):
    """Implementation of Reporter callback."""
    self.emit(self.generate_epilog(self.settings))

  def start_workunit(self, workunit):
    """Implementation of Reporter callback."""
    if not self.is_under_main_root(workunit):
      return

    label_format = self._get_label_format(workunit)

    if label_format == LabelFormat.FULL:
      self._emit_indented_workunit_label(workunit)
      # Start output on a new line.
      tool_output_format = self._get_tool_output_format(workunit)
      if tool_output_format == ToolOutputFormat.INDENT:
        self.emit(self._prefix(workunit, b'\n'))
      elif tool_output_format == ToolOutputFormat.UNINDENTED:
        self.emit(b'\n')
    elif label_format == LabelFormat.DOT:
      self.emit(b'.')

    self.flush()

  def end_workunit(self, workunit):
    """Implementation of Reporter callback."""
    if not self.is_under_main_root(workunit):
      return

    if workunit.outcome() != WorkUnit.SUCCESS and not self._show_output(workunit):
      # Emit the suppressed workunit output, if any, to aid in debugging the problem.
      if self._get_label_format(workunit) != LabelFormat.FULL:
        self._emit_indented_workunit_label(workunit)
      for name, outbuf in workunit.outputs().items():
        self.emit(self._prefix(workunit, b'\n==== {} ====\n'.format(name)))
        self.emit(self._prefix(workunit, outbuf.read_from(0)))
        self.flush()

  def do_handle_log(self, workunit, level, *msg_elements):
    """Implementation of Reporter callback."""
    if not self.is_under_main_root(workunit):
      return

    # If the element is a (msg, detail) pair, we ignore the detail. There's no
    # useful way to display it on the console.
    elements = [e if isinstance(e, six.string_types) else e[0] for e in msg_elements]
    msg = b'\n' + b''.join(elements)
    if self.use_color_for_workunit(workunit, self.settings.color):
      msg = self._COLOR_BY_LEVEL.get(level, lambda x: x)(msg)

    self.emit(self._prefix(workunit, msg))
    self.flush()

  def handle_output(self, workunit, label, s):
    """Implementation of Reporter callback."""
    if not self.is_under_main_root(workunit):
      return
    tool_output_format = self._get_tool_output_format(workunit)
    if tool_output_format == ToolOutputFormat.INDENT:
      self.emit(self._prefix(workunit, s))
    elif tool_output_format == ToolOutputFormat.UNINDENTED:
      self.emit(s)
    self.flush()

  def emit(self, s):
    # Write straight to the configured output; callers control flushing.
    self.settings.outfile.write(s)

  def flush(self):
    self.settings.outfile.flush()

  def _get_label_format(self, workunit):
    # A label-specific override on the workunit wins.
    for label, label_format in self.LABEL_FORMATTING.items():
      if workunit.has_label(label):
        return label_format

    # Recursively look for a setting to suppress child label formatting.
    if workunit.parent:
      label_format = self._get_label_format(workunit.parent)
      if label_format == LabelFormat.CHILD_DOT:
        return LabelFormat.DOT
      if label_format == LabelFormat.CHILD_SUPPRESS:
        return LabelFormat.SUPPRESS

    return LabelFormat.FULL

  def _get_tool_output_format(self, workunit):
    for label, tool_output_format in self.TOOL_OUTPUT_FORMATTING.items():
      if workunit.has_label(label):
        return tool_output_format

    return ToolOutputFormat.SUPPRESS

  def _emit_indented_workunit_label(self, workunit):
    # Emits "<start time> <delta> <indent>[<name or full path>]".
    self.emit(b'\n{} {} {}[{}]'.format(
      workunit.start_time_string,
      workunit.start_delta_string,
      self._indent(workunit),
      workunit.name if self.settings.indent else workunit.path()))

  # Emit output from some tools and not others.
  # This is an arbitrary choice, but one that turns out to be useful to users in practice.
  def _show_output(self, workunit):
    tool_output_format = self._get_tool_output_format(workunit)
    return not tool_output_format == ToolOutputFormat.SUPPRESS

  def _format_aggregated_timings(self, aggregated_timings):
    return b'\n'.join([b'{timing:.3f} {label}'.format(**x) for x in aggregated_timings.get_all()])

  def _format_artifact_cache_stats(self, artifact_cache_stats):
    stats = artifact_cache_stats.get_all()
    return b'No artifact cache reads.' if not stats else \
    b'\n'.join([b'{cache_name} - Hits: {num_hits} Misses: {num_misses}'.format(**x)
                for x in stats])

  def _indent(self, workunit):
    # Two spaces per level of workunit nesting.
    return b'  ' * (len(workunit.ancestors()) - 1)

  # Placeholder the width of the "HH:MM:SS mm:ss " timestamp column, used to
  # align continuation lines under the first line of a labeled entry.
  _time_string_filler = b' ' * len('HH:MM:SS mm:ss ')

  def _prefix(self, workunit, s):
    if self.settings.indent:
      def replace(x, c):
        return x.replace(c, c + PlainTextReporter._time_string_filler + self._indent(workunit))
      return replace(replace(s, b'\r'), b'\n')
    else:
      return PlainTextReporter._time_string_filler + s
| 39.580247
| 109
| 0.694843
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from collections import namedtuple
import six
from colors import cyan, green, red, yellow
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.reporting.plaintext_reporter_base import PlainTextReporterBase
from pants.reporting.report import Report
from pants.reporting.reporter import Reporter
from pants.util.memo import memoized_method
class ToolOutputFormat(object):
SUPPRESS = 'SUPPRESS'
INDENT = 'INDENT'
UNINDENTED = 'UNINDENTED'
@classmethod
@memoized_method
def keys(cls):
return [key for key in dir(cls) if not key.startswith('_') and key.isupper()]
class LabelFormat(object):
SUPPRESS = 'SUPPRESS'
DOT = 'DOT' # Just output a single '.' with no newline
FULL = 'FULL' # Show the timestamp and label
CHILD_SUPPRESS = 'CHILD_SUPPRESS' # Suppress labels for all children of this node
CHILD_DOT = 'CHILD_DOT' # Display a dot for all children of this node
@classmethod
@memoized_method
def keys(cls):
return [key for key in dir(cls) if not key.startswith('_') and key.isupper()]
class PlainTextReporter(PlainTextReporterBase):
# Console reporting settings.
# outfile: Write to this file-like object.
# color: use ANSI colors in output.
# indent: Whether to indent the reporting to reflect the nesting of workunits.
# timing: Show timing report at the end of the run.
# cache_stats: Show artifact cache report at the end of the run.
Settings = namedtuple('Settings',
Reporter.Settings._fields + ('outfile', 'color', 'indent', 'timing',
'cache_stats', 'label_format',
'tool_output_format'))
_COLOR_BY_LEVEL = {
Report.FATAL: red,
Report.ERROR: red,
Report.WARN: yellow,
Report.INFO: green,
Report.DEBUG: cyan
}
# Format the std output from these workunit types as specified. If no format is specified, the
# default is ToolOutputFormat.SUPPRESS
TOOL_OUTPUT_FORMATTING = {
WorkUnitLabel.MULTITOOL: ToolOutputFormat.SUPPRESS,
WorkUnitLabel.BOOTSTRAP: ToolOutputFormat.SUPPRESS,
WorkUnitLabel.COMPILER : ToolOutputFormat.INDENT,
WorkUnitLabel.TEST : ToolOutputFormat.INDENT,
WorkUnitLabel.REPL : ToolOutputFormat.UNINDENTED,
WorkUnitLabel.RUN : ToolOutputFormat.UNINDENTED
}
# Format the labels from these workunit types as specified. If no format is specified, the
# default is LabelFormat.FULL
LABEL_FORMATTING = {
WorkUnitLabel.MULTITOOL: LabelFormat.CHILD_DOT,
WorkUnitLabel.BOOTSTRAP: LabelFormat.CHILD_SUPPRESS,
}
def __init__(self, run_tracker, settings):
super(PlainTextReporter, self).__init__(run_tracker, settings)
for key, value in settings.label_format.items():
if key not in WorkUnitLabel.keys():
self.emit('*** Got invalid key {} for --reporting-console-label-format. Expected one of {}\n'
.format(key, WorkUnitLabel.keys()))
if value not in LabelFormat.keys():
self.emit('*** Got invalid value {} for --reporting-console-label-format. Expected one of {}\n'
.format(value, LabelFormat.keys()))
for key, value in settings.tool_output_format.items():
if key not in WorkUnitLabel.keys():
self.emit('*** Got invalid key {} for --reporting-console-tool-output-format. Expected one of {}\n'
.format(key, WorkUnitLabel.keys()))
if value not in ToolOutputFormat.keys():
self.emit('*** Got invalid value {} for --reporting-console-tool-output-format. Expected one of {}\n'
.format(value, ToolOutputFormat.keys()))
# Mix in the new settings with the defaults.
self.LABEL_FORMATTING.update(settings.label_format.items())
self.TOOL_OUTPUT_FORMATTING.update(settings.tool_output_format.items())
def open(self):
pass
def close(self):
self.emit(self.generate_epilog(self.settings))
def start_workunit(self, workunit):
if not self.is_under_main_root(workunit):
return
label_format = self._get_label_format(workunit)
if label_format == LabelFormat.FULL:
self._emit_indented_workunit_label(workunit)
# Start output on a new line.
tool_output_format = self._get_tool_output_format(workunit)
if tool_output_format == ToolOutputFormat.INDENT:
self.emit(self._prefix(workunit, b'\n'))
elif tool_output_format == ToolOutputFormat.UNINDENTED:
self.emit(b'\n')
elif label_format == LabelFormat.DOT:
self.emit(b'.')
self.flush()
def end_workunit(self, workunit):
if not self.is_under_main_root(workunit):
return
if workunit.outcome() != WorkUnit.SUCCESS and not self._show_output(workunit):
# Emit the suppressed workunit output, if any, to aid in debugging the problem.
if self._get_label_format(workunit) != LabelFormat.FULL:
self._emit_indented_workunit_label(workunit)
for name, outbuf in workunit.outputs().items():
self.emit(self._prefix(workunit, b'\n==== {} ====\n'.format(name)))
self.emit(self._prefix(workunit, outbuf.read_from(0)))
self.flush()
def do_handle_log(self, workunit, level, *msg_elements):
if not self.is_under_main_root(workunit):
return
# If the element is a (msg, detail) pair, we ignore the detail. There's no
elements = [e if isinstance(e, six.string_types) else e[0] for e in msg_elements]
msg = b'\n' + b''.join(elements)
if self.use_color_for_workunit(workunit, self.settings.color):
msg = self._COLOR_BY_LEVEL.get(level, lambda x: x)(msg)
self.emit(self._prefix(workunit, msg))
self.flush()
def handle_output(self, workunit, label, s):
if not self.is_under_main_root(workunit):
return
tool_output_format = self._get_tool_output_format(workunit)
if tool_output_format == ToolOutputFormat.INDENT:
self.emit(self._prefix(workunit, s))
elif tool_output_format == ToolOutputFormat.UNINDENTED:
self.emit(s)
self.flush()
def emit(self, s):
self.settings.outfile.write(s)
def flush(self):
self.settings.outfile.flush()
def _get_label_format(self, workunit):
for label, label_format in self.LABEL_FORMATTING.items():
if workunit.has_label(label):
return label_format
if workunit.parent:
label_format = self._get_label_format(workunit.parent)
if label_format == LabelFormat.CHILD_DOT:
return LabelFormat.DOT
if label_format == LabelFormat.CHILD_SUPPRESS:
return LabelFormat.SUPPRESS
return LabelFormat.FULL
def _get_tool_output_format(self, workunit):
for label, tool_output_format in self.TOOL_OUTPUT_FORMATTING.items():
if workunit.has_label(label):
return tool_output_format
return ToolOutputFormat.SUPPRESS
def _emit_indented_workunit_label(self, workunit):
self.emit(b'\n{} {} {}[{}]'.format(
workunit.start_time_string,
workunit.start_delta_string,
self._indent(workunit),
workunit.name if self.settings.indent else workunit.path()))
def _show_output(self, workunit):
tool_output_format = self._get_tool_output_format(workunit)
return not tool_output_format == ToolOutputFormat.SUPPRESS
def _format_aggregated_timings(self, aggregated_timings):
return b'\n'.join([b'{timing:.3f} {label}'.format(**x) for x in aggregated_timings.get_all()])
def _format_artifact_cache_stats(self, artifact_cache_stats):
stats = artifact_cache_stats.get_all()
return b'No artifact cache reads.' if not stats else \
b'\n'.join([b'{cache_name} - Hits: {num_hits} Misses: {num_misses}'.format(**x)
for x in stats])
def _indent(self, workunit):
return b' ' * (len(workunit.ancestors()) - 1)
_time_string_filler = b' ' * len('HH:MM:SS mm:ss ')
def _prefix(self, workunit, s):
if self.settings.indent:
def replace(x, c):
return x.replace(c, c + PlainTextReporter._time_string_filler + self._indent(workunit))
return replace(replace(s, b'\r'), b'\n')
else:
return PlainTextReporter._time_string_filler + s
| true
| true
|
1c4a285a311b988d2b7f90c24434d040cd54642a
| 3,886
|
py
|
Python
|
setup.py
|
herrkaefer/psycopgr
|
376e8511ac591d32533118b5006135458cb5f27f
|
[
"MIT"
] | 23
|
2017-12-01T03:47:23.000Z
|
2022-01-06T23:36:56.000Z
|
setup.py
|
herrkaefer/psycopgr
|
376e8511ac591d32533118b5006135458cb5f27f
|
[
"MIT"
] | 9
|
2017-11-10T00:33:50.000Z
|
2021-06-06T01:27:20.000Z
|
setup.py
|
herrkaefer/psycopgr
|
376e8511ac591d32533118b5006135458cb5f27f
|
[
"MIT"
] | 9
|
2017-11-09T13:38:10.000Z
|
2022-02-17T16:08:35.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Ackownledgement:
# This file is modified from https://github.com/kennethreitz/setup.py
#
# Note: To use the 'upload' functionality of this file, you must:
# $ pipenv install twine --dev
import io
import os
import sys
from shutil import rmtree
from setuptools import find_packages, setup, Command
# Package meta-data.
NAME = 'psycopgr'
DESCRIPTION = 'A Python wrapper of pgRouting for routing from nodes to nodes on real map.'
URL = 'https://github.com/herrkaefer/psycopgr'
EMAIL = 'gloolar@gmail.com'
AUTHOR = 'Yang Liu'
REQUIRES_PYTHON = '>=2.7'
VERSION = '1.0.6'

# What packages are required for this module to be executed?
REQUIRED = [
    'psycopg2',
]

# What packages are optional?
EXTRAS = {
    # 'fancy feature': ['django'],
}

here = os.path.abspath(os.path.dirname(__file__))

# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
    with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
        long_description = '\n' + f.read()
except (IOError, OSError):
    # Fix: the original caught FileNotFoundError, which does not exist on
    # Python 2 even though REQUIRES_PYTHON allows '>=2.7' -- a missing README
    # would have raised NameError there. IOError/OSError cover the
    # missing-file case on both major versions (FileNotFoundError subclasses
    # OSError on Python 3).
    long_description = DESCRIPTION

# Load the package's __version__.py module as a dictionary.
about = {}
if not VERSION:
    project_slug = NAME.lower().replace("-", "_").replace(" ", "_")
    with open(os.path.join(here, project_slug, '__version__.py')) as f:
        exec(f.read(), about)
else:
    about['__version__'] = VERSION
class UploadCommand(Command):
    """Custom ``setup.py upload``: build distributions, publish, and tag."""

    description = 'Build and publish the package.'
    user_options = []

    @staticmethod
    def status(s):
        """Print *s* in bold."""
        print('\033[1m{0}\033[0m'.format(s))

    def initialize_options(self):
        # This command takes no options.
        pass

    def finalize_options(self):
        # Nothing to validate.
        pass

    def run(self):
        # Start from a clean slate; a missing dist/ directory is fine.
        try:
            self.status('Removing previous builds…')
            rmtree(os.path.join(here, 'dist'))
        except OSError:
            pass

        # Announce then execute each publishing stage, in order.
        for message, commands in (
            ('Building Source and Wheel (universal) distribution…',
             ['{0} setup.py sdist bdist_wheel --universal'.format(sys.executable)]),
            ('Uploading the package to PyPI via Twine…',
             ['twine upload dist/*']),
            ('Pushing git tags…',
             ['git tag v{0}'.format(about['__version__']),
              'git push --tags']),
        ):
            self.status(message)
            for command in commands:
                os.system(command)

        sys.exit()
# Where the magic happens: register package metadata, dependencies, and the
# custom `python setup.py upload` command with setuptools.
setup(
    name=NAME,
    version=about['__version__'],
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    # Ship every package found in the tree except test packages.
    packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
    # If your package is a single module, use this instead of 'packages':
    # py_modules=['mypackage'],

    # entry_points={
    #     'console_scripts': ['mycli=mymodule:cli'],
    # },
    install_requires=REQUIRED,
    extras_require=EXTRAS,
    include_package_data=True,
    license='MIT',
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ],
    # $ setup.py publish support.
    cmdclass={
        'upload': UploadCommand,
    },
)
| 28.785185
| 90
| 0.643592
|
import io
import os
import sys
from shutil import rmtree
from setuptools import find_packages, setup, Command
NAME = 'psycopgr'
DESCRIPTION = 'A Python wrapper of pgRouting for routing from nodes to nodes on real map.'
URL = 'https://github.com/herrkaefer/psycopgr'
EMAIL = 'gloolar@gmail.com'
AUTHOR = 'Yang Liu'
REQUIRES_PYTHON = '>=2.7'
VERSION = '1.0.6'
REQUIRED = [
'psycopg2',
]
EXTRAS = {
}
# ------------------------------------------------
# Except, perhaps the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier for that!
here = os.path.abspath(os.path.dirname(__file__))
# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = '\n' + f.read()
except FileNotFoundError:
long_description = DESCRIPTION
# Load the package's __version__.py module as a dictionary.
about = {}
if not VERSION:
project_slug = NAME.lower().replace("-", "_").replace(" ", "_")
with open(os.path.join(here, project_slug, '__version__.py')) as f:
exec(f.read(), about)
else:
about['__version__'] = VERSION
class UploadCommand(Command):
description = 'Build and publish the package.'
user_options = []
@staticmethod
def status(s):
print('\033[1m{0}\033[0m'.format(s))
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
try:
self.status('Removing previous builds…')
rmtree(os.path.join(here, 'dist'))
except OSError:
pass
self.status('Building Source and Wheel (universal) distribution…')
os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
self.status('Uploading the package to PyPI via Twine…')
os.system('twine upload dist/*')
self.status('Pushing git tags…')
os.system('git tag v{0}'.format(about['__version__']))
os.system('git push --tags')
sys.exit()
setup(
name=NAME,
version=about['__version__'],
description=DESCRIPTION,
long_description=long_description,
long_description_content_type='text/markdown',
author=AUTHOR,
author_email=EMAIL,
python_requires=REQUIRES_PYTHON,
url=URL,
packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
install_requires=REQUIRED,
extras_require=EXTRAS,
include_package_data=True,
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy'
],
cmdclass={
'upload': UploadCommand,
},
)
| true
| true
|
1c4a2fa84a2670f568ddc2052bf3b41997f3a6af
| 7,462
|
py
|
Python
|
tests/components/media_player/test_async_helpers.py
|
shanbs/home-assistant
|
818776d2b4f11e4f51992dc88bc0a6f9055833b2
|
[
"Apache-2.0"
] | 4
|
2019-01-10T14:47:54.000Z
|
2021-04-22T02:06:27.000Z
|
tests/components/media_player/test_async_helpers.py
|
shanbs/home-assistant
|
818776d2b4f11e4f51992dc88bc0a6f9055833b2
|
[
"Apache-2.0"
] | 6
|
2021-02-08T21:02:40.000Z
|
2022-03-12T00:52:16.000Z
|
tests/components/media_player/test_async_helpers.py
|
shanbs/home-assistant
|
818776d2b4f11e4f51992dc88bc0a6f9055833b2
|
[
"Apache-2.0"
] | 3
|
2018-08-29T19:26:20.000Z
|
2020-01-19T11:58:22.000Z
|
"""The tests for the Async Media player helper functions."""
import unittest
import asyncio
import homeassistant.components.media_player as mp
from homeassistant.const import (
STATE_PLAYING, STATE_PAUSED, STATE_ON, STATE_OFF, STATE_IDLE)
from homeassistant.util.async_ import run_coroutine_threadsafe
from tests.common import get_test_home_assistant
class AsyncMediaPlayer(mp.MediaPlayerDevice):
    """Media player test double whose operations are all native coroutines.

    Used to verify that the async_* helper methods on MediaPlayerDevice
    delegate correctly when a device implements the coroutine variants.
    """

    def __init__(self, hass):
        """Initialize the test media player: powered off, volume 0."""
        self.hass = hass
        self._volume = 0
        self._state = STATE_OFF

    @property
    def state(self):
        """State of the player."""
        return self._state

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._volume

    @asyncio.coroutine
    def async_set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._volume = volume

    @asyncio.coroutine
    def async_media_play(self):
        """Send play command."""
        self._state = STATE_PLAYING

    @asyncio.coroutine
    def async_media_pause(self):
        """Send pause command."""
        self._state = STATE_PAUSED

    @asyncio.coroutine
    def async_turn_on(self):
        """Turn the media player on."""
        self._state = STATE_ON

    @asyncio.coroutine
    def async_turn_off(self):
        """Turn the media player off."""
        self._state = STATE_OFF
class SyncMediaPlayer(mp.MediaPlayerDevice):
    """Media player test double implementing only the synchronous methods.

    Used to verify that the async_* helpers fall back to the sync
    implementations via the executor.
    """

    def __init__(self, hass):
        """Initialize the test media player: powered off, volume 0."""
        self.hass = hass
        self._volume = 0
        self._state = STATE_OFF

    @property
    def state(self):
        """State of the player."""
        return self._state

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._volume

    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._volume = volume

    def volume_up(self):
        """Raise the volume by 0.2, capped at 1."""
        current = self.volume_level
        if current < 1:
            self.set_volume_level(min(1, current + .2))

    def volume_down(self):
        """Lower the volume by 0.2, floored at 0."""
        current = self.volume_level
        if current > 0:
            self.set_volume_level(max(0, current - .2))

    def media_play_pause(self):
        """Flip between the playing and paused states."""
        self._state = (
            STATE_PAUSED if self._state == STATE_PLAYING else STATE_PLAYING)

    def toggle(self):
        """Flip the power state of the media player."""
        if self._state not in [STATE_OFF, STATE_IDLE]:
            self._state = STATE_OFF
        else:
            self._state = STATE_ON

    @asyncio.coroutine
    def async_media_play_pause(self):
        """Create a coroutine to wrap the future returned by ABC.

        This allows the run_coroutine_threadsafe helper to be used.
        """
        yield from super().async_media_play_pause()

    @asyncio.coroutine
    def async_toggle(self):
        """Create a coroutine to wrap the future returned by ABC.

        This allows the run_coroutine_threadsafe helper to be used.
        """
        yield from super().async_toggle()
class TestAsyncMediaPlayer(unittest.TestCase):
    """Exercise the async_* helpers against a fully-async media player."""

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.player = AsyncMediaPlayer(self.hass)

    def tearDown(self):
        """Shut down test instance."""
        self.hass.stop()

    def test_volume_up(self):
        """Test the volume_up helper function."""
        assert self.player.volume_level == 0
        run_coroutine_threadsafe(
            self.player.async_set_volume_level(0.5), self.hass.loop).result()
        assert self.player.volume_level == 0.5
        # volume_up default step is 0.1 on top of the current level.
        run_coroutine_threadsafe(
            self.player.async_volume_up(), self.hass.loop).result()
        assert self.player.volume_level == 0.6

    def test_volume_down(self):
        """Test the volume_down helper function."""
        assert self.player.volume_level == 0
        run_coroutine_threadsafe(
            self.player.async_set_volume_level(0.5), self.hass.loop).result()
        assert self.player.volume_level == 0.5
        run_coroutine_threadsafe(
            self.player.async_volume_down(), self.hass.loop).result()
        assert self.player.volume_level == 0.4

    def test_media_play_pause(self):
        """Test the media_play_pause helper function."""
        assert self.player.state == STATE_OFF
        # First call plays, second call pauses.
        run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop).result()
        assert self.player.state == STATE_PLAYING
        run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop).result()
        assert self.player.state == STATE_PAUSED

    def test_toggle(self):
        """Test the toggle helper function."""
        assert self.player.state == STATE_OFF
        run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop).result()
        assert self.player.state == STATE_ON
        run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop).result()
        assert self.player.state == STATE_OFF
class TestSyncMediaPlayer(unittest.TestCase):
    """Exercise the async_* helpers against a sync-only media player."""

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.player = SyncMediaPlayer(self.hass)

    def tearDown(self):
        """Shut down test instance."""
        self.hass.stop()

    def test_volume_up(self):
        """Test the volume_up helper function."""
        assert self.player.volume_level == 0
        self.player.set_volume_level(0.5)
        assert self.player.volume_level == 0.5
        # SyncMediaPlayer.volume_up steps by 0.2 (not the default 0.1),
        # proving the sync override is the one being invoked.
        run_coroutine_threadsafe(
            self.player.async_volume_up(), self.hass.loop).result()
        assert self.player.volume_level == 0.7

    def test_volume_down(self):
        """Test the volume_down helper function."""
        assert self.player.volume_level == 0
        self.player.set_volume_level(0.5)
        assert self.player.volume_level == 0.5
        run_coroutine_threadsafe(
            self.player.async_volume_down(), self.hass.loop).result()
        assert self.player.volume_level == 0.3

    def test_media_play_pause(self):
        """Test the media_play_pause helper function."""
        assert self.player.state == STATE_OFF
        run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop).result()
        assert self.player.state == STATE_PLAYING
        run_coroutine_threadsafe(
            self.player.async_media_play_pause(), self.hass.loop).result()
        assert self.player.state == STATE_PAUSED

    def test_toggle(self):
        """Test the toggle helper function."""
        assert self.player.state == STATE_OFF
        run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop).result()
        assert self.player.state == STATE_ON
        run_coroutine_threadsafe(
            self.player.async_toggle(), self.hass.loop).result()
        assert self.player.state == STATE_OFF
| 33.3125
| 77
| 0.641785
|
import unittest
import asyncio
import homeassistant.components.media_player as mp
from homeassistant.const import (
STATE_PLAYING, STATE_PAUSED, STATE_ON, STATE_OFF, STATE_IDLE)
from homeassistant.util.async_ import run_coroutine_threadsafe
from tests.common import get_test_home_assistant
class AsyncMediaPlayer(mp.MediaPlayerDevice):
def __init__(self, hass):
self.hass = hass
self._volume = 0
self._state = STATE_OFF
@property
def state(self):
return self._state
@property
def volume_level(self):
return self._volume
@asyncio.coroutine
def async_set_volume_level(self, volume):
self._volume = volume
@asyncio.coroutine
def async_media_play(self):
self._state = STATE_PLAYING
@asyncio.coroutine
def async_media_pause(self):
self._state = STATE_PAUSED
@asyncio.coroutine
def async_turn_on(self):
self._state = STATE_ON
@asyncio.coroutine
def async_turn_off(self):
self._state = STATE_OFF
class SyncMediaPlayer(mp.MediaPlayerDevice):
def __init__(self, hass):
self.hass = hass
self._volume = 0
self._state = STATE_OFF
@property
def state(self):
return self._state
@property
def volume_level(self):
return self._volume
def set_volume_level(self, volume):
self._volume = volume
def volume_up(self):
if self.volume_level < 1:
self.set_volume_level(min(1, self.volume_level + .2))
def volume_down(self):
if self.volume_level > 0:
self.set_volume_level(max(0, self.volume_level - .2))
def media_play_pause(self):
if self._state == STATE_PLAYING:
self._state = STATE_PAUSED
else:
self._state = STATE_PLAYING
def toggle(self):
if self._state in [STATE_OFF, STATE_IDLE]:
self._state = STATE_ON
else:
self._state = STATE_OFF
@asyncio.coroutine
def async_media_play_pause(self):
yield from super().async_media_play_pause()
@asyncio.coroutine
def async_toggle(self):
yield from super().async_toggle()
class TestAsyncMediaPlayer(unittest.TestCase):
def setUp(self):
self.hass = get_test_home_assistant()
self.player = AsyncMediaPlayer(self.hass)
def tearDown(self):
self.hass.stop()
def test_volume_up(self):
assert self.player.volume_level == 0
run_coroutine_threadsafe(
self.player.async_set_volume_level(0.5), self.hass.loop).result()
assert self.player.volume_level == 0.5
run_coroutine_threadsafe(
self.player.async_volume_up(), self.hass.loop).result()
assert self.player.volume_level == 0.6
def test_volume_down(self):
assert self.player.volume_level == 0
run_coroutine_threadsafe(
self.player.async_set_volume_level(0.5), self.hass.loop).result()
assert self.player.volume_level == 0.5
run_coroutine_threadsafe(
self.player.async_volume_down(), self.hass.loop).result()
assert self.player.volume_level == 0.4
def test_media_play_pause(self):
assert self.player.state == STATE_OFF
run_coroutine_threadsafe(
self.player.async_media_play_pause(), self.hass.loop).result()
assert self.player.state == STATE_PLAYING
run_coroutine_threadsafe(
self.player.async_media_play_pause(), self.hass.loop).result()
assert self.player.state == STATE_PAUSED
def test_toggle(self):
assert self.player.state == STATE_OFF
run_coroutine_threadsafe(
self.player.async_toggle(), self.hass.loop).result()
assert self.player.state == STATE_ON
run_coroutine_threadsafe(
self.player.async_toggle(), self.hass.loop).result()
assert self.player.state == STATE_OFF
class TestSyncMediaPlayer(unittest.TestCase):
def setUp(self):
self.hass = get_test_home_assistant()
self.player = SyncMediaPlayer(self.hass)
def tearDown(self):
self.hass.stop()
def test_volume_up(self):
assert self.player.volume_level == 0
self.player.set_volume_level(0.5)
assert self.player.volume_level == 0.5
run_coroutine_threadsafe(
self.player.async_volume_up(), self.hass.loop).result()
assert self.player.volume_level == 0.7
def test_volume_down(self):
assert self.player.volume_level == 0
self.player.set_volume_level(0.5)
assert self.player.volume_level == 0.5
run_coroutine_threadsafe(
self.player.async_volume_down(), self.hass.loop).result()
assert self.player.volume_level == 0.3
def test_media_play_pause(self):
assert self.player.state == STATE_OFF
run_coroutine_threadsafe(
self.player.async_media_play_pause(), self.hass.loop).result()
assert self.player.state == STATE_PLAYING
run_coroutine_threadsafe(
self.player.async_media_play_pause(), self.hass.loop).result()
assert self.player.state == STATE_PAUSED
def test_toggle(self):
assert self.player.state == STATE_OFF
run_coroutine_threadsafe(
self.player.async_toggle(), self.hass.loop).result()
assert self.player.state == STATE_ON
run_coroutine_threadsafe(
self.player.async_toggle(), self.hass.loop).result()
assert self.player.state == STATE_OFF
| true
| true
|
1c4a2fb0bc1d5a5a20808b69120dab5af1370e7c
| 1,850
|
py
|
Python
|
main.py
|
qianqianjun/DCGAN
|
4e2d37f1d785e592e59334b91d197ef0475c1c99
|
[
"MIT"
] | null | null | null |
main.py
|
qianqianjun/DCGAN
|
4e2d37f1d785e592e59334b91d197ef0475c1c99
|
[
"MIT"
] | null | null | null |
main.py
|
qianqianjun/DCGAN
|
4e2d37f1d785e592e59334b91d197ef0475c1c99
|
[
"MIT"
] | null | null | null |
"""
write by qianqianjun
2019.12.20
运行GAN进行训练的入口文件。
"""
import os
import tensorflow as tf
from train_argparse import hps
from dataset_loader import train_images
from data_provider import MnistData
from DCGAN import DCGAN
from utils import combine_imgs
# 创建生成结果目录
output_dir='./out'
if not os.path.exists(output_dir):
os.mkdir(output_dir)
# 创建DCGAN
dcgan=DCGAN(hps)
# 加载Mnist 数据集
mnist_data=MnistData(train_images,hps.z_dim,hps.img_size)
# 构建计算图模型
z_placeholder,img_placeholder,generated_imgs,losses=dcgan.build()
# 构建训练过程模型
train_op=dcgan.build_train_op(losses,hps.learning_rate,hps.beta1)
# 开始进行训练~ :
init_op=tf.global_variables_initializer()
# 定义训练多少步
train_steps=hps.train_steps
with tf.Session() as sess:
sess.run(init_op)
for step in range(train_steps):
batch_imgs,batch_z=mnist_data.next_batch(hps.batch_size)
fetches=[train_op,losses['g'],losses['d']]
should_sample=(step+1) %100 ==0
# 如果到了该保存中间结果的步骤,则run 的时候在 fetches 中加上生成的图像
if should_sample:
fetches+= [generated_imgs]
output_values=sess.run(
fetches,feed_dict={
z_placeholder:batch_z,
img_placeholder:batch_imgs,
}
)
_,g_loss_val,d_loss_val=output_values[0:3]
# 打印训练过程的损失情况
if (step+1) %200==0:
print('step: %4d , g_loss: %4.3f , d_loss: %4.3f' % (step, g_loss_val, d_loss_val))
# 保存中间过程图片结果:
if should_sample:
gen_imgs_val=output_values[3]
gen_img_path=os.path.join(output_dir,'%05d-gen.jpg' % (step+1))
gt_img_path=os.path.join(output_dir,'%05d-gt.jpg' % (step+1))
gen_img=combine_imgs(gen_imgs_val,hps.img_size)
gt_img=combine_imgs(batch_imgs,hps.img_size)
gen_img.save(gen_img_path)
gt_img.save(gt_img_path)
| 30.327869
| 95
| 0.675676
|
import os
import tensorflow as tf
from train_argparse import hps
from dataset_loader import train_images
from data_provider import MnistData
from DCGAN import DCGAN
from utils import combine_imgs
output_dir='./out'
if not os.path.exists(output_dir):
os.mkdir(output_dir)
dcgan=DCGAN(hps)
mnist_data=MnistData(train_images,hps.z_dim,hps.img_size)
z_placeholder,img_placeholder,generated_imgs,losses=dcgan.build()
train_op=dcgan.build_train_op(losses,hps.learning_rate,hps.beta1)
init_op=tf.global_variables_initializer()
train_steps=hps.train_steps
with tf.Session() as sess:
sess.run(init_op)
for step in range(train_steps):
batch_imgs,batch_z=mnist_data.next_batch(hps.batch_size)
fetches=[train_op,losses['g'],losses['d']]
should_sample=(step+1) %100 ==0
if should_sample:
fetches+= [generated_imgs]
output_values=sess.run(
fetches,feed_dict={
z_placeholder:batch_z,
img_placeholder:batch_imgs,
}
)
_,g_loss_val,d_loss_val=output_values[0:3]
if (step+1) %200==0:
print('step: %4d , g_loss: %4.3f , d_loss: %4.3f' % (step, g_loss_val, d_loss_val))
if should_sample:
gen_imgs_val=output_values[3]
gen_img_path=os.path.join(output_dir,'%05d-gen.jpg' % (step+1))
gt_img_path=os.path.join(output_dir,'%05d-gt.jpg' % (step+1))
gen_img=combine_imgs(gen_imgs_val,hps.img_size)
gt_img=combine_imgs(batch_imgs,hps.img_size)
gen_img.save(gen_img_path)
gt_img.save(gt_img_path)
| true
| true
|
1c4a30464aa70d1e27bbeb964001a464d4795d36
| 11,724
|
py
|
Python
|
shellbot/listener.py
|
bernard357/shellbot
|
daf64fbab4085d1591bf9a1aecd06b4fc615d132
|
[
"Apache-2.0"
] | 11
|
2017-04-30T18:10:27.000Z
|
2021-11-07T16:59:29.000Z
|
shellbot/listener.py
|
DataCraft-AI/shellbot
|
daf64fbab4085d1591bf9a1aecd06b4fc615d132
|
[
"Apache-2.0"
] | 38
|
2017-04-20T17:33:05.000Z
|
2017-11-10T20:19:07.000Z
|
shellbot/listener.py
|
DataCraft-AI/shellbot
|
daf64fbab4085d1591bf9a1aecd06b4fc615d132
|
[
"Apache-2.0"
] | 3
|
2017-04-21T21:14:53.000Z
|
2021-07-27T22:01:21.000Z
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from multiprocessing import Process
import random
from six import string_types
import time
import yaml
from .events import Event, Message, Join, Leave
class Listener(Process):
    """
    Handles messages received from chat space
    """

    DEFER_DURATION = 2.0  # let SSL stabilize before pumping from the queue

    EMPTY_DELAY = 0.005  # time to wait if queue is empty

    FRESH_DURATION = 0.5  # maximum amount of time for listener detection

    def __init__(self, engine=None, filter=None):
        """
        Handles events received from chat spaces

        :param engine: the overarching engine
        :type engine: Engine

        :param filter: if provided, used to filter every event
        :type filter: callable

        If a ``filter`` is provided, then it is called for each event received.
        An event may be a Message, a Join or Leave notification,
        or any other Event.

        Example::

            def filter(event):

                # duplicate input stream
                my_queue.put(str(event))

                # change input stream
                event.text = event.text.title()

                return event

            listener = Listener(filter=filter)
        """
        Process.__init__(self)
        self.engine = engine
        self.filter = filter

    def run(self):
        """
        Continuously receives updates

        This function is looping on items received from the queue, and
        is handling them one by one in the background.

        Processing should be handled in a separate background process, like
        in the following example::

            listener = Listener(engine=my_engine)
            process = listener.start()

        The recommended way for stopping the process is to change the
        parameter ``general.switch`` in the context. For example::

            engine.set('general.switch', 'off')

        Alternatively, the loop is also broken when a poison pill is pushed
        to the queue. For example::

            engine.ears.put(None)
        """
        logging.info(u"Starting listener")

        time.sleep(self.DEFER_DURATION)  # let SSL stabilize first

        try:
            self.engine.set('listener.counter', 0)
            while self.engine.get('general.switch', 'on') == 'on':

                # Someone holds the listener lock -- spin briefly and retry.
                if self.engine.get('listener.lock', 'off') == 'on':
                    time.sleep(0.001)
                    continue

                # Nothing queued: do housekeeping instead of busy-waiting.
                if self.engine.ears.empty():
                    self.idle()
                    time.sleep(self.EMPTY_DELAY)
                    continue

                try:
                    item = self.engine.ears.get_nowait()
                    if item is None:
                        # Poison pill: stop the loop.
                        break
                    self.process(item)

                except Exception as feedback:
                    # Keep the listener alive on per-item failures.
                    logging.exception(feedback)

        except KeyboardInterrupt:
            pass

        logging.info(u"Listener has been stopped")

    def idle(self):
        """
        Finds something smart to do
        """
        # Priority 1: schedule loading of any bot queued for this process.
        if self.engine.bots_to_load:
            id = self.engine.bots_to_load.pop()
            self.engine.ears.put({'type': 'load_bot', 'id': id})

        # Priority 2: keep a ~5-second vacuum heartbeat stamp.
        elif not self.engine.get('vacuum.stamp'):
            self.engine.set('vacuum.stamp', time.time())

        elif time.time() - self.engine.get('vacuum.stamp') > 5.0:
            self.engine.set('vacuum.stamp', time.time())
            if self.engine.bots:
                # NOTE(review): a random bot id is picked but the vacuum push
                # below is still disabled -- no vacuum actually happens yet.
                id = random.choice(list(self.engine.bots.keys()))
#                if id:
#                    logging.debug(u"- to vacuum: {}".format(id))
#                    self.engine.vacuum.put()

    def process(self, item):
        """
        Processes items received from the chat space

        :param item: the item received
        :type item: dict or json-encoded string

        This function dispatches items based on their type. The type is
        a key of the provided dict.

        Following types are handled:

        * ``message`` -- This is a textual message, maybe with a file attached.
          The message is given to the ``on_message()`` function.

        * ``join`` -- This is when a person or the bot joins a space.
          The function ``on_join()`` is called, providing details on the
          person or the bot who joined

        * ``leave`` -- This is when a person or the bot leaves a space.
          The function ``on_leave()`` is called with details on the
          leaving person or bot.

        * ``load_bot`` -- This is a special event to load the cache in the
          process that is running the listener. The identifier of the channel
          to load is provided as well.

        * on any other case, the function ``on_inbound()`` is
          called.
        """
        counter = self.engine.context.increment('listener.counter')
        logging.debug(u'Listener is working on {}'.format(counter))

        if isinstance(item, string_types):
            item = yaml.safe_load(item)  # better unicode than json.loads()

        assert isinstance(item, dict)  # low-level event representation

        if item['type'] == 'message':
            logging.debug(u"- processing a 'message' event")
            event = Message(item)
            if self.filter:
                event = self.filter(event)
            self.on_message(event)

        elif item['type'] == 'join':
            logging.debug(u"- processing a 'join' event")
            event = Join(item)
            if self.filter:
                event = self.filter(event)
            self.on_join(event)

        elif item['type'] == 'leave':
            logging.debug(u"- processing a 'leave' event")
            event = Leave(item)
            if self.filter:
                event = self.filter(event)
            self.on_leave(event)

        elif item['type'] == 'load_bot':
            logging.debug(u"- processing a 'load_bot' event")
            # get_bot is called for its side effect of loading the bot into
            # this process; the returned value itself is not used here.
            bot = self.engine.get_bot(channel_id=item['id'])

        else:
            logging.debug(u"- processing an inbound event")
            event = Event(item)
            if self.filter:
                event = self.filter(event)
            self.on_inbound(event)

    def on_message(self, received):
        """
        A message has been received

        :param received: the message received
        :type received: Message

        Received information is transmitted to registered callbacks on the
        ``message`` event at the engine level.

        When a message is directed to the bot it is submitted directly to the
        shell. This is handled as a command, that can be executed immediately,
        or pushed to the inbox and processed by the worker when possible.

        All other input is thrown away, except if there is some
        downwards listeners. In that situation the input is pushed to a queue
        so that some process can pick it up and process it.

        The protocol for downwards listeners works like this:

        * Check the ``bot.fan`` queue frequently

        * On each check, update the string ``fan.<channel_id>`` in the context
          with the value of ``time.time()``. This will say that you are around.

        The value of ``fan.<channel_id>`` is checked on every message that is not
        for the bot itself. If this is fresh enough, then data is put to the
        ``bot.fan`` queue. Else message is just thrown away.
        """
        assert received.type == 'message'  # sanity check
        self.engine.dispatch('message', received=received)

        bot = self.engine.get_bot(received.channel_id)

        # Ignore echoes of the bot's own messages.
        if received.from_id == self.engine.get('bot.id'):
            logging.debug(u"- sent by me, thrown away")
            return

        input = received.text

        if input is None:
            logging.debug(u"- no input in this item, thrown away")
            return

        # Strip a single leading command marker, if any.
        if len(input) > 0 and input[0] in ['@', '/', '!']:
            input = input[1:]

        # Give a fresh downstream (fan) listener first claim on the input.
        label = 'fan.' + received.channel_id
        logging.debug(u"- sensing fan listener on '{}'".format(label))
        elapsed = time.time() - self.engine.get(label, 0)
        if elapsed < self.FRESH_DURATION:
            logging.debug(u"- putting input to fan queue")
            bot.fan.put(input)  # forward downstream
            return

        # Only handle input that is addressed to the bot.
        name = self.engine.get('bot.name', 'shelly')
        if input.startswith(name):
            logging.debug(u"- bot name in command")
            input = input[len(name):].strip()

        elif received.is_direct:
            logging.debug(u"- direct message")

        elif self.engine.get('bot.id') in received.mentioned_ids:
            logging.debug(u"- bot mentioned in command")

        else:
            logging.info(u"- not for me, thrown away")
            return

        logging.debug(u"- submitting command to the shell")
        self.engine.shell.do(input, received=received)

    def on_join(self, received):
        """
        A person, or the bot, has joined a space

        :param received: the event received
        :type received: Join

        Received information is transmitted to registered callbacks on the
        ``join`` at the engine level.

        In the special case where the bot itself is joining a channel by
        invitation, then the event ``enter`` is dispatched instead.
        """
        assert received.type == 'join'
        if received.actor_id == self.engine.get('bot.id'):
            self.engine.dispatch('enter', received=received)
            # get_bot loads the bot for this channel as a side effect.
            bot = self.engine.get_bot(received.channel_id)
            self.engine.on_enter(received)
        else:
            bot = self.engine.get_bot(received.channel_id)
            self.engine.dispatch('join', received=received)

    def on_leave(self, received):
        """
        A person, or the bot, has left a space

        :param received: the event received
        :type received: Leave

        Received information is transmitted to registered callbacks on the
        ``leave`` at the engine level.

        In the special case where the bot itself has been kicked off
        from a channel, then the event ``exit`` is dispatched instead.
        """
        assert received.type == 'leave'
        if received.actor_id == self.engine.get('bot.id'):
            self.engine.dispatch('exit', received=received)
            self.engine.on_exit(received)
        else:
            self.engine.dispatch('leave', received=received)

    def on_inbound(self, received):
        """
        Another event has been received

        :param received: the event received
        :type received: Event or derivative

        Received information is transmitted to registered callbacks on the
        ``inbound`` at the engine level.
        """
        assert received.type not in ('message', 'join', 'leave')
        self.engine.dispatch('inbound', received=received)
| 33.212465
| 81
| 0.598601
|
import json
import logging
from multiprocessing import Process
import random
from six import string_types
import time
import yaml
from .events import Event, Message, Join, Leave
class Listener(Process):
    """
    Pulls items from the engine's ``ears`` queue and routes each one to
    the matching ``on_*`` handler, based on the item ``type``.

    Runs as a separate process (subclass of ``multiprocessing.Process``).
    """

    # seconds to wait before the polling loop actually starts
    DEFER_DURATION = 2.0

    # seconds to sleep when the ears queue is empty
    EMPTY_DELAY = 0.005

    # window, in seconds, during which a registered fan grabs the input
    FRESH_DURATION = 0.5

    def __init__(self, engine=None, filter=None):
        """
        :param engine: the overarching engine this listener serves
        :param filter: optional callable applied to every event before it
            is handled; may transform or replace the event
        """
        Process.__init__(self)
        self.engine = engine
        self.filter = filter

    def run(self):
        """
        Poll the ears queue until stopped.

        The loop ends when ``general.switch`` is turned off, when a
        ``None`` item (poison pill) is read, or on keyboard interrupt.
        """
        logging.info(u"Starting listener")
        time.sleep(self.DEFER_DURATION)
        try:
            self.engine.set('listener.counter', 0)
            while self.engine.get('general.switch', 'on') == 'on':
                # honour an external pause request
                if self.engine.get('listener.lock', 'off') == 'on':
                    time.sleep(0.001)
                    continue
                if self.engine.ears.empty():
                    self.idle()
                    time.sleep(self.EMPTY_DELAY)
                    continue
                try:
                    item = self.engine.ears.get_nowait()
                    if item is None:
                        # poison pill: stop listening
                        break
                    self.process(item)
                except Exception as feedback:
                    # keep the loop alive on per-item failures
                    logging.exception(feedback)
        except KeyboardInterrupt:
            pass
        logging.info(u"Listener has been stopped")

    def idle(self):
        """
        Perform background housekeeping while the queue is empty.
        """
        if self.engine.bots_to_load:
            # schedule the loading of one pending bot
            id = self.engine.bots_to_load.pop()
            self.engine.ears.put({'type': 'load_bot', 'id': id})
        elif not self.engine.get('vacuum.stamp'):
            self.engine.set('vacuum.stamp', time.time())
        elif time.time() - self.engine.get('vacuum.stamp') > 5.0:
            # at most one vacuum cycle every 5 seconds
            self.engine.set('vacuum.stamp', time.time())
            if self.engine.bots:
                # NOTE(review): the selected id is unused below; the action
                # that consumed it appears to have been stripped — confirm
                # against the original source
                id = random.choice(list(self.engine.bots.keys()))

    def process(self, item):
        """
        Route one inbound item to the appropriate handler.

        :param item: a dict, or a serialised (YAML/JSON) string of one
        """
        counter = self.engine.context.increment('listener.counter')
        logging.debug(u'Listener is working on {}'.format(counter))
        if isinstance(item, string_types):
            # decode serialised payloads before dispatching
            item = yaml.safe_load(item)
        assert isinstance(item, dict)
        if item['type'] == 'message':
            logging.debug(u"- processing a 'message' event")
            event = Message(item)
            if self.filter:
                event = self.filter(event)
            self.on_message(event)
        elif item['type'] == 'join':
            logging.debug(u"- processing a 'join' event")
            event = Join(item)
            if self.filter:
                event = self.filter(event)
            self.on_join(event)
        elif item['type'] == 'leave':
            logging.debug(u"- processing a 'leave' event")
            event = Leave(item)
            if self.filter:
                event = self.filter(event)
            self.on_leave(event)
        elif item['type'] == 'load_bot':
            logging.debug(u"- processing a 'load_bot' event")
            # instantiating the bot is the desired side effect here
            bot = self.engine.get_bot(channel_id=item['id'])
        else:
            # any other type is forwarded as a generic inbound event
            logging.debug(u"- processing an inbound event")
            event = Event(item)
            if self.filter:
                event = self.filter(event)
            self.on_inbound(event)

    def on_message(self, received):
        """
        Handle one chat message.

        :param received: the message event
        """
        assert received.type == 'message'
        self.engine.dispatch('message', received=received)
        bot = self.engine.get_bot(received.channel_id)
        if received.from_id == self.engine.get('bot.id'):
            logging.debug(u"- sent by me, thrown away")
            return
        input = received.text
        if input is None:
            logging.debug(u"- no input in this item, thrown away")
            return
        # strip a leading command marker, if any
        if len(input) > 0 and input[0] in ['@', '/', '!']:
            input = input[1:]
        label = 'fan.' + received.channel_id
        logging.debug(u"- sensing fan listener on '{}'".format(label))
        elapsed = time.time() - self.engine.get(label, 0)
        if elapsed < self.FRESH_DURATION:
            # a fan was registered recently: divert the input to it
            logging.debug(u"- putting input to fan queue")
            bot.fan.put(input)
            return
        name = self.engine.get('bot.name', 'shelly')
        if input.startswith(name):
            logging.debug(u"- bot name in command")
            input = input[len(name):].strip()
        elif received.is_direct:
            logging.debug(u"- direct message")
        elif self.engine.get('bot.id') in received.mentioned_ids:
            logging.debug(u"- bot mentioned in command")
        else:
            logging.info(u"- not for me, thrown away")
            return
        logging.debug(u"- submitting command to the shell")
        self.engine.shell.do(input, received=received)

    def on_join(self, received):
        """
        Handle a 'join' event.

        When the bot itself joins a channel the event is re-labelled
        ``enter``; otherwise it is dispatched as ``join``.
        """
        assert received.type == 'join'
        if received.actor_id == self.engine.get('bot.id'):
            self.engine.dispatch('enter', received=received)
            bot = self.engine.get_bot(received.channel_id)
            self.engine.on_enter(received)
        else:
            bot = self.engine.get_bot(received.channel_id)
            self.engine.dispatch('join', received=received)

    def on_leave(self, received):
        """
        Handle a 'leave' event.

        When the bot itself has been kicked off a channel the event is
        re-labelled ``exit``; otherwise it is dispatched as ``leave``.
        """
        assert received.type == 'leave'
        if received.actor_id == self.engine.get('bot.id'):
            self.engine.dispatch('exit', received=received)
            self.engine.on_exit(received)
        else:
            self.engine.dispatch('leave', received=received)

    def on_inbound(self, received):
        """
        Forward any other event type to ``inbound`` callbacks.
        """
        assert received.type not in ('message', 'join', 'leave')
        self.engine.dispatch('inbound', received=received)
| true
| true
|
1c4a30519feee2e48d05593d11019103cfebb776
| 469
|
py
|
Python
|
src/Edge.py
|
ZacharyJohnson1/python-graph-theory
|
885096f9066e22b477d0c58d9cec5db2a62571c1
|
[
"MIT"
] | null | null | null |
src/Edge.py
|
ZacharyJohnson1/python-graph-theory
|
885096f9066e22b477d0c58d9cec5db2a62571c1
|
[
"MIT"
] | null | null | null |
src/Edge.py
|
ZacharyJohnson1/python-graph-theory
|
885096f9066e22b477d0c58d9cec5db2a62571c1
|
[
"MIT"
] | null | null | null |
class Edge:
    """A weighted edge between two vertices.

    :param u: source vertex; must expose an ``id`` attribute
    :param v: destination vertex; must expose an ``id`` attribute
    :param w: edge weight, used for ordering comparisons
    """

    def __init__(self, u, v, w):
        self.u = u
        self.v = v
        self.w = w

    def __eq__(self, edge):
        # Edges are equal when they connect the same ordered pair of
        # vertex ids; the weight is deliberately ignored.
        return self.u.id == edge.u.id and self.v.id == edge.v.id

    def __ge__(self, v):
        # Bug fix: ">=" must be inclusive. The previous implementation
        # used a strict ">", so ``e >= e`` evaluated to False, violating
        # the reflexivity expected of the >= operator.
        return self.w >= v.w

    def __lt__(self, v):
        return self.w < v.w

    @staticmethod
    def weight(edge):
        """Key function returning the weight of *edge* (for sorted/min/max)."""
        return edge.w
| 15.129032
| 61
| 0.503198
|
class Edge:
    """A weighted edge linking vertex ``u`` to vertex ``v``.

    Equality compares vertex ids only; ordering compares weights.
    """

    def __init__(self, u, v, w):
        self.u, self.v, self.w = u, v, w

    def __eq__(self, edge):
        same_source = self.u.id == edge.u.id
        same_target = self.v.id == edge.v.id
        return same_source and same_target

    def __ge__(self, v):
        # NOTE: strict comparison, kept as-is to preserve behavior
        return self.w > v.w

    def __lt__(self, v):
        return self.w < v.w

    @staticmethod
    def weight(edge):
        """Return the weight of *edge*; usable as a sort key."""
        return edge.w
| true
| true
|
1c4a3097d9a65dce695cc13d39c358f32e52ebdc
| 81
|
py
|
Python
|
abc/161/A.py
|
tonko2/AtCoder
|
5d617072517881d226d7c8af09cb88684d41af7e
|
[
"Xnet",
"X11",
"CECILL-B"
] | 2
|
2022-01-22T07:56:58.000Z
|
2022-01-24T00:29:37.000Z
|
abc/161/A.py
|
tonko2/AtCoder
|
5d617072517881d226d7c8af09cb88684d41af7e
|
[
"Xnet",
"X11",
"CECILL-B"
] | null | null | null |
abc/161/A.py
|
tonko2/AtCoder
|
5d617072517881d226d7c8af09cb88684d41af7e
|
[
"Xnet",
"X11",
"CECILL-B"
] | null | null | null |
# AtCoder ABC 161 A - "ABC Swap".
# Read three integers and apply the two prescribed swaps before printing.
X, Y, Z = map(int, input().split())
X, Y = Y, X  # first swap the contents of boxes A and B
X, Z = Z, X  # then swap the contents of boxes A and C
print(f'{X} {Y} {Z}')
| 20.25
| 35
| 0.444444
|
# AtCoder ABC 161 A - "ABC Swap" (comment-stripped copy of the same script).
# Read three integers and apply the two prescribed swaps before printing.
X, Y, Z = map(int, input().split())
X, Y = Y, X  # first swap the contents of boxes A and B
X, Z = Z, X  # then swap the contents of boxes A and C
print(f'{X} {Y} {Z}')
| true
| true
|
1c4a30bb90b9f5879da24bf6517fa49b6226eb8b
| 73,574
|
py
|
Python
|
user_agent_parser/constants.py
|
Purushot14/user-agent-parser
|
5b6a3cb0d024ce1a2822a617e5a18b45806f8d1b
|
[
"MIT"
] | null | null | null |
user_agent_parser/constants.py
|
Purushot14/user-agent-parser
|
5b6a3cb0d024ce1a2822a617e5a18b45806f8d1b
|
[
"MIT"
] | null | null | null |
user_agent_parser/constants.py
|
Purushot14/user-agent-parser
|
5b6a3cb0d024ce1a2822a617e5a18b45806f8d1b
|
[
"MIT"
] | null | null | null |
"""
Created by prakash at 02/03/22
"""
__author__ = 'Prakash14'
class OS:
    """Canonical operating-system names used by the user-agent parser."""
    WINDOWS = "Windows"
    WINDOWS_PHONE = "Windows Phone"
    ANDROID = "Android"
    MAC_OS = "Mac Os"
    LINUX = "Linux"
    IOS = "iOS"
    CHROME_OS = "Chrome OS"
class DEVICE_TYPE:
    """Coarse device categories a user agent can resolve to."""
    COMPUTER = "Computer"
    MOBILE = "Mobile"
    SERVER = "Server"
    BOT = "Bot"
class DEVICE_NAME:
    """Human-readable device names for well-known device families."""
    IPHONE = "iPhone"
    IPAD = "iPad"
    MAC = "Mac"
    CHROME_BOOK = "Chrome Book"
    ANDROID = "Android Phone"
MOBILE_DEVICE_CODE_NAME = {
# OnePlus Devices
"AC2003": "OnePlus Nord 5G",
"EB2101": "OnePlus Nord CE 5G",
"EB2103": "OnePlus Nord CE 5G",
"DN2101": "OnePlus Nord 5G",
"DN2103": "OnePlus Nord 5G",
"AC2001": "OnePlus Nord",
"GM1901": "OnePlus 7",
"A6000": "OnePlus 6",
"A6010": "OnePlus 6T",
"A6003": "OnePlus 6",
"A5010": "OnePlus 5T",
"A5000": "OnePlus 5",
"LE2101": "OnePlus 9R",
"LE2100": "OnePlus 9R",
"LE2113": "OnePlus 9",
"LE2111": "OnePlus 9",
"LE2110": "OnePlus 9",
"LE2117": "OnePlus 9",
"LE2121": "OnePlus 9 Pro",
"LE2125": "OnePlus 9 Pro",
"LE2123": "OnePlus 9 Pro",
"LE2120": "OnePlus 9 Pro",
"LE2127": "OnePlus 9 Pro",
"GM1911": "OnePlus 7 Pro",
"GM1913": "OnePlus 7 Pro",
"GM1917": "OnePlus 7 Pro",
"GM1910": "OnePlus 7 Pro",
"GM1915": "OnePlus 7 Pro",
"HD1901": "OnePlus 7T",
"HD1903": "OnePlus 7T",
"HD1900": "OnePlus 7T",
"HD1907": "OnePlus 7T",
"HD1905": "OnePlus 7T",
"HD1911": "OnePlus 7T",
"KB2001": "OnePlus 8T",
"KB2000": "OnePlus 8T",
"KB2003": "OnePlus 8T",
"KB2005": "OnePlus 8T",
"IN2013": "OnePlus 8",
"IN2017": "OnePlus 8",
"IN2019": "OnePlus 8",
"IN2010": "OnePlus 8",
"IN2011": "OnePlus 8",
"IN2021": "OnePlus 8 Pro",
"IN2023": "OnePlus 8 Pro",
"IN2020": "OnePlus 8 Pro",
"IN2025": "OnePlus 8 Pro",
# Samsung Devices
"SM-X900": "Samsung Galaxy Tab S8 Ultra",
"SM-X906": "Samsung Galaxy Tab S8 Ultra",
"SM-X800": "Samsung Galaxy Tab S8+",
"SM-X806": "Samsung Galaxy Tab S8+",
"SM-X700": "Samsung Galaxy Tab S8",
"SM-X706": "Samsung Galaxy Tab S8",
"SM-S908": "Samsung Galaxy S22 Ultra",
"SM-S906": "Samsung Galaxy S22+",
"SM-S901": "Samsung Galaxy S22",
"SM-G990": "Samsung Galaxy S21 FE",
"SM-A136": "Samsung Galaxy A13 5G",
"SM-X200": "Samsung Galaxy Tab A8 10.5",
"SM-X205": "Samsung Galaxy Tab A8 10.5",
"SM-A032": "Samsung Galaxy A03 Core",
"SM-E426": "Samsung Galaxy F42 5G",
"SM-M526": "Samsung Galaxy M52 5G",
"SM-M225": "Samsung Galaxy M22",
"SM-M326": "Samsung Galaxy M32 5G",
"SM-A037": "Samsung Galaxy A03s",
"SM-A528": "Samsung Galaxy A52s 5G",
"SM-F926B": "Samsung Galaxy Z Fold3 5G",
"SM-F711B": "Samsung Galaxy Z Flip3 5G",
"SM-E225": "Samsung Galaxy F22",
"SM-M325": "Samsung Galaxy M32",
"SM-A226": "Samsung Galaxy A22 5G",
"SM-A225": "Samsung Galaxy A22",
"SM-T730": "Samsung Galaxy Tab S7 FE",
"SM-T736B": "Samsung Galaxy Tab S7 FE",
"SM-T220": "Samsung Galaxy Tab A7 Lite",
"SM-T225": "Samsung Galaxy Tab A7 Lite",
"SM-E526": "Samsung Galaxy F52 5G",
"SM-M426": "Samsung Galaxy M42 5G",
"SM-E025": "Samsung Galaxy F02s",
"SM-F127": "Samsung Galaxy F12",
"SM-A725": "Samsung Galaxy A72",
"SM-A526": "Samsung Galaxy A52 5G",
"SM-A525": "Samsung Galaxy A52",
"SM-A325": "Samsung Galaxy A32",
"SM-M625": "Samsung Galaxy M62",
"SM-E625": "Samsung Galaxy F62",
"SM-M127": "Samsung Galaxy M12",
"SM-M022": "Samsung Galaxy M02",
"SM-A022": "Samsung Galaxy A02",
"SM-G991": "Samsung Galaxy S21",
"SM-G996": "Samsung Galaxy S21+",
"SM-G998": "Samsung Galaxy S21 Ultra",
"SM-A326": "Samsung Galaxy A32 5G",
"SM-M025": "Samsung Galaxy M02s",
"SM-A025": "Samsung Galaxy A02s",
"SM-A125": "Samsung Galaxy A12",
"SM-M217": "Samsung Galaxy M21s",
"SM-A426": "Samsung Galaxy A42 5G",
"SM-F415": "Samsung Galaxy F41",
"SM-G780": "Samsung Galaxy S20 FE",
"SM-G781": "Samsung Galaxy S20 FE",
"SM-F916B": "Samsung Galaxy Z Fold2 5G",
"SM-M515": "Samsung Galaxy M51",
"SM-N980": "Samsung Galaxy Note 20",
"SM-N981": "Samsung Galaxy Note 20",
"SM-N985": "Samsung Galaxy Note 20 Ultra",
"SM-N986": "Samsung Galaxy Note 20 Ultra",
"SM-F707": "Samsung Galaxy Z Flip 5G",
"SM-T870": "Samsung Galaxy Tab S7",
"SM-T875": "Samsung Galaxy Tab S7",
"SM-T876B": "Samsung Galaxy Tab S7",
"SM-T970": "Samsung Galaxy Tab S7+",
"SM-T976B": "Samsung Galaxy Tab S7+",
"SM-M317": "Samsung Galaxy M31s",
"SM-A013": "Samsung Galaxy A01 Core",
"SM-M017": "Samsung Galaxy M01s",
"SM-M015": "Samsung Galaxy M01",
"SM-A217": "Samsung Galaxy A21s",
"SM-A716F": "Samsung Galaxy A71 5G",
"SM-A516F": "Samsung Galaxy A51 5G",
"SM-A215": "Samsung Galaxy A21",
"SM-P610N": "Samsung Galaxy Tab S6 Lite",
"SM-P615": "Samsung Galaxy Tab S6 Lite",
"SM-G980": "Samsung Galaxy S20",
"SM-G981": "Samsung Galaxy S20",
"SM-G985": "Samsung Galaxy S20+",
"SM-G986": "Samsung Galaxy S20+",
"SM-G988": "Samsung Galaxy S20 Ultra",
"SM-M115": "Samsung Galaxy M11",
"SM-M115F": "Samsung Galaxy M11",
"SM-A315": "Samsung Galaxy A31",
"SM-A415": "Samsung Galaxy A41",
"SM-M215": "Samsung Galaxy M21",
"SM-A115": "Samsung Galaxy A11",
"SM-M315": "Samsung Galaxy M31",
"SM-F700": "Samsung Galaxy Z Flip",
"SM-T866N": "Samsung Galaxy Tab S6 5G",
"SM-G715F": "Samsung Galaxy Xcover Pro",
"SM-N770F": "Samsung Galaxy Note 10 Lite",
"SM-G770F": "Samsung Galaxy S10 Lite",
"SM-A015": "Samsung Galaxy A01",
"SM-A715": "Samsung Galaxy A71",
"SM-A515": "Samsung Galaxy A51",
"SM-M307": "Samsung Galaxy M30s",
"SM-A207": "Samsung Galaxy A20s",
"SM-M107": "Samsung Galaxy M10s",
"SM-A707": "Samsung Galaxy A70s",
"SM-A507": "Samsung Galaxy A50s",
"SM-A307": "Samsung Galaxy A30s",
"SM-A908": "Samsung Galaxy A90 5G",
"SM-F900": "Samsung Galaxy Z Fold",
"SM-F907": "Samsung Galaxy Z Fold",
"SM-A107": "Samsung Galaxy A10s",
"SM-A102": "Samsung Galaxy A10e",
"SM-N970": "Samsung Galaxy Note 10",
"SM-N971": "Samsung Galaxy Note 10",
"SM-N975F": "Samsung Galaxy Note 10+",
"SM-N976": "Samsung Galaxy Note 10+",
"SM-M405": "Samsung Galaxy M40",
"SM-G977": "Samsung Galaxy S10 5G",
"SM-T920": "Samsung Galaxy View 2",
"SM-T927": "Samsung Galaxy View 2",
"SM-T927A": "Samsung Galaxy View 2",
"SM-A606": "Samsung Galaxy A60",
"SM-A805": "Samsung Galaxy A80",
"SM-A705": "Samsung Galaxy A70",
"SM-A405": "Samsung Galaxy A40",
"SM-A205": "Samsung Galaxy A20",
"SM-A202": "Samsung Galaxy A20e",
"SM-A260": "Samsung Galaxy A2 Core",
"SM-G975": "Samsung Galaxy S10+",
"SM-G973": "Samsung Galaxy S10",
"SM-G970": "Samsung Galaxy S10e",
"SM-A505": "Samsung Galaxy A50",
"SM-A305": "Samsung Galaxy A30",
"SM-A105": "Samsung Galaxy A10",
"SM-T720": "Samsung Galaxy Tab S5e",
"SM-T725": "Samsung Galaxy Tab S5e",
"SM-T510": "Samsung Galaxy Tab A 10.1 (2019)",
"SM-T515": "Samsung Galaxy Tab A 10.1 (2019)",
"SM-M305": "Samsung Galaxy M30",
"SM-M105": "Samsung Galaxy M10",
"SM-M205": "Samsung Galaxy M20",
"SM-G887": "Samsung Galaxy A8s",
"SM-G6200": "Samsung Galaxy A6s",
"SM-A920": "Samsung Galaxy A9 (2018)",
"SM-A750": "Samsung Galaxy A7 (2018)",
"SM-J415": "Samsung Galaxy J4+",
"SM-J610": "Samsung Galaxy J6+",
"SM-N960": "Samsung Galaxy Note 9",
"SM-T590": "Samsung Galaxy Tab A 10.5 (2018)",
"; SM-T595": "Samsung Galaxy Tab A 10.5 (2018)",
"SM-T830": "Samsung Galaxy Tab S4",
"; SM-T835": "Samsung Galaxy Tab S4",
"SM-J800": "Samsung Galaxy J8 (2018)",
"SM-J600G": "Samsung Galaxy On6",
"SM-G8850": "Samsung Galaxy A8 Star[16]",
"SM-J737": "Samsung Galaxy J7 (2018)",
"SM-A600": "Samsung Galaxy A6 (2018)",
"SM-A605": "Samsung Galaxy A6+ (2018)",
"SM-J400": "Samsung Galaxy J4 (2018)",
"SM-J600": "Samsung Galaxy J6 (2018)",
"SM-J720": "Samsung Galaxy J3 Duo",
"SM-G611": "Samsung Galaxy J4 Prime 2 Samsung Galaxy J7 (2018)",
"SM-G960": "Samsung Galaxy S9",
"SM-G965": "Samsung Galaxy S9+",
"SM-J250": "Samsung Galaxy J4 Pro (2018)",
"SM-A530": "Samsung Galaxy A5 (2018)[17]",
"SM-A730": "Samsung Galaxy A5+ (2018)",
"SM-J200G": "Samsung Galaxy J2",
"SM-T380": "Samsung Galaxy Tab A 8.0 (2017)",
"SM-T385": "Samsung Galaxy Tab A 8.0 (2017)",
"SM-C710": "Samsung Galaxy C8 / C7 (2017)Samsung Galaxy J7+",
"SM-C8000": "Samsung Galaxy C8 / C7 (2017)Samsung Galaxy J7+",
"SM-N950": "Samsung Galaxy Note 8",
"SM-G892": "Samsung Galaxy S8 Active",
"SM-N935": "Samsung Galaxy Note Fan Edition (FE)",
"SM-J727": "Samsung Galaxy J3 (2017)",
"SM-J730x": "Samsung Galaxy J3 (2017)",
"SM-J530": "Samsung Galaxy J5 (2017)",
"SM-J530Y": "Samsung Galaxy J5 (2017)",
"SM-J327": "Samsung Galaxy J7 (2017)",
"SM-J330x": "Samsung Galaxy J7 (2017)",
"SM-J730": "Samsung Galaxy J7 Pro (2017)",
"SM-G615": "Samsung Galaxy J7 Max",
"SM-G390": "Samsung Galaxy Xcover 4",
"SM-G950": "Samsung Galaxy S8",
"SM-G955": "Samsung Galaxy S8+",
"SM-C5010": "Samsung Galaxy C5 Pro",
"SM-T820": "Samsung Galaxy Tab S3",
"SM-T825": "Samsung Galaxy Tab S3",
"SM-A720": "Samsung Galaxy A7 (2017)",
"SM-A520": "Samsung Galaxy A5 (2017)",
"SM-A320": "Samsung Galaxy A3 (2017)",
"SM-C7010": "Samsung Galaxy C5 Pro",
"SM-J106F": "Samsung Galaxy J1 mini Prime/Galaxy V2 (Indonesia)",
"SM-G532F": "Samsung Galaxy J2 Prime",
"SM-G532M": "Samsung Galaxy J2 Prime",
"SM-G532G": "Samsung Galaxy J2 Prime",
"SM-C900F": "Samsung Galaxy C9 Pro",
"SM-A810": "Samsung Galaxy A8 (2016)",
"SM-G570": "Samsung Galaxy On5 (2016)",
"SM-G610": "Samsung Galaxy On5 (2016)",
"SM-J710": "Samsung Galaxy On7 (2016)",
"SM-G610F": "Samsung Galaxy J7 Prime",
"SM-G610M": "Samsung Galaxy J7 Prime",
"SM-N930": "Samsung Galaxy Note 7",
"SM-G570F": "Samsung Galaxy J2 Prime",
"SM-G570M": "Samsung Galaxy J2 Prime",
"SM-G891A": "Samsung Galaxy S7 Active",
"SM-J310F": "Samsung Galaxy J3 Pro",
"SM-T585": "Samsung Galaxy Tab A 10.1 (2016)",
"SM-C5000": "Samsung Galaxy C5",
"SM-C7000": "Samsung Galaxy C7",
"SM-J5109": "Samsung Galaxy J5 (2016)",
"SM-J510F": "Samsung Galaxy J5 (2016)",
"SM-J510FN": "Samsung Galaxy J5 (2016)",
"SM-J510H": "Samsung Galaxy J5 (2016)",
"SM-J510G": "Samsung Galaxy J5 (2016)",
"SM-J510MN": "Samsung Galaxy J5 (2016)",
"SM-J510Y": "Samsung Galaxy J5 (2016)",
"SM-J5108": "Samsung Galaxy J5 (2016)",
"SM-J510K": "Samsung Galaxy J5 (2016)",
"SM-J510L": "Samsung Galaxy J5 (2016)",
"SM-J510S": "Samsung Galaxy J5 (2016)",
"SM-J510UN": "Samsung Galaxy J5 (2016)",
"SM-J7109": "Samsung Galaxy J7 (2016)",
"SM-J710F": "Samsung Galaxy J7 (2016)",
"SM-J710FN": "Samsung Galaxy J7 (2016)",
"SM-J710H": "Samsung Galaxy J7 (2016)",
"SM-J710MN": "Samsung Galaxy J7 (2016)",
"SM-J710FQ": "Samsung Galaxy J7 (2016)",
"SM-J710K": "Samsung Galaxy J7 (2016)",
"SM-J710GN": "Samsung Galaxy J7 (2016)",
"SM-J3109x": "Samsung Galaxy J3 (2016)",
"SM-J320F": "Samsung Galaxy J3 (2016)",
"SM-J320G": "Samsung Galaxy J3 (2016)",
"SM-J320P": "Samsung Galaxy J3 (2016)",
"SM-J320M": "Samsung Galaxy J3 (2016)",
"SM-T280": "Samsung Galaxy Tab A6",
"SM-T285": "Samsung Galaxy Tab A6",
"SM-A9100": "Samsung Galaxy A9 Pro (2016)",
"SM-A910F": "Samsung Galaxy A9 Pro (2016)",
"SM-J105B": "Samsung Galaxy J1 Mini",
"SM-J105DS": "Samsung Galaxy J1 Mini",
"SM-J105F": "Samsung Galaxy J1 Mini",
"SM-G935F": "Samsung Galaxy S7 Edge",
"SM-G935FD": "Samsung Galaxy S7 Edge",
"SM-G9350": "Samsung Galaxy S7 Edge",
"SM-G935A": "Samsung Galaxy S7 Edge",
"SM-G935V": "Samsung Galaxy S7 Edge",
"SM-G935U": "Samsung Galaxy S7 Edge",
"SM-G935S": "Samsung Galaxy S7 Edge",
"SM-G935K": "Samsung Galaxy S7 Edge",
"SM-G935W8": "Samsung Galaxy S7 Edge",
"SC-02H": "Samsung Galaxy S7 Edge",
"SM-G930F": "Samsung Galaxy S7",
"SM-G930FD": "Samsung Galaxy S7",
"SM-G9300": "Samsung Galaxy S7",
"SM-G930A": "Samsung Galaxy S7",
"SM-G930V": "Samsung Galaxy S7",
"SM-G930AZ": "Samsung Galaxy S7",
"SM-G930S": "Samsung Galaxy S7",
"SM-G930K": "Samsung Galaxy S7",
"SM-G930W8": "Samsung Galaxy S7",
"SM-J120F": "Samsung Galaxy J1 (2016)",
"SM-J120M": "Samsung Galaxy J1 (2016)",
"SM-A9000": "Samsung Galaxy A9 (2016)",
"SM-A7100": "Samsung Galaxy A7 (2016)",
"SM-A710F": "Samsung Galaxy A7 (2016)",
"SM-A710FD": "Samsung Galaxy A7 (2016)",
"SM-A710M": "Samsung Galaxy A7 (2016)",
"SM-A710Y": "Samsung Galaxy A7 (2016)",
"SM-A5100": "Samsung Galaxy A5 (2016)",
"SM-A510F": "Samsung Galaxy A5 (2016)",
"SM-A510FD": "Samsung Galaxy A5 (2016)",
"SM-A510M": "Samsung Galaxy A5 (2016)",
"SM-A510Y": "Samsung Galaxy A5 (2016)",
"SM-A310F": "Samsung Galaxy A3 (2016)",
"SM-A310M": "Samsung Galaxy A3 (2016)",
"SM-T670": "Samsung Galaxy View",
"SC-01H": "Samsung Galaxy Active Neo",
"SM-J200F": "Samsung Galaxy J2",
"SM-J200Y": "Samsung Galaxy J2",
"SM-J200H": "Samsung Galaxy J2",
"SM-J200M": "Samsung Galaxy J2",
"SM-G928A": "Samsung Galaxy S6 Edge+",
"SM-G928AZ": "Samsung Galaxy S6 Edge+",
"SM-G928D": "Samsung Galaxy S6 Edge+",
"SM-G928F": "Samsung Galaxy S6 Edge+",
"SM-G928FD": "Samsung Galaxy S6 Edge+",
"SM-G928I": "Samsung Galaxy S6 Edge+",
"SM-G928K": "Samsung Galaxy S6 Edge+",
"SM-G928L": "Samsung Galaxy S6 Edge+",
"SM-G928P": "Samsung Galaxy S6 Edge+",
"SM-G928PZ": "Samsung Galaxy S6 Edge+",
"SM-G928R4": "Samsung Galaxy S6 Edge+",
"SM-G928R7": "Samsung Galaxy S6 Edge+",
"SM-G928S": "Samsung Galaxy S6 Edge+",
"SM-G928T": "Samsung Galaxy S6 Edge+",
"SM-G928T1": "Samsung Galaxy S6 Edge+",
"SM-G928TR": "Samsung Galaxy S6 Edge+",
"SM-G928V": "Samsung Galaxy S6 Edge+",
"SM-G9280": "Samsung Galaxy S6 Edge+",
"SM-G9288": "Samsung Galaxy S6 Edge+",
"SM-G9289": "Samsung Galaxy S6 Edge+",
"SM-A8000": "Samsung Galaxy A8",
"SM-A800F": "Samsung Galaxy A8",
"SM-A800I": "Samsung Galaxy A8",
"SM-A800S": "Samsung Galaxy A8",
"SM-A800Y": "Samsung Galaxy A8",
"SM-N9200": "Samsung Galaxy Note 5",
"SM-N920C": "Samsung Galaxy Note 5",
"SM-N920T": "Samsung Galaxy Note 5",
"SM-N920A": "Samsung Galaxy Note 5",
"SM-N920I": "Samsung Galaxy Note 5",
"SM-N9208": "Samsung Galaxy Note 5",
"SM-G903F": "Samsung Galaxy S5 Neo",
"SM-G903W": "Samsung Galaxy S5 Neo",
"SM-G318H": "Samsung Galaxy Trend 2 Lite",
"SM-G890A": "Samsung Galaxy S6 Active",
"SM-J500F": "Samsung Galaxy J5",
"SM-J500H": "Samsung Galaxy J5",
"SM-J500M": "Samsung Galaxy J5",
"SM-J500G": "Samsung Galaxy J5",
"SM-J700F": "Samsung Galaxy J7",
"SM-J700H": "Samsung Galaxy J7",
"SM-J700M": "Samsung Galaxy J7",
"SM-J700T": "Samsung Galaxy J7",
"SM-J700P": "Samsung Galaxy J7",
"SM-G925A": "Samsung Galaxy S6 Edge",
"SM-G925AZ": "Samsung Galaxy S6 Edge",
"SM-G925F": "Samsung Galaxy S6 Edge",
"SM-G925I": "Samsung Galaxy S6 Edge",
"SM-G925K": "Samsung Galaxy S6 Edge",
"SM-G925L": "Samsung Galaxy S6 Edge",
"SM-G925P": "Samsung Galaxy S6 Edge",
"SM-G925PZ": "Samsung Galaxy S6 Edge",
"SM-G925R4": "Samsung Galaxy S6 Edge",
"SM-G925R7": "Samsung Galaxy S6 Edge",
"SM-G925S": "Samsung Galaxy S6 Edge",
"SM-G925T": "Samsung Galaxy S6 Edge",
"SM-G925T1": "Samsung Galaxy S6 Edge",
"SM-G925TR": "Samsung Galaxy S6 Edge",
"SM-G925V": "Samsung Galaxy S6 Edge",
"SM-G9250": "Samsung Galaxy S6 Edge",
"SM-G9258": "Samsung Galaxy S6 Edge",
"SM-G9259": "Samsung Galaxy S6 Edge",
"SM-G920A": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920AZ": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920D": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920F": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920FD": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920I": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920K": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920L": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920P": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920PZ": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920R4": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920R7": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920S": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920T": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920T1": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920TR": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920V": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G9200": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G9208": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G9209": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-J100H": "Samsung Galaxy J1",
"SM-J100F": "Samsung Galaxy J1",
"SM-E500H": "Samsung Galaxy E5",
"SM-E500F": "Samsung Galaxy E5",
"SM-A700F": "Samsung Galaxy A7",
"SM-A700FD": "Samsung Galaxy A7",
"SM-A700FQ": "Samsung Galaxy A7",
"SM-A700H": "Samsung Galaxy A7",
"SM-A700K": "Samsung Galaxy A7",
"SM-A700L": "Samsung Galaxy A7",
"SM-A700M": "Samsung Galaxy A7",
"SM-A700S": "Samsung Galaxy A7",
"SM-A700X": "Samsung Galaxy A7",
"SM-A700YD": "Samsung Galaxy A7",
"SM-A700YZ": "Samsung Galaxy A7",
"SM-A7000": "Samsung Galaxy A7",
"SM-A7009": "Samsung Galaxy A7",
"SM-A7009W": "Samsung Galaxy A7",
"SM-E700H": "Samsung Galaxy E7",
"SM-A500F": "Samsung Galaxy A5 (2015)",
"SM-A500F1": "Samsung Galaxy A5 (2015)",
"SM-A500FQ": "Samsung Galaxy A5 (2015)",
"SM-A500FU": "Samsung Galaxy A5 (2015)",
"SM-A500G": "Samsung Galaxy A5 (2015)",
"SM-A500H": "Samsung Galaxy A5 (2015)",
"SM-A500HQ": "Samsung Galaxy A5 (2015)",
"SM-A500K": "Samsung Galaxy A5 (2015)",
"SM-A500L": "Samsung Galaxy A5 (2015)",
"SM-A500M": "Samsung Galaxy A5 (2015)",
"SM-A500S": "Samsung Galaxy A5 (2015)",
"SM-A500X": "Samsung Galaxy A5 (2015)",
"SM-A500XZ": "Samsung Galaxy A5 (2015)",
"SM-A500Y": "Samsung Galaxy A5 (2015)",
"SM-A500YZ": "Samsung Galaxy A5 (2015)",
"SM-A5000": "Samsung Galaxy A5 (2015)",
"SM-A5009": "Samsung Galaxy A5 (2015)",
"SM-A300F": "Samsung Galaxy A3 (2015)",
"SM-A300FU": "Samsung Galaxy A3 (2015)",
"SM-A300G": "Samsung Galaxy A3 (2015)",
"SM-A300H": "Samsung Galaxy A3 (2015)",
"SM-A300HQ": "Samsung Galaxy A3 (2015)",
"SM-A300M": "Samsung Galaxy A3 (2015)",
"SM-A300X": "Samsung Galaxy A3 (2015)",
"SM-A300XU": "Samsung Galaxy A3 (2015)",
"SM-A300XZ": "Samsung Galaxy A3 (2015)",
"SM-A300Y": "Samsung Galaxy A3 (2015)",
"SM-A300YZ": "Samsung Galaxy A3 (2015)",
"SM-A3000": "Samsung Galaxy A3 (2015)",
"SM-A3009": "Samsung Galaxy A3 (2015)",
"SM-G360BT": "Samsung Galaxy Core Prime",
"SM-G360H": "Samsung Galaxy Core Prime",
"SM-N915G": "Samsung Galaxy Note Edge",
"SM-N9150": "Samsung Galaxy Note Edge",
"SM-N910G": "Samsung Galaxy Note 4",
"SM-G130H": "Samsung Galaxy Young 2",
"SM-G850F": "Samsung Galaxy Alpha",
"SM-G850FQ": "Samsung Galaxy Alpha",
"SM-G850K": "Samsung Galaxy Alpha",
"SM-G850L": "Samsung Galaxy Alpha",
"SM-G850M": "Samsung Galaxy Alpha",
"SM-G850S": "Samsung Galaxy Alpha",
"SM-G850W": "Samsung Galaxy Alpha",
"SM-G850Y": "Samsung Galaxy Alpha",
"SM-G530BT": "Samsung Galaxy Grand Prime",
"SM-G530F": "Samsung Galaxy Grand Prime",
"SM-G530FQ": "Samsung Galaxy Grand Prime",
"SM-G530FZ": "Samsung Galaxy Grand Prime",
"SM-G530H": "Samsung Galaxy Grand Prime",
"SM-G530M": "Samsung Galaxy Grand Prime",
"SM-G530MU": "Samsung Galaxy Grand Prime",
"SM-G530P": "Samsung Galaxy Grand Prime",
"SM-G530R4": "Samsung Galaxy Grand Prime",
"SM-G530R7": "Samsung Galaxy Grand Prime",
"SM-G530T": "Samsung Galaxy Grand Prime",
"SM-G530W": "Samsung Galaxy Grand Prime",
"SM-G530Y": "Samsung Galaxy Grand Prime",
"SM-G5306W": "Samsung Galaxy Grand Prime",
"SM-G5308W": "Samsung Galaxy Grand Prime",
"SM-G5309W": "Samsung Galaxy Grand Prime",
"SM-G110B": "Samsung Galaxy Pocket 2",
"SM-G750F": "Samsung Galaxy Mega 2",
"SM-G350E": "Samsung Galaxy Star 2 Plus",
"SM-G313F": "Samsung Galaxy Ace 4",
"SM-G355H": "Samsung Galaxy Core 2",
"GT-S5500": "Samsung Galaxy S5 Mini",
"GT-S5430": "Samsung Galaxy S5 Mini",
"SM-T800": "Samsung Galaxy Tab S 10.5",
"SM-T805": "Samsung Galaxy Tab S 10.5",
"SM-T807": "Samsung Galaxy Tab S 10.5",
"SM-T807P": "Samsung Galaxy Tab S 10.5",
"SM-T807V": "Samsung Galaxy Tab S 10.5",
"SM-G386F": "Samsung Galaxy Core",
"SM-C115": "Samsung Galaxy K Zoom",
"SM-G310": "Samsung Galaxy Ace Style",
"SM-G900": "Samsung Galaxy S5",
"SM-G900FD": "Samsung Galaxy S5",
"GT-I9300I": "Samsung Galaxy S3 Neo",
"GT-I9301I": "Samsung Galaxy S3 Neo",
"GT-I9303I": "Samsung Galaxy S3 Neo",
"SM-N7500": "Samsung Galaxy Note 3 Neo",
"SM-N7502": "Samsung Galaxy Note 3 Neo",
"SM-N7505": "Samsung Galaxy Note 3 Neo",
"SM-G7102": "Samsung Galaxy Grand 2 (SM-G7100)",
"GT-S7262": "Samsung Galaxy Star Pro (GT-S7260)",
"GT-S7392": "Samsung Galaxy Trend Lite (GT-S7390)",
"SM-G3502": "Samsung Galaxy Core Plus (SM-G3500)",
"SM-N9000": "Samsung Galaxy Note 3",
"SM-N9002": "Samsung Galaxy Note 3",
"SM-N9005": "Samsung Galaxy Note 3",
"SM-V700": "Samsung Galaxy Gear",
"GT-S7272": "Samsung Galaxy Ace 3 (GT-S7270)[20]",
"GT-S7275": "Samsung Galaxy Ace 3 (GT-S7270)[20]",
"GT-S5312": "Samsung Galaxy Pocket Neo (GT-S5310)",
"GT-S5282": "Samsung Galaxy Star (GT-S5280)",
"GT-S5283": "Samsung Galaxy Star (GT-S5280)",
"GT-i8262D": "Samsung Galaxy Core (GT-S8262)",
"Galaxy Grand Quattro": "Samsung Galaxy Win (GT-I8550)",
"GT-I9150": "Samsung Galaxy Mega",
"GT-I9152": "Samsung Galaxy Mega",
"GT-I9200": "Samsung Galaxy Mega",
"GT-I9205": "Samsung Galaxy Mega",
"GT-S6810P": "Samsung Galaxy Fame (GT-S6810)",
"GT-I9505": "Samsung Galaxy S4 (GT-I9500)",
"GT-I9506": "Samsung Galaxy S4 (GT-I9500)",
"GT-S6312": "Samsung Galaxy Young (GT-S6310)",
"GT-I9082": "Samsung Galaxy Grand (GT-I9080)",
"SGH-I437": "Samsung Galaxy Express",
"GT-N7100": "Samsung Galaxy Note II",
"GT-N7102": "Samsung Galaxy Note II",
"GT-N7105": "Samsung Galaxy Note II",
"GT-B5512": "Samsung Galaxy Y Pro DUOS (GT-B5510)[33]",
"GT-I5700": "Samsung Galaxy Spica[67]",
"GT-I7500": "Samsung Galaxy[68]",
# OPPO Devices
'CPH1911': 'OPPO F11',
'CPH1909': 'Oppo A5s',
'CPH1913': 'OPPO F11',
'CPH1931': 'OPPO A5 2020',
'CPH1933': 'OPPO A5 2020',
'CPH1937': 'OPPO A9 2020',
'CPH1969': 'OPPO F11 Pro',
'CPH1989': 'OPPO Reno2 F',
'CPH2001': 'OPPO F15',
'CPH2015': 'OPPO A31',
'CPH2023': 'OPPO Find X2',
'CPH2035': 'OPPO Reno3 Pro',
'CPH2061': 'OPPO A52',
'CPH2071': 'OPPO A11k',
'CPH2077': 'OPPO A12',
'CPH2083': 'OPPO A11k',
'CPH2109': 'OPPO Reno4 Pro',
'CPH2127': 'OPPO A53',
'CPH2137': 'OPPO A33',
'CPH2179': 'OPPO A15s',
'CPH2185': 'OPPO A15',
'CPH2201': 'OPPO Reno5 Pro 5G',
'CPH2213': 'OPPO F19 Pro+',
'CPH2219': 'OPPO F19',
'CPH2239': 'OPPO A54',
'CPH2249': ' OPPO Reno6 Pro 5G',
'CPH2251': 'OPPO Reno6 5G',
'CPH2263': 'OPPO A74 5G',
'CPH2269': 'OPPO A16',
'CPH2285': 'OPPO F19 Pro',
'CPH2293': 'OPPO Reno7 Pro 5G',
'CPH2321': 'OPPO A53s 5G',
'CPH2325': 'OPPO A55',
'CPH2349': 'OPPO A16k',
'CPH2371': 'OPPO Reno7 5G',
'Not Available': 'OPPO F17',
# Huawei/ Honor Devices
"YAL-TL00": "Honor 20",
"YAL-L71": "Huawei nova 5T",
"YAL-L61": "Huawei nova 5T",
"YAL-L41": "Honor 20 Pro",
"YAL-L21": "Huawei nova 5T",
"YAL-AL50": "Honor 20S",
"YAL-AL10": "Honor 20 Pro",
"YAL-AL00I": "Honor 20",
"YAL-AL00": "Honor 20",
"Y635-TL00": "Huawei Y635",
"Y635-L21": "Huawei Y635",
"Y635-L03": "Huawei Y635",
"Y635-L02": "Huawei Y635",
"Y635-L01": "Huawei Y635",
"Y635-CL00": "Huawei Y635",
"Y625-U51": "Huawei Y625",
"Y625-U43": "Huawei Y625",
"Y625-U32": "Huawei Y625",
"Y625-U21": "Huawei Y625",
"Y625-U13": "Huawei Y625",
"Y610-U00": "Huawei Y610-U00",
"Y600-U40": "Huawei Ascend Y600",
"Y600-U351": "Huawei Ascend Y600",
"Y600-U20": "Huawei Ascend Y600",
"Y600-U151": "Huawei Ascend Y600",
"Y600-U00": "Huawei Ascend Y600",
"Y560-U23": "Huawei Y560",
"Y560-U02": "Huawei Y560",
"Y560-L23": "Huawei Y560",
"Y560-L03": "Huawei Y560",
"Y560-L02": "Huawei Y560",
"Y560-L01": "Huawei Y560",
"Y550-L03": "Huawei Ascend Y550",
"Y550-L02": "Huawei Ascend Y550",
"Y550-L01": "Huawei Ascend Y550",
"Y541-U02": "Huawei Y541",
"Y540-U01": "Huawei Ascend Y540",
"Y538": "Huawei Union Y538",
"Y536-A1": "Huawei Y536",
"Y530-U051": "Huawei Y530",
"Y530-U00": "Huawei Y530",
"Y520-U33": "Huawei Ascend Y520",
"Y520-U22": "Huawei Ascend Y520",
"Y520-U12": "Huawei Ascend Y520",
"Y520-U03": "Huawei Ascend Y520",
"Y511-U30": "Huawei Ascend Y511",
"Y511-U251": "Huawei Ascend Y511",
"Y511-U10": "Huawei Ascend Y511",
"Y511-U00": "Huawei Y511",
"Y360-U93": "Huawei Y3 lite",
"Y360-U82": "Huawei Y3 Lite",
"Y360-U61": "Huawei Y360",
"Y360-U31": "Huawei Y360",
"Y360-U23": "Huawei Y360",
"Y360-U03": "Huawei Y360",
"Y340-U081": "Huawei Y340",
"Y336-U02": "Huawei Y336",
"Y330-U17": "Huawei Ascend Y330",
"Y330-U15": "Huawei Ascend Y330",
"Y330-U11": "Huawei Ascend Y330",
"Y330-U07": "Huawei Ascend Y330",
"Y330-U05": "Huawei Ascend Y330",
"Y330-U01": "Huawei Ascend Y330",
"Y321-U051": "Huawei Ascend Y321",
"Y320-U351": "Huawei Ascend Y320",
"Y320-U30": "Huawei Ascend Y320",
"Y320-U151": "Huawei Ascend Y320",
"Y320-U10": "Huawei Ascend Y320",
"Y320-U01": "Huawei Ascend Y320",
"Y300-0151": "Huawei Ascend Y300",
"Y300-0100": "Huawei Ascend Y300",
"Y300-0000": "Huawei Ascend Y300",
"Y221-U33": "Huawei Ascend Y221",
"Y221-U22": "Huawei Ascend Y221",
"Y221-U12": "Huawei Ascend Y221",
"Y221-U03": "Huawei Ascend Y221",
"Y220-U10": "Huawei Ascend Y220",
"Y220-U05": "Huawei Ascend Y220",
"Y220-U00": "Huawei Ascend Y220",
"Y210-0200": "Huawei Ascend Y210",
"Y210-0151": "Huawei Ascend Y210",
"WLZ-AN00": "Huawei nova 6 5G",
"WLZ-AL10": "Huawei nova 6",
"WKG-TN00": "Huawei Enjoy 20 SE",
"WKG-L29": "Huawei Enjoy 20 5G",
"WKG-L09": "Huawei Enjoy 20 5G",
"WKG-AN00": "Huawei Enjoy 20 5G",
"WAS-TL10": "Huawei P10 Lite Dual",
"WAS-LX3": "Huawei P10 Lite",
"WAS-LX2": "Huawei P10 Lite",
"WAS-LX1": "Huawei P10 Lite",
"WAS-L23": "Huawei P10 Lite",
"WAS-L22J": "Huawei WAS-L22J",
"WAS-L22": "Huawei P10 Lite",
"WAS-L21": "Huawei P10 Lite",
"WAS-L03": "Huawei P10 Lite",
"WAS-L02": "Huawei P10 Lite",
"WAS-L01": "Huawei P10 Lite",
"WAS-AL00": "Huawei Nova Youth Dual",
"Warsaw-LX2": "Huawei P10",
"Warsaw-LX1": "Huawei Warsaw-LX1",
"Warsaw-L23": "Huawei P10",
"Warsaw-L22": "Huawei P10",
"Warsaw-L21": "Huawei P10",
"Warsaw-L03": "Huawei Warsaw-L03",
"Warsaw-L02": "Huawei Warsaw-L02",
"W1-U00": "Huawei W1",
"VTR-TL00": "Huawei P10",
"VTR-L29": "Huawei P10",
"VTR-L09": "Huawei P10",
"VTR-AL00": "Huawei P10",
"VRD-W10": "Huawei MediaPad M6 Turbo 8.4",
"VRD-W09": "Huawei MediaPad M6 Turbo 8.4",
"VRD-AL10": "Huawei MediaPad M6 Turbo 8.4",
"VRD-AL09": "Huawei MediaPad M6 Turbo 8.4",
"VOG-TL00": "Huawei P30 Pro",
"VOG-L29": "Huawei P30 Pro",
"VOG-L09": "Huawei P30 Pro",
"VOG-L04": "Huawei P30 Pro",
"VOG-AL10": "Huawei P30 Pro",
"VOG-AL00": "Huawei Y6 Pro",
"VNS-TL00": "Huawei G9 Lite",
"VNS-L62": "Huawei P9 Lite",
"VNS-L53": "Huawei P9 lite",
"VNS-L52C": "Huawei VNS-L52C",
"VNS-L31": "Huawei P9 Lite",
"VNS-L23": "Huawei P9 lite",
"VNS-L22": "Huawei P9 Lite",
"VNS-L21": "Huawei P9 Lite",
"VNS-DL00": "Huawei P9",
"VNS-AL00": "Huawei G9 Lite",
"VKY-TL00": "Huawei P10 Plus",
"VKY-L29": "Huawei P10 Plus",
"VKY-L09": "Huawei P10 Plus",
"VKY-AL00": "Huawei P10 Plus",
"VIE-L29": "Huawei P9 Plus",
"VIE-L09": "Huawei P9 Plus",
"VIE-C00": "Huawei P9 Plus",
"VIE-AL10": "Huawei P9 Plus",
"Victoria-L09": "Huawei P10",
"Vicky-L29": "Huawei P10 Plus",
"Vicky-L09": "Huawei P10 Plus",
"VEN-L22": "Huawei Honor 8 Smart",
"VCE-TL00": "Huawei Nova 4",
"VCE-AL00": "Huawei Nova 4",
"U9510E": "Huawei Ascend D1",
"U9508": "Huawei Honor 2",
"U9202L-1": "Huawei Ascend P1 LTE",
"U9200-1": "Huawei Ascend P1",
"TRT-TL10": "Huawei Y7 Prime",
"TRT-LX3": "Huawei Y7",
"TRT-LX1": "Huawei Y7",
"TRT-LX": "Huawei Y7 Prime",
"TRT-L53D": "Huawei Y7 Prime",
"TRT-L53": "Huawei Y7 Prime",
"TRT-L21A": "Huawei Y7 Prime",
"TRT-L21": "Huawei Y7",
"TRT-L03": "Huawei Y7",
"TRT-L02": "Huawei Y7",
"TRT-L01": "Huawei Y7",
"TRT-AL00": "Huawei Enjoy 7 Plus",
"Toronto-L23": "Huawei Y7",
"Toronto-L22": "Huawei Toronto-L22",
"Toronto-L21": "Huawei Y7",
"Toronto-L02": "Huawei Toronto-L02",
"Toronto-AL00": "Huawei Toronto-AL00",
"TNY-TL00": "Huawei Honor Magic 2",
"TNY-AL10": "Honor Magic 2",
"TNY-AL00": "Honor Magic 2",
"TNNH-AN00": "Honor Play4",
"TNN-AN00": "Huawei Enjoy 20s",
"TIT-U02": "Huawei Y6 Pro",
"TIT-L01": "Huawei Y6 Pro",
"TIT-CL10": "Huawei Enjoy 5",
"TIT-CL00": "Huawei Enjoy 5",
"TIT-AL00": "Huawei Y6 Pro",
"TET-AN00": "Huawei Mate X2",
"TEL-AN10": "Honor X10 5G",
"TEL-AN00A": "Honor X10 5G",
"TEL-AN00": "Honor X10 5G",
"TAS-TL00": "Huawei Mate 30",
"TAS-L29": "Huawei Mate 30",
"TAS-AN00": "Huawei Mate 30 5G",
"TAS-AL00": "Huawei Mate 30",
"TAH-N29M": "Huawei Mate Xs",
"TAH-AN00M": "Huawei Mate X",
"TAH-AN00": "Huawei Mate X",
"TAG-TL00": "Huawei Enjoy 5s",
"TAG-L32": "Huawei GR3",
"TAG-L23": "Huawei GR3",
"TAG-L22": "Huawei GR3",
"TAG-L21": "Huawei GR3",
"TAG-L13": "Huawei GR3",
"TAG-L03": "Huawei GR3",
"TAG-L01": "Huawei P8 Lite Smart",
"TAG-CL00": "Huawei Enjoy 5S",
"T1-A22L": "Huawei Mediapad T1",
"T1-A21w": "Huawei MediaPad T1 10",
"T1-A21L": "Huawei MediaPad T1 10",
"T1-821L": "Huawei MediaPad T1 8.0",
"T1-702u": "Huawei MediaPad T1 7.0",
"T1-702": "Huawei MediaPad T1 7.0",
"T1-701w": "Huawei MediaPad T1 7.0",
"T1-701ua": "Huawei MediaPad T1 7.0",
"T1-701u": "Huawei MediaPad T1 7.0",
"T1-701": "Huawei MediaPad T1 7.0",
"STK-TL00": "Huawei Enjoy 10 Plus",
"STK-L23BHN": "Huawei Y9 Prime (2019)",
"STK-L22HN": "Huawei Y9 Prime (2019)",
"STK-L22DV": "Huawei Y9 Prime (2019)",
"STK-L22": "Huawei Y9 Prime (2019)",
"STK-L21VHN": "Huawei Y9 Prime (2019)",
"STK-L21UDV": "Huawei Y9 Prime (2019)",
"STK-L21MDV": "Huawei Y9 Prime (2019)",
"STK-L21M": "Huawei Y9 Prime (2019)",
"STK-L21HN": "Huawei Y9 Prime (2019)",
"STK-L21": "Huawei Y9 Prime (2019)",
"STK-L03DV": "Huawei P Smart Z",
"STK-L03B": "Huawei P Smart Z",
"STK-L01MDV": "Huawei Y9 Prime (2019)",
"STK-L01M": "Huawei P Smart Z",
"STK-AL00": "Huawei P Smart Z",
"STF-TL10": "Huawei Honor 9",
"STF-L09S": "Huawei Honor 9",
"STF-L09": "Huawei Honor 9",
"STF-AL10": "Huawei Honor 9",
"STF-AL00": "Huawei Honor 9",
"SPN-AL10": "Huawei nova 5z",
"SPN-AL00": "Huawei nova 5z",
"Sophia-L12": "Huawei Ascend P7",
"Sophia-L11": "Huawei Ascend P7",
"Sophia-L10": "Huawei Ascend P7",
"Sophia-L09": "Huawei Ascend P7",
"Sophia-L07": "Huawei Ascend P7",
"Sophia-L00": "Huawei Ascend P7",
"SNE-L01": "Huawei Mate 20 lite",
"SLA-L22": "Huawei P9 Lite Mini",
"SLA-L03": "Huawei Y6 Pro 2017",
"SLA-L02": "Huawei Y6 Pro 2017",
"SLA-AL00": "Huawei Enjoy 7",
"SHT-W09": "Huawei MediaPad M5 8.4",
"SHT-AL09": "Huawei MediaPad M5 8.4",
"Selina-L03": "Huawei Y6 Pro 2017",
"SEA-AL10": "Huawei nova 5 Pro",
"SEA-AL00": "Huawei nova 5",
"SCMR-W09": "Huawei MatePad 10.8",
"SCMR-AL09": "Huawei MatePad 10.8",
"SCL-U31": "Huawei Y6",
"SCL-U23": "Huawei Y6",
"SCL-L32": "Huawei Y6",
"SCL-L21": "Huawei Y6",
"SCL-L04": "Huawei Y6",
"SCL-L03": "Huawei Y6",
"SCL-L02": "Huawei Y6",
"SCL-L01": "Huawei Y6",
"SCL-CL00": "Huawei Honor 4A",
"SCL-AL00": "Huawei Honor 4A",
"SCC-U21": "Huawei Y6",
"SC-CL00": "Huawei Ascend GX1",
"S8-701w": "Huawei MediaPad T1 8.0",
"S8-701u": "Huawei MediaPad M1 8.0",
"S8-306L": "Huawei MediaPad M1 8.0",
"S8-303L": "Huawei MediaPad M1",
"S8-301w": "Huawei MediaPad M1 8.0",
"S8-301u": "Huawei MediaPad M1 8.0",
"S8-301L": "Huawei MediaPad M1 8.0",
"S7-931w": "Huawei MediaPad 7 Lite",
"S7-931u": "Huawei MediaPad 7 Lite",
"S7-722u": "Huawei MediaPad 7 Youth 2",
"S7-721w": "Huawei MediaPad 7 Youth 2",
"S7-721u": "Huawei MediaPad 7 Youth 2",
"S7-721g": "Huawei MediaPad 7 Youth 2",
"S7-701w": "Huawei MediaPad 7 Youth",
"S7-701u": "Huawei MediaPad 7 Youth",
"S7-601w": "Huawei MediaPad 7 Vogue",
"S7-601u": "Huawei MediaPad 7 Vogue",
"S7-301u": "Huawei MediaPad 7 Vogue",
"S7-201u": "Huawei IDEOS S7 Slim",
"S10-232L": "Huawei MediaPad 10 Link Plus",
"S10-231w": "Huawei MediaPad10 Link",
"S10-231u": "Huawei MediaPad 10 Link Plus",
"S10-231L": "Huawei MediaPad 10 Link",
"S10-201u": "Huawei MediaPad 10 Link",
"S10-101w": "Huawei MediaPad 10 FHD",
"S10-101u": "Huawei MediaPad 10 FHD",
"RVL-AL09": "Honor Note 10",
"RNE-L23": "Huawei Mate 10 Lite",
"RNE-L22": "Huawei Nova 2i",
"RNE-L21": "Huawei Mate 10 Lite",
"RNE-L03": "Huawei Mate 10 Lite",
"RNE-L02": "Huawei Nova 2i",
"RNE-L01": "Huawei Mate 10 Lite",
"RNE-AL00": "Huawei Maimang 6",
"RIO-UL00": "Huawei G7 Plus",
"RIO-TL00": "Huawei G7 Plus",
"RIO-L33": "Huawei G8",
"RIO-L23": "Huawei G8",
"RIO-L11": "Huawei G8",
"RIO-L03": "Huawei GX8",
"RIO-L02": "Huawei G8",
"RIO-L01,RIO-L11": "Huawei G8",
"RIO-L01": "Huawei G8",
"RIO-CL00": "Huawei Maimang 4",
"RIO-AL00": "Huawei Maimang 4",
"Rhone-L21": "Huawei Mate 10 Lite",
"Rhone-L03": "Huawei Mate 10 Lite",
"Rhone-L01": "Huawei Mate 10 Lite",
"Prague-TL00": "Huawei Prague-TL00",
"Prague-L23": "Huawei P8 Lite 2017",
"Prague-L22": "Huawei P8 Lite 2017",
"Prague-L21": "Huawei P8 Lite 2017",
"Prague-L03": "Huawei Prague-L03",
"PRA-TL10": "Huawei Honor 8 Lite",
"PRA-LX3": "Huawei P9 Lite 2017",
"PRA-LX2": "Huawei Nova Lite",
"PRA-LX1": "Huawei P8 Lite 2017",
"PRA-LA1": "Huawei Honor 8 Lite",
"PRA-L31": "Huawei P8 Lite 2017",
"PRA-L22": "Huawei P8 Lite 2017",
"PRA-L21": "Huawei P8 Lite Dual Sim 2017",
"PRA-L11": "Huawei P8 Lite 2017",
"PRA-L03": "Huawei P8 Lite 2017",
"PRA-L02": "Huawei Nova Lite",
"PRA-L01": "Huawei P8 Lite 2017",
"PRA-AL00X": "Huawei Honor 8 Lite",
"PRA-AL00": "Huawei Honor 8 Lite",
"PPAH-TL20": "Huawei P smart 2021",
"PPAH-L22": "Huawei P smart 2021",
"PPAH-L21": "Huawei P smart 2021",
"PPAH-L02": "Huawei P smart 2021",
"PPAH-AL40": "Huawei P smart 2021",
"PPAH-AL20": "Huawei P smart 2021",
"POT-TL00": "Huawei Enjoy 9s",
"POT-LX3": "Huawei P Smart 2019",
"POT-LX2J": "Huawei Nova Lite 3",
"POT-LX1A": "Huawei P Smart 2020",
"POT-LX1": "Huawei P Smart 2019",
"POT-L41B": "Huawei P Smart S",
"POT-L21RU": "Huawei P smart 2019",
"POT-L21": "Huawei P smart 2019",
"POT-L01": "Huawei P smart 2019",
"POT-AL10C": "Huawei enjoy 9s",
"POT-AL00a": "Huawei Enjoy 9S",
"POT-AL00": "Huawei P Smart 2019",
"PLK-UL00IN": "Huawei Honor 7",
"PLK-UL00": "Huawei Honor 7",
"PLK-TL01H": "Huawei Honor 7",
"PLK-TL00": "Huawei Honor 7",
"PLK-L01": "Huawei Honor 7",
"PLK-CL00": "Huawei Honor 7",
"PLK-AL10": "Huawei Honor 7",
"PLE-703L": "Huawei MediaPad M2 Lite",
"PLE-701L": "Huawei MediaPad T2 7.0",
"Pine-UL00": "Huawei Honor 6 plus",
"Pine-TL10": "Huawei Honor 6 Plus",
"Pine-L04": "Huawei Honor 6 Plus",
"Pine-L02": "Huawei Honor 6 Plus",
"Pine-L00": "Huawei Honor 6 Plus",
"PIC-TL00": "Huawei Nova 2",
"PIC-LX9": "Huawei Nova 2",
"PIC-L29": "Huawei Nova 2",
"PIC-L09": "Huawei Nova 2",
"PIC-AL00": "Huawei Nova 2",
"PE-UL00": "Huawei Honor 6 Plus",
"PE-TL20": "Huawei Honor 6 Plus",
"PE-TL10": "Huawei Honor 6 Plus",
"PE-TL00M": "Huawei Honor 6 Plus",
"PE-CL00": "Huawei Honor 6 Plus",
"PCT-TL10": "Honor View 20",
"PCT-L29D": "Honor View 20",
"PCT-L29": "Honor View 20",
"PCT-AL10D": "Honor View 20",
"PCT-AL10": "Honor View 20",
"Paris-L09A": "Huawei Nova 3",
"PAR-TL20": "Huawei Nova 3",
"PAR-TL00": "Huawei nova 3",
"PAR-AL00": "Huawei nova 3",
"P7-L10": "Huawei Ascend P7",
"P7-L09": "Huawei Ascend P7",
"P7-L07": "Huawei Ascend P7",
"P7-L05": "Huawei Ascend P7",
"P6-U06": "Huawei Ascend P6",
"P6-S-U00": "Huawei Ascend P6 S",
"P2-6011": "Huawei Ascend P2",
"OXF-AN10L": "Honor V30 Pro",
"OXF-AN10": "Honor V30 Pro",
"OXF-AN00L": "Honor V30",
"OXF-AN00": "Honor V30",
"OCE-AN50": "Huawei Mate 40E",
"OCE-AN10": "Huawei Mate 40E",
"NXT-TL00": "Huawei Mate 8",
"NXT-L29B": "Huawei Ascend Mate8",
"NXT-L29A": "Huawei Ascend Mate8",
"NXT-L29": "Huawei Mate 8",
"NXT-L09A": "Huawei Ascend Mate8",
"NXT-L09": "Huawei Mate 8",
"NXT-DL00": "Huawei Mate 8",
"NXT-CL00": "Huawei Mate 8",
"NXT-C00": "Huawei Mate 8",
"NXT-AL10": "Huawei Mate 8",
"MAR-L01MEA": "Huawei P30 lite",
"MAR-L01B": "Huawei P30 lite",
"MAR-L01A": "Huawei P30 lite",
"MAR-AL00": "Huawei nova 4e",
"Madrid-L21": "Huawei Y6 (2019)",
"M2-A01w": "Huawei MediaPad M2 10.0",
"M2-A01L": "Huawei MediaPad M2 10.0",
"M2-803L": "Huawei MediaPad M2 8.0",
"M2-802L": "Huawei MediaPad M2 8.0",
"M2-801w": "Huawei MediaPad M2 8.0",
"M2-801L": "Huawei MediaPad M2 8.0",
"LYO-L21": "Huawei Honor 5A",
"LYO-L02": "Huawei Y6 II",
"LYO-L01": "Huawei Y6 II",
"LYA-TL00L": "Huawei Mate 20 Pro",
"LYA-TL00": "Huawei Mate 20 Pro",
"LYA-L29": "Huawei Mate 20 Pro",
"LYA-L09": "Huawei Mate 20 Pro",
"LYA-AL10": "Huawei Mate 20 Pro",
"LYA-AL00P": "Huawei Mate 20 Pro",
"LYA-AL00L": "Huawei Mate 20 Pro",
"LYA-AL00": "Huawei Mate 20 Pro",
"LUA-U23": "Huawei Y3 II",
"LUA-U22": "Huawei Y3 II",
"LUA-U03": "Huawei Y3 II",
"LUA-U02": "Huawei Y3 II",
"LUA-L23": "Huawei Y3 II",
"LUA-L22HN": "Huawei Honor Bee 2",
"LUA-L22": "Huawei Y3 II",
"LUA-L21": "Huawei Y3 II",
"LUA-L13": "Huawei Y3 II",
"LUA-L03": "Huawei Y3 II",
"LUA-L02": "Huawei Y3 II",
"LUA-L01": "Huawei Y3 II",
"LRA-L21B": "Honor 30i",
"LRA-AL00": "Honor 20 lite (China)",
"LON-L29D": "Huawei Mate 9 Pro",
"LON-L29C": "Huawei Mate 9 Pro",
"LON-L29": "Huawei Mate 9 Pro",
"LON-AL00": "Huawei Mate 9 Pro",
"LLD-TL10": "Honor 9 Lite",
"LLD-L31": "Huawei Honor 9 Lite",
"LLD-L21": "Huawei Honor 9 Lite",
"LLD-AL30": "Honor 9N (9i)",
"LLD-AL20": "Honor 9N (9i)",
"LLD-AL10": "Huawei Honor 9 Lite",
"LLD-AL00": "Huawei Honor 9 Lite",
"LIO-TL00": "Huawei Mate 30 Pro",
"LIO-N29": "Huawei Mate 30 RS Porsche Design",
"LIO-L29": "Huawei Mate 30 Pro 5G",
"LIO-AN00P": "Huawei Mate 30 RS Porsche Design",
"LIO-AN00M": "Huawei Mate 30 Pro",
"LIO-AN00": "Huawei Mate 30 Pro 5G",
"LIO-AL00": "Huawei Mate 30 Pro",
"LDN-TL10": "Huawei Y7 Prime 2018",
"LDN-TL00": "Huawei Enjoy 8",
"LDN-LX3": "Huawei Y7 2018",
"LDN-LX2": "Huawei Y7 Prime 2018",
"LDN-L22": "Huawei nova 2 lite",
"LDN-L21": "Huawei Y7 2018",
"LDN-L03": "Huawei Y7 2018",
"LDN-L01": "Huawei Y7 2018",
"LDN-AL00": "Huawei Enjoy 8",
"KSA-L29": "Honor 8S",
"KSA-L22": "Honor 8S",
"KSA-L09": "Honor 8S",
"KSA-AL10": "Honor 8S",
"KSA-AL00": "Honor 8S",
"KRJ-W09": "Honor V6",
"KRJ-AN00": "Honor V6",
"KOB2-W09B": "Huawei MatePad T8",
"KOB2-W09": "Huawei MatePad T8",
"KOB2-L09B": "Huawei MatePad T8",
"KOB2-L09": "Huawei MatePad T8",
"KOB-W09": "HUAWEI MediaPad T3",
"KOB-L09": "Huawei Mediapad T3",
"KNT-UL10": "Huawei Honor V8",
"KNT-TL10": "Huawei Honor V8",
"KNT-C00": "Huawei Honor V8",
"KNT-AL20": "Huawei Honor V8",
"KNT-AL10": "Huawei Honor V8",
"KKG-TN00": "Honor X10 Max 5G",
"KKG-AN00": "Honor X10 Max 5G",
"KIW-UL00": "Huawei Honor 5X",
"KIW-TL00": "Huawei Honor 5X",
"KIW-L24": "Huawei Honor 5X",
"KIW-L22": "Huawei Honor 5X",
"KIW-L21": "Huawei HONOR 5X",
"KIW-CL00": "Huawei Honor 5X",
"KIW-AL10": "Huawei Honor 5X",
"KII-L33": "Huawei GR5",
"KII-L23": "Huawei GR5",
"KII-L22": "Huawei GR5",
"KII-L21": "Huawei GR5",
"KII-L05": "Huawei GR5",
"JSN-TL00": "Honor 8X",
"JSN-L22X": "Honor 8X",
"JSN-L21X": "Honor 8X",
"JSN-L21": "Honor 8X",
"JSN-AL00": "Honor 8X",
"JSC-AN00A": "Huawei nova 8 SE",
"JSC-AN00": "Huawei nova 8 SE",
"JNY-L22": "Huawei nova 7i",
"JNY-L21": "Huawei nova 7i",
"JNY-L01": "Huawei nova 7i",
"JNY-AL10": "Huawei nova 6 SE",
"JMM-TL00": "Huawei Honor 6C Pro",
"JMM-L22": "Huawei Honor 6C Pro",
"JMM-AL00": "Huawei Honor 6C Pro",
"JKM-TL00": "Huawei Y9 (2019)",
"JKM-LX3": "Huawei Y9 (2019)",
"JKM-LX2": "Huawei Y9 (2019)",
"JKM-LX1": "Huawei Y9 (2019)",
"JKM-L21X": "Huawei Y9 (2019)",
"JKM-L21": "Huawei Y9 (2019)",
"JKM-L01X": "Huawei Y9 (2019)",
"JKM-AL20": "Huawei Y9 (2019)",
"JKM-AL10": "Huawei Y9 (2019)",
"JKM-AL00": "Huawei Y9 (2019)",
"Jimmy-TL00": "Huawei Jimmy TL00",
"Jimmy-AL00": "Huawei Jimmy-AL00",
"JER-TN20": "Huawei nova 7 Pro 5G",
"JER-TN10": "Huawei nova 7 Pro 5G",
"JER-AN20": "Huawei nova 7 Pro 5G",
"JER-AN10": "Huawei Nova 7 Pro",
"JEF-TN20": "Huawei nova 7 5G",
"JEF-TN00": "Huawei nova 7 5G",
"JEF-AN20": "Huawei nova 7 5G",
"JEF-AN00": "Huawei Nova 7 Pro",
"JDN2-W09HN": "Honor Tab 5",
"JDN2-W09": "Honor Tab 5",
"JDN2-L09": "Huawei MediaPad M5 Lite 8",
"JDN2-AL50HN": "Huawei MediaPad M5 lite",
"JDN2-AL50": "Huawei MediaPad M5 lite",
"JDN2-AL00HN": "Honor Pad 5 8",
"JDN2-AL00": "Honor Pad 5 8",
"JDN-W09": "Huawei Honor Pad 2",
"JDN-L01": "Huawei MediaPad T2 8.0",
"JDN-AL00": "Huawei Honor Pad 2",
"Jazz-TL10": "Huawei Ascend Mate 7",
"Jazz-L11": "Huawei Ascend Mate 7",
"Jazz-L09": "Huawei Ascend Mate 7",
"Jazz-J1": "Huawei Ascend Mate 7",
"JAT-TL00": "Huawei Honor 8A",
"JAT-L41HW": "Honor 8A Pro",
"JAT-L41": "Honor 8A Pro",
"JAT-L29HW": "Honor Play 8A",
"JAT-L29": "Honor Play 8A",
"JAT-L23HW": "Honor Play 8A",
"JAT-L21AHW": "Honor 8A Pro",
"JAT-AL00": "Honor Play 8A",
"Jakarta-LGRP2": "Huawei Y9 (2019)",
"Jackman-L22": "Huawei Y9 (2019)",
"INE-TL00": "Huawei Nova 3i",
"INE-LX2": "Huawei Nova 3i",
"INE-LX1": "Huawei Nova 3i",
"INE-LGRP1": "Huawei Nova 3i",
"INE-L22rr": "Huawei Nova 3i",
"INE-L22": "Huawei Nova 3i",
"INE-L21": "Huawei Nova 3i",
"INE-AL00": "Huawei nova 3i",
"HWI-TL00": "Huawei Nova 2S",
"HWI-LGRP1": "Huawei Nova 2S",
"HWI-AL00": "Huawei Nova 2s",
"HRY-TL00": "Honor 10 Lite",
"HRY-L21T": "Honor 10 Lite",
"HRY-L21D": "Honor 10 Lite",
"HRY-L21": "Honor 10 Lite",
"HRY-LX1": "Honor 10 Lite",
"HRY-LX2": "Honor 10 Lite",
"HRY-AL00a": "Honor 10 Lite",
"HRY-LX1MEB": "Honor 10 Lite",
"HRY-AL00TA": "Honor 20i",
"HRY-AL00T": "Honor 10 Lite",
"HRY-AL00A": "Honor 10 Lite",
"HRY-AL00": "Honor 10 Lite",
"Holly-U19": "Huawei Holly",
"Holly-U10": "Huawei Holly",
"Holly-U00": "Huawei Honor 3C",
"HMA-TL00": "Huawei Mate 20",
"HMA-L29": "Huawei Mate 20",
"HMA-L09": "Huawei Mate 20",
"HMA-AL00": "Huawei Mate 20",
"HLK-L42": "Honor 9X Pro",
"HLK-L41": "Honor 9X Pro",
"HLK-AL10": "Honor 9X",
"HLK-AL00A": "Honor 9X",
"HLK-AL00": "Honor 9X (China)",
"HDN-W09": "Huawei Honor",
"H60-L12": "Huawei Honor 6",
"H60-L04": "Huawei Honor 6",
"H60-L03": "Huawei Honor 6",
"H60-L02": "Huawei Honor 6",
"H60-L01": "Huawei Honor 6",
"H60-J1": "Huawei Honor 6",
"H30-U10": "Huawei 3C",
"H30-L02": "Huawei Honor 3C",
"H30-L01": "Huawei Honor 3C",
"GRA-UL10": "Huawei P8",
"GRA-UL00": "Huawei P8",
"GRA-TL00": "Huawei P8",
"GRA-L13": "Huawei P8",
"GRA-L09": "Huawei P8",
"GRA-L03": "Huawei P8",
"GRA-CL10": "Huawei P8",
"GRA-CL00": "Huawei P8 Standard Edition",
"GLK-TL00": "Huawei nova 5i",
"GLK-AL00": "Huawei nova 5i",
"GEM-703L": "HUAWEI Honor X2",
"GEM-703": "Huawei MediaPad X2",
"GEM-702L": "Huawei MediaPad X2",
"GEM-702": "Huawei MediaPad X2",
"GEM-701L": "Huawei MediaPad X2",
"GEM-701": "Huawei MediaPad X2",
"G760-TL00": "Huawei Ascend G7",
"G760-L03": "Huawei Ascend G7",
"G760-L01": "Huawei Ascend G7",
"G750-U10": "Huawei Honor 3X",
"G750-T20": "Huawei Honor 3X",
"G750-C00": "Huawei Honor 3X",
"G740-L00": "Huawei G740",
"G735-L23": "Huawei G Play",
"G735-L12": "Huawei G Play",
"G735-L03": "Huawei G Play",
"G730-U251": "Huawei G730",
"G730-U10": "Huawei G730",
"G700-U20": "Huawei Ascend G700",
"G700-U10": "Huawei Ascend G700",
"G7-L11": "Huawei Ascend G7",
"G7-L01": "Huawei Ascend G7",
"G630-U251": "Huawei G630",
"G630-U20": "Huawei G630",
"G630-U10": "Huawei G630",
"G630-U00": "Huawei G630",
"G629-UL00": "Huawei G629",
"G628-TL00": "Huawei Ascend G628",
"G620S-UL00": "Huawei Ascend G620s",
"G620S-L03": "Huawei Ascend G620s",
"G620S-L02": "Huawei Ascend G620s",
"G620S-L01": "Huawei Ascend G620s",
"G620-UL01": "Huawei G620",
"G620-L75": "Huawei Ascend G620s",
"G620-L72": "Huawei G620",
"G615-U10": "Huawei Ascend G615",
"G610-U20": "Huawei G610",
"G610-U15": "Huawei G610",
"G610-U00": "Huawei Ascend G6",
"G6-U251": "Huawei Ascend G6",
"G6-U10": "Huawei Ascend G6",
"G6-L33": "Huawei Ascend G6",
"G6-L22": "Huawei Ascend G6",
"G6-L11": "Huawei Ascend G6",
"G527-U081": "Huawei Ascend G527",
"G526-L33": "Huawei Ascend G526",
"G525-U00": "Huawei Ascend G525",
"G510-0251": "Huawei Ascend G510",
"G510-0200": "Huawei Ascend G510",
"G510-0100": "Huawei Ascend G510",
"FRLM-TN00": "Huawei Enjoy 20 SE",
"FRLM-L22": "Huawei Enjoy 20 SE",
"FRLM-L03": "Huawei Enjoy 20 SE",
"FRLM-AN00A": "Huawei Enjoy 20 SE",
"FRD-L19": "Huawei Honor 8",
"FRD-L14": "Huawei Honor 8",
"FRD-L09": "Huawei HONOR 8",
"FRD-L04": "Huawei Honor 8",
"FRD-L02": "Huawei HONOR 8",
"FRD-DL00": "Huawei MediaPad T2 10.0 Pro",
"FRD-C00": "Huawei Honor 8",
"FRD-AL10": "Huawei Honor 8",
"FRD-AL00": "Huawei Honor 8",
"FLA-TL10": "Huawei Y9 (2018)",
"FLA-AL20": "Huawei Y9 2018",
"FLA-AL10": "Huawei Y9 2018",
"FLA-AL00": "Huawei Y9 2018",
"Figo-L31": "Huawei P Smart",
"FIG-TL10": "Huawei Enjoy 7S Dual",
"FIG-TL00": "Huawei P smart",
"FIG-LX3": "Huawei P Smart",
"FIG-LX2": "Huawei P Smart",
"FIG-LX1": "Huawei P Smart Dual SIM",
"FIG-LA1": "Huawei P Smart",
"FIG-L31": "Huawei P Smart",
"FIG-L22": "Huawei P Smart",
"FIG-L21": "Huawei P Smart",
"FIG-L11": "Huawei P Smart",
"FIG-L03": "Huawei P Smart",
"FIG-L02": "Huawei P Smart",
"FIG-AL10": "Huawei Enjoy 7S",
"FIG-AL00": "Huawei P smart",
"FDR-A05": "Huawei MediaPad T2 10.0 Pro",
"FDR-A04": "Huawei MediaPad T2 10.0 Pro",
"FDR-A03L": "Huawei M2",
"FDR-A03": "Huawei MediaPad T2 10.0 Pro",
"FDR-A01w": "Huawei MediaPad T2 10.0 Pro",
"FDR-A01": "Huawei MediaPad T2 10.0 Pro",
"EVR-TL00": "Huawei Mate 20 X",
"EVR-N29": "Huawei Mate 20 X (5G)",
"EVR-L29": "Huawei Mate 20 X",
"EVR-AN00": "Huawei Mate 20 X (5G)",
"EVR-AL00": "Huawei Mate 20 X",
"EVA-L29": "Huawei P9",
"EVA-L19": "Huawei P9",
"EVA-L09": "Huawei P9",
"EVA-DL00": "Huawei P9",
"EVA-CL00": "Huawei P9",
"EVA-C00": "Huawei P9",
"EVA-AL10": "Huawei P9",
"EVA-AL00": "Huawei P9",
"EML-L29": "Huawei P20",
"EML-L09": "Huawei P20",
"EML-AL00": "Huawei P20",
"ELS-TN00": "Huawei P40 Pro",
"ELS-N39": "Huawei P40 Pro+",
"ELS-N29": "Huawei P40 Pro+",
"ELS-N04": "Huawei P40 Pro",
"ELS-AN10": "Huawei P40 Pro+",
"ELS-AN00": "Huawei P40 Pro",
"ELE-TL00": "Huawei P30",
"ELE-L29": "Huawei P30",
"ELE-L09": "Huawei P30",
"ELE-L04": "Huawei P30",
"ELE-AL00": "Huawei P30",
"EDI-AL10": "Huawei Honor Note 8",
"EDGE-U00": "Huawei Ascend P6",
"EDGE-C00": "Huawei Ascend P6",
"EBG-TN00": "Honor 30 Pro",
"EBG-N19": "Honor 30 Pro+",
"EBG-AN10": "Honor 30 Pro+",
"EBG-AN00": "Honor 30 Pro",
"DVCM-TN20": "",
"DVCM-AN20": "Huawei Enjoy 20 Pro",
"DVCM-AN00": "Huawei Enjoy 20 Pro",
"DUK-TL30": "Huawei Honor V9",
"DUK-L09": "Huawei Honor 8 Pro",
"DUK-AL30": "Huawei Honor V9",
"DUK-AL20": "Huawei Honor V9",
"DUB-LX3": "Huawei Y7 Prime 2019",
"DUB-LX1": "Huawei Y7 Prime 2019",
"DUB-L01": "Huawei Y7 2019",
"DUB-AL00": "Huawei Enjoy 9",
"DUA-L29": "Honor 9S",
"DRA-LX5": "Huawei Y5 Lite (2018)",
"DRA-L29": "Huawei Y5p",
"DRA-L21": "Huawei Y5 Prime 2018",
"DRA-L09": "Huawei Y5p",
"DNN-L29": "Honor 10X Lite",
"Diego-TL10": "Huawei Enjoy 6S",
"Diego-L23": "Huawei Diego-L23",
"Diego-L21": "Huawei Honor 6C",
"Diego-L03": "Huawei Diego-L03",
"Diego-L01": "Huawei Diego-L01",
"Diego-AL00": "Huawei Diego-AL00",
"Delhi-TL20": "Huawei Honor 6A",
"Delhi-L42": "Huawei Honor 6A",
"Delhi-L22": "Huawei Honor 6A",
"Delhi-AL10": "Huawei Honor 6A",
"DAV-703": "Huawei P8 MAX",
"DAV-702L": "Huawei P8 max",
"DAV-701L": "Huawei P8 max",
"D2-0082": "Huawei Ascend D2",
"CUN-U29": "Huawei Y5 II",
"CUN-TL00": "Huawei Honor 5",
"CUN-L33": "Huawei Y5 II",
"CUN-L23": "Huawei Y5 II",
"CUN-L22": "Huawei Y5 II",
"CUN-L21": "Huawei Y5 II",
"CUN-L03": "Huawei Y5 II",
"CUN-L02": "Huawei Y5 II",
"CUN-L01": "Huawei Y5 II",
"CUN-AL00": "Huawei Honor 5",
"CRR-UL20": "Huawei Mate S",
"CRR-UL00": "Huawei Mate S",
"CRR-TL00": "Huawei Mate S",
"CRR-L13": "Huawei Mate S",
"CRR-L09": "Huawei Mate S",
"CRR-CL20": "Huawei Mate S",
"CRR-CL00": "Huawei Mate S",
"CRO-UL00": "Huawei Y3 2017",
"CRO-L03": "Huawei Y3 2017",
"CRO-L02": "Huawei Y3 2017",
"CPN-W09": "Huawei M3 Lite",
"CPN-L09": "Huawei MediaPad M3 Lite",
"CPN-AL00": "Huawei M3 Lite",
"COR-TL10": "Honor Play",
"COR-AL10": "Honor Play",
"COR-AL00": "Honor Play",
"COL-TL10": "Huawei Honor 10",
"COL-TL00": "Huawei Honor 10",
"COL-L29": "Huawei Honor 10",
"COL-AL10": "Honor 10",
"CND-AN00": "Huawei nova 7 SE 5G Youth",
"CMR-W19": "Huawei MediaPad M5 Pro 10.8",
"CMR-W09TWN": "Huawei MediaPad M5",
"CMR-W09": "Huawei MediaPad M5 10.8",
"CMR-AL19": "Huawei MediaPad M5 Pro 10.8",
"CMR-AL09": "Huawei MediaPad M5 10.8",
"CM990": "Huawei CM990",
"CLT-TL00": "Huawei P20 Pro",
"CLT-L29": "Huawei P20 Pro Dual SIM",
"CLT-L09": "Huawei P20 Pro Dual SIM",
"CLT-L04": "Huawei P20 Pro Dual SIM",
"CLT-AL01": "Huawei P20 Pro Dual SIM",
"CLT-AL00": "Huawei P20 Pro Dual SIM",
"CHM-UL00": "Huawei Honor 4C",
"CHM-U01": "Huawei Honor 4C",
"CHM-TL00H": "Huawei Honor 4C",
"CHM-TL00": "Huawei Honor 4C",
"CHM-CL00": "Huawei Honor 4C",
"CHL-AL60CH": "Huawei nova 8 SE",
"CherryPlus-TL00": "Huawei Honor 4X",
"CherryPlus-L23": "Huawei Honor 4X",
"CherryPlus-L12": "Huawei Honor 4X LTE",
"CherryPlus-L11": "Huawei HONOR 4X",
"Cherry-L04": "Huawei Honor 4X",
"Cherry-CL20": "Huawei Honor 4X",
"Cherry-CL10": "Huawei Honor 4X",
"CHE2-L12": "Huawei Honor 4X",
"Che2-L11": "Huawei Honor 4X",
"CHE1-L04": "Huawei Honor 4X",
"CHE1-CL20": "Huawei Honor 4X",
"CHE1-CL10": "Huawei Honor 4X",
"CHE-TL00H": "Huawei Honor 4x",
"CHE-TL00": "Huawei Honor 4X",
"Che-L11": "Huawei Honor 4X",
"CHC-U23": "Huawei G Play Mini",
"CHC-U03": "Huawei G Play mini",
"CHC-U01": "Huawei G Play Mini",
"CDY-TN90": "Honor 30S",
"CDY-TN20": "Huawei nova 7 SE",
"CDY-TN00": "Huawei nova 7 SE",
"CDY-N29H": "Huawei nova 7 SE",
"CDY-N29B": "Huawei nova 7 SE",
"CDY-N29": "Huawei nova 7 SE",
"CDY-AN95": "Huawei nova 7 SE",
"CDY-AN90": "Honor 30S",
"CDY-AN20": "Huawei nova 7 SE",
"CDY-AN00": "Huawei nova 7 SE",
"CDL-AN50": "Huawei nova 7 SE",
"CAZ-TL20": "Huawei Nova",
"CAZ-TL10": "Huawei Nova",
"CAZ-AL10": "Huawei Nova",
"Cannes-L12": "Huawei Nova",
"Cannes-L11": "Huawei Nova",
"Cannes-L01": "Huawei Nova",
"Cannes-AL10": "Huawei Nova Cannes-AL10",
"CAN-L13": "Huawei Nova",
"CAN-L12": "Huawei Nova",
"CAN-L11": "Huawei nova",
"CAN-L03": "Huawei Nova",
"CAN-L01": "Huawei Nova",
"Cameron-W19": "Huawei MediaPad M5 Pro 10.8",
"CAM-UL00": "Huawei Honor 5A",
"CAM-TL00": "Huawei Honor 5A",
"CAM-L23": "Huawei Y6 II",
"CAM-L21": "Huawei Y6 II",
"CAM-L03": "Huawei Y6 II Compact",
"CAM-AL00": "Huawei Honor 5A",
"CairoGO-L22": "Huawei CairoGO-L22",
"CairoGO-L02": "Huawei Y3 2018",
"Cairo-U00": "Huawei Cairo-U00",
"Cairo-L23": "Huawei Cairo-L23",
"Cairo-L22": "Huawei Cairo-L22",
"Cairo-L03": "Huawei Cairo-L03",
"Cairo-L02": "Huawei Cairo-L02",
"CAG-L02": "Huawei Y3 2018",
"C8860V": "Huawei Honor",
"C8817E": "Huawei C8817E",
"C8817D": "Huawei Honor 6 Pro",
"C8816D": "Huawei C8816D",
"C8816": "Huawei C8816",
"C199s": "Huawei C199S",
"BZT3-W59": "Huawei C5 10.4",
"BZT3-W09": "",
"BZT3-AL00": "Honor 5c",
"BZT-W09": "Huawei MediaPad C5 10.1",
"BZD-W00": "Huawei MediaPad C3",
"BZD-AL00": "Huawei MediaPad C3",
"BZC-W00": "",
"BZC-AL00": "",
"BTV-W09": "Huawei M3",
"BTV-DL09": "Huawei MediaPad M3",
"BRQ-AN00CG": "Huawei nova 8 Pro 4G",
"BRQ-AN00": "Huawei nova 8 Pro 5G",
"BRQ-AL00": "Huawei nova 8 Pro 5G",
"Bond-L24": "Huawei Honor 7X",
"BOND-L21": "Huawei Honor 7X",
"BND-TL10": "Huawei Honor 7X",
"BND-L34": "Huawei Mate SE",
"BND-L31A": "Huawei Honor 7X",
"BND-L31": "Huawei Honor 7X",
"BND-L24A": "Huawei Honor 7x",
"BND-L21": "Huawei Honor 7X",
"BND-AL10": "Huawei Honor 7X",
"BND-AL00": "Huawei Honor 7X",
"BMH-TN10": "Honor 30",
"BMH-N19": "Honor 30",
"BMH-AN20": "Honor 30",
"BMH-AN10": "Honor 30",
"BLN-TL10": "Huawei Honor 6X",
"BLN-TL00": "Huawei Honor 6X",
"BLN-L24": "Huawei Honor 6X",
"BLN-L22HN": "Huawei Honor 6X",
"BLN-L22": "Huawei Honor 6X",
"BLN-L21": "Huawei Honor 6X",
"BLN-AL40": "Huawei Honor 6X",
"BLN-AL30": "Huawei Honor 6X",
"BLN-AL20": "Huawei Honor 6X",
"BLN-AL10": "Huawei Honor 6X",
"BLL-L23": "Huawei Mate 9 Lite",
"BLL-L22": "Huawei GR5 2017",
"BLL-L21": "Huawei GR5 2017",
"BLA-TL00": "Huawei Mate 10 Pro",
"BLA-L29": "Huawei Mate 10 Pro",
"BLA-L09": "Huawei Mate 10 pro",
"BLA-AL00": "Huawei Mate 10 pro",
"BLA-A09": "Huawei Mate 10 pro",
"BKL-TL10": "Huawei Honor View 10",
"BKL-L09": "Huawei Honor View 10 Global",
"BKL-L04": "Huawei Honor View 10",
"BKL-AL20": "Huawei Honor V10",
"BKL-AL00": "Huawei Honor V10",
"BKK-TL00": "Huawei Honor 8C",
"BKK-L22": "Huawei Honor 8C",
"BKK-L21": "Huawei Honor 8C",
"BKK-AL10": "Huawei Honor 8C",
"BKK-AL00": "Honor 8C",
"BGO-L03": "Huawei MediaPad T2 7.0",
"BGO-DL09": "Huawei MediaPad T2 7.0",
"BG2-W09": "Huawei MediaPad T3",
"BG2-U03": "Huawei MediaPad T3",
"BG2-U01": "Huawei MediaPad T3 7 3G",
"Berlin-L23": "Huawei Honor 6X",
"Berlin-L22": "Huawei GR5 2017",
"Berlin-L21HN": "Huawei Honor 6X",
"Berlin-L21": "Huawei Honor 6X",
"Berkeley-LGRP2": "Huawei Honor V10",
"Barca-L22": "Huawei Barca-L22",
"Barca-L21": "Huawei Nova 2 Plus",
"Barca-L03": "Huawei Nova 2 Plus",
"BAH3-W59": "Huawei MatePad 10.4",
"BAH3-W09": "Huawei MatePad 10.4",
"BAH3-L09": "Huawei MatePad 10.4",
"BAH3-AN10": "Huawei MatePad 5G",
"BAH3-AL00": "Huawei MatePad 10.4",
"BAH2-W19": "Huawei MediaPad M5 lite",
"BAH2-W09": "Huawei MediaPad M5 lite",
"BAH2-L09": "Huawei MediaPad M5 Lite",
"BAH2-AL10": "Huawei MediaPad M5 lite",
"BAH-W09": "Huawei M3 Lite",
"BAH-L09": "Huawei MediaPad M3 Lite 10",
"BAH-AL00": "Huawei M3 Lite",
"BAC-TL00": "Huawei nova 2 plus",
"BAC-L23": "Huawei nova 2 plus",
"BAC-L22": "Huawei nova 2 plus",
"BAC-L21": "Huawei nova 2 plus",
"BAC-L03": "Huawei nova 2 plus",
"BAC-AL00": "Huawei Nova 2 Plus",
"AUM-L41": "Huawei Honor 7C (Enjoy 8)",
"AUM-L29": "Huawei Honor 7A Pro",
"ATU-LX3": "Huawei Y6 2018",
"ATU-L42": "Huawei Y6 Prime 2018",
"ATU-L22": "Huawei Y6 2018",
"ATU-L21": "Huawei Y6 2018",
"ATU-L11": "Huawei Y6 2018",
"ATU-L03": "Huawei Y6 2018",
"ATU-AL10": "Huawei Enjoy 8e",
"Atomu-L21": "Huawei Y6 Prime 2018",
"Atomu-L03": "Huawei Honor 7A",
"Atomu-AL20IND": "Huawei Honor 7A",
"ATH-UL06": "Huawei ShotX",
"ATH-UL01": "Huawei ShotX",
"ATH-UL00": "Huawei Honor 7i",
"ATH-TL00": "Huawei Honor 7i",
"ATH-CL00": "Huawei Honor 7i",
"ATH-AL00": "Huawei Honor 7i",
"ASKH-TL00": "Honor Play 3",
"ASKH-AL00": "Honor Play 3",
"ARTH-TL00": "Huawei Enjoy 10",
"ARTH-L29N": "Huawei Y7p",
"ARTH-L29": "Huawei Y7p",
"ARTH-L28": "Huawei Y7p",
"ARTH-L09": "Huawei Enjoy 10",
"ARTH-L08": "Huawei Enjoy 10",
"ARTH-AL00M": "Huawei Enjoy 10",
"ARTH-AL00": "Huawei Enjoy 10",
"ARS-TL00": "Huawei Enjoy 9 Max",
"ARS-L22": "Huawei Y Max",
"Ares-L22HW": "Huawei Y Max",
"ARE-TL00": "Huawei Honor 8X Max",
"ARE-L22HN": "Huawei Honor 8X Max",
"AQM-TL00": "Huawei Enjoy 10s",
"AQM-L21A": "Huawei Y8P",
"AQM-L01": "Huawei Y8p",
"AQM-AL10HN": "Honor Play 4T Pro",
"AQM-AL00": "Huawei Enjoy 10s",
"ANG-AN00": "Huawei nova 8 5G",
"ANE-TL00": "Huawei P20 lite",
"ANE-LX3": "Huawei P20 Lite",
"ANE-LX2JOT": "Huawei P20 Lite",
"ANE-LX2J": "Huawei P20 Lite",
"ANE-LX2": "Huawei Nova 3e",
"ANE-LX1": "Huawei P20 Lite",
"ANE-LGRP1": "Huawei P20 Lite",
"ANE-L21": "Huawei P20 Lite",
"ANE-L12JPZ": "Huawei Nova 3e",
"ANE-L12": "Huawei Nova 3e",
"ANE-L03": "Huawei Nova 3e",
"ANE-L02J": "Huawei Nova 3e",
"ANE-L02": "Huawei Nova 3e",
"ANE-AL00I": "Huawei P20 Lite",
"ANE-AL00": "Huawei P20 Lite",
"ANA-TN00": "Huawei P40",
"ANA-N29": "Huawei P40",
"ANA-L04": "Huawei P40",
"ANA-AN00": "Huawei P40",
"ANA-AL00": "Huawei P40 4G",
"AMN-L29": "Huawei Y5 (2019)",
"AMN-L22": "Huawei Y5 (2019)",
"AMN-L09": "Huawei Y5 (2019)",
"ALP-TL00ZZB51": "Huawei Mate 10",
"ALP-TL00B": "Huawei Mate 10",
"ALP-TL00": "Huawei Mate 10",
"ALP-LGRP2": "Huawei Mate 10",
"ALP-LGRP1": "Huawei Mate 10",
"ALP-L29": "Huawei Mate 10",
"ALP-L09": "Huawei Mate 10",
"ALP-AL00ZZB54": "Huawei Mate 10",
"ALP-AL00ZZB02": "Huawei Mate 10",
"ALP-AL00": "Huawei Mate 10",
"ALE-TL00": "Huawei P8 Lite",
"ALE-L32": "Huawei P8 Lite",
"ALE-L23URY": "Huawei P8 Lite",
"ALE-L23": "Huawei P8 Lite",
"ALE-L21TUR": "Huawei P8 Lite",
"ALE-L21S": "Huawei P8 Lite",
"ALE-L21POL": "Huawei P8 Lite",
"ALE-L21MKD": "Huawei P8 Lite",
"ALE-L21HUN": "Huawei P8 Lite",
"ALE-L21HR": "Huawei P8 Lite",
"ALE-L21GR": "Huawei P8 Lite",
"ALE-L21FRA": "Huawei P8 Lite",
"ALE-L21DEU": "Huawei P8 Lite",
"ALE-L21AUT": "Huawei P8 Lite",
"ALE-L21": "Huawei P8 Lite",
"ALE-L03": "Huawei P8 Lite",
"ALE-L02": "Huawei P8 Lite",
"ALE-L01": "Huawei P8 Lite",
"ALE-CL00": "Huawei P8 Lite",
"AKA-L29": "Honor Play 4T",
"AKA-AL20": "Honor Play 4T",
"AKA-AL10": "Honor Play 4T",
"AGS3K-W10": "Huawei MatePad T 10s",
"AGS3K-W09": "Huawei MatePad T 10s",
"AGS3K-L09": "Huawei MatePad T 10s",
"AGS3-W09HN": "Huawei Enjoy Tablet 2",
"AGS3-W09": "Huawei MatePad T 10s",
"AGS3-W00E": "Huawei Enjoy Tablet 2",
"AGS3-W00D": "Huawei Enjoy Tablet 2",
"AGS3-W00B": "Huawei Enjoy Tablet 2",
"AGS3-L09": "Huawei MatePad T 10s",
"AGS3-AL09HN": "Huawei Enjoy Tablet 2",
"AGS3-AL00": "Huawei Enjoy Tablet 2",
"AGS2-W09HN": "Huawei MediaPad T5",
"AGS2-W09AUS": "Huawei MediaPad T5",
"AGS2-W09": "Huawei MediaPad T5",
"AGS2-L09": "Huawei MediaPad T5",
"AGS2-L03": "Huawei MediaPad T5",
"AGS2-AL00HN": "Huawei MediaPad T5",
"AGS2-AL00": "Honor Pad 5 10.1",
"AGS-W09": "Huawei MediaPad T3 10",
"AGS-L09": "Huawei MediaPad T3 10",
"AGRK-W09K": "Huawei MatePad T 10s",
"AGRK-W09": "Huawei AGRK-W09",
"AGRK-L09K": "Huawei MatePad T 10s",
"AGRK-L09": "Huawei MatePad T 10s",
"AGR-W09K": "Honor Pad X6",
"AGR-W09HN": "Huawei Enjoy Tablet 2",
"AGR-W09": "Honor Pad X6",
"AGR-L09": "Huawei MatePad T 10s",
"AGR-AL09HN": "Honor Pad X6",
"7D-504L": "Huawei MediaPad X1 7.0",
"7D-501u": "Huawei MediaPad X1 7.0",
"7D-501L": "Huawei MediaPad X1 7.0",
"704HW": "Huawei Nova Lite 2",
"608HW": "Huawei nova lite",
"NOP-AN01P": "Huawei Mate 40 Pro+",
"NOP-AN00P": "Huawei Mate 40 Pro+",
"NOP-AN00": "Huawei Mate 40 Pro+",
"NOH-N29": "Huawei Mate 40 Pro",
"NOH-AN01": "Huawei Mate 40 Pro",
"NOH-AN00": "Huawei Mate 40 Pro",
"NMO-L31": "Huawei GT3",
"NMO-L22": "Huawei GT3",
"NMO-L02": "Huawei NMO-L02",
"NICE-TL10": "Huawei Nice-TL10",
"NICE-AL10": "Huawei Nice-AL10",
"NICE-AL00": "Huawei Nice-AL00",
"NEO-L29": "Huawei Mate RS",
"NEN-L23CQ": "Huawei nova 8 5G",
"NEN-L22CQ": "Huawei nova 8 5G",
"NEN-L21CQ": "Huawei nova 8 5G",
"NEN-L03CQ": "Huawei nova 8 5G",
"NEN-L01CQ": "Huawei nova 8 5G",
"NEM-UL10": "Huawei Honor 5C",
"NEM-TL00": "Huawei Honor 5C",
"NEM-L51": "Huawei Honor 5C",
"NEM-L22": "Huawei Honor 5C",
"NEM-L21": "Huawei HONOR 7 Lite",
"NEM-AL10": "Huawei Honor 5C",
"MXWM-TN00": "Honor 30 Youth",
"MXWM-AN00": "Honor 30 Youth",
"MT7-UL00": "Huawei Ascend Mate 7",
"MT7-TL10": "Huawei Ascend Mate7",
"MT7-TL00": "Huawei Mate 7",
"MT7-L11": "Huawei Ascend Mate7",
"MT7-L09": "Huawei Ascend Mate7",
"MT7-J1": "Huawei Ascend Mate 7",
"MT7-CL00": "Huawei Ascend Mate 7",
"MT2-L05": "Huawei Ascend Mate2",
"MT1-U06": "Huawei Ascend Mate7",
"MT-L09": "Huawei Ascend Mate7",
"MRX-W39": "Huawei MatePad Pro",
"MRX-W29": "Huawei MatePad Pro",
"MRX-W19": "Huawei MatePad Pro",
"MRX-W09": "Huawei MatePad Pro",
"MRX-AN19": "Huawei MatePad Pro 5G",
"MRX-AL19": "Huawei MatePad Pro",
"MRX-AL09": "Huawei MatePad Pro",
"MRD-TL00": "Huawei Enjoy 9e",
"MRD-LX3": "Huawei Y6 2019",
"MRD-L41A": "Huawei Y6 (2019)",
"MRD-L41": "Huawei Y6 2019",
"MRD-L23": "Huawei Y6 2019",
"MRD-L22": "Huawei Y6 Pro (2019)",
"MRD-L21A": "Huawei Y6 Pro (2019)",
"MRD-L21": "Huawei Y6 2019",
"MRD-L11": "Huawei Y6 2019",
"MRD-L01": "Huawei Y6 2019",
"MRD-AL00": "Huawei Enjoy 9e",
"MOA-TL00": "Honor Play 9A",
"MOA-L49I": "Honor Play 9A",
"MOA-L49": "Honor 9A",
"MOA-AL20": "Honor Play 9A",
"MOA-AL00": "Honor Play 9A",
"MLA-UL00": "Huawei G9 Plus",
"MLA-TL10": "Huawei G9 Plus",
"MLA-TL00": "Huawei G9 Plus",
"MLA-L13": "Huawei nova plus",
"MLA-L12": "Huawei nova plus",
"MLA-L11": "Huawei nova plus",
"MLA-L03": "Huawei Nova plus",
"MLA-L02": "Huawei Nova Plus",
"MLA-L01": "Huawei Nova Plus",
"MLA-AL10": "Huawei Nova Plus",
"MLA-AL00": "Huawei Maimang 5",
"MHA-TL00": "Huawei Mate 9",
"MHA-L29": "Huawei Mate 9",
"MHA-L09": "Huawei Mate 9",
"MHA-AL00": "Huawei Mate 9 Pro",
"MED-TL00": "Huawei Enjoy 10",
"MED-L49": "Huawei Y6p",
"MED-L29II": "Honor 9A",
"MED-L29": "Honor 9A",
"MED-L09": "Huawei Y6p",
"MED-AL20": "Honor Play 9A",
"MED-AL10": "Honor Play 9A",
"MED-AL00": "Honor Play 9A",
"Maya-U29": "Huawei Honor Maya",
"Maya-TL10": "Huawei Honor Maya",
"Maya-L41": "Huawei Y6 2017",
"Maya-L13": "Huawei Honor Maya",
"Maya-L11": "Huawei Y6 2017",
"Maya-L03": "Huawei Maya L03",
"Maya-AL10": "Huawei Honor Maya",
"MAR-TL00": "Huawei nova 4e",
"MAR-L22BX": "Huawei P30 lite",
"MAR-L22B": "Huawei P30 lite",
"MAR-L22A": "Huawei P30 lite",
"MAR-L21MEB": "Huawei P30 lite",
"MAR-L21MEA": "Huawei P30 lite",
"MAR-L21H": "Huawei P30 lite",
"MAR-L21B": "Huawei P30 lite",
"MAR-L21A": "Huawei P30 lite",
"MAR-L03A": "Huawei P30 lite",
"MAR-L01MEB": "Huawei P30 lite",
# redmi and Mi and POCO devices
'2014215': 'Xiaomi Mi 4',
'2014712': 'Xiaomi Redmi Note',
'2014817': 'Xiaomi Redmi 2',
'2014818': 'Xiaomi Redmi 2',
'2015015': 'Xiaomi Mi 4i',
'2015051': 'Xiaomi Redmi Note 2',
'2015105': 'Xiaomi Mi 5',
'2015116': 'Xiaomi Redmi Note 3',
'2015161': 'Xiaomi Redmi Note 3',
'2015213': 'Xiaomi Mi Note 2',
'2015711': 'Xiaomi Mi 5s',
'2015816': 'Xiaomi Redmi 3',
'2016001': 'Xiaomi Mi Max',
'2016002': 'Xiaomi Mi Max',
'2016007': 'Xiaomi Mi Max',
'2016031': 'Xiaomi Redmi 3s',
'2016060': 'Xiaomi Redmi 4 (4X)',
'2016070': 'Xiaomi Mi 5s Plus',
'2016090': 'Xiaomi Redmi 4 (4X)',
'2016100': 'Xiaomi Redmi Note 4',
'2016117': 'Xiaomi Redmi 4A',
'AWM-A0': 'Xiaomi Black Shark Helo',
'DLT-A0': 'Xiaomi Black Shark 2 Pro',
'DLT-H0': 'Xiaomi Black Shark 2 Pro',
'M1803D5XA': 'Xiaomi Mi Mix 2S',
'M1803E1A': 'Xiaomi Mi 8',
'M1803E6G': 'Xiaomi Redmi S2 (Redmi Y2)',
'M1803E6H': 'Xiaomi Redmi S2 (Redmi Y2)',
'M1803E6I': 'Xiaomi Redmi S2 (Redmi Y2)',
'M1803E7SG': 'Xiaomi Redmi Note 5 AI Dual Camera',
'M1803E7SH': 'Xiaomi Redmi Note 5 AI Dual Camera',
'M1804C3CG': 'Xiaomi Redmi 6A',
'M1804C3CH': 'Xiaomi Redmi 6A',
'M1804C3CI': 'Xiaomi Redmi 6A',
'M1804C3DG': 'Xiaomi Redmi 6',
'M1804C3DH': 'Xiaomi Redmi 6',
'M1804C3DI;': 'Xiaomi Redmi 6',
'M1804D2SG': 'Xiaomi Mi A2 (Mi 6X)',
'M1804D2SI': 'Xiaomi Mi A2 (Mi 6X)',
'M1804E4A': 'Xiaomi Mi Max 3',
'M1805D1SG': 'Xiaomi Mi A2 Lite (Redmi 6 Pro)',
'M1805E10A': 'Xiaomi Pocophone F1',
'M1806E7TG': 'Xiaomi Redmi Note 6 Pro',
'M1806E7TH': 'Xiaomi Redmi Note 6 Pro',
'M1806E7TI': 'Xiaomi Redmi Note 6 Pro',
'M1807E8A': 'Xiaomi Mi 8 Pro',
'M1808D2TG': 'Xiaomi Mi 8 Lite',
'M1810F6LG': 'Xiaomi Redmi 7',
'M1810F6LH': 'Xiaomi Redmi 7',
'M1810F6LI': 'Xiaomi Redmi 7',
'M1901F71': 'Xiaomi Redmi Note 7S',
'M1901F7G': 'Xiaomi Redmi Note 7',
'M1901F7H': 'Xiaomi Redmi Note 7',
'M1901F7I': 'Xiaomi Redmi Note 7',
'M1901F7S': 'Xiaomi Redmi Note 7 Pro',
'M1901F9E': 'Xiaomi Mi Play',
'M1902F1G': 'Xiaomi Mi 9',
'M1903C3EG': 'Xiaomi Redmi 7A',
'M1903C3EH': 'Xiaomi Redmi 7A',
'M1903C3EI': 'Xiaomi Redmi 7A',
'M1903C3GG': 'Xiaomi Redmi Go',
'M1903C3GH': 'Xiaomi Redmi Go',
'M1903C3GI': 'Xiaomi Redmi Go',
'M1903F10G': 'Xiaomi Mi 9T',
'M1903F10I': 'Xiaomi Redmi K20',
'M1903F11G': 'Xiaomi Mi 9T Pro',
'M1903F2G': 'Xiaomi Mi 9 SE',
'M1904F3BG': 'Xiaomi Mi 9 Lite',
'M1906F9SH': 'Xiaomi Mi A3',
'M1906F9SI': 'Xiaomi Mi A3',
'M1906G7G': 'Xiaomi Redmi Note 8 Pro',
'M1906G7I': 'Xiaomi Redmi Note 8 Pro',
'M1908C3JG': 'Xiaomi Redmi Note 8',
'M1908C3JH': 'Xiaomi Redmi Note 8',
'M1908C3JI': 'Xiaomi Redmi Note 8',
'M1908C3KG': 'Xiaomi Redmi 8A',
'M1908C3KH': 'Xiaomi Redmi 8A',
'M1908C3XG': 'Xiaomi Redmi Note 8T',
'M1910F4E': 'Xiaomi Mi CC9 Pro',
'M1910F4G': 'Xiaomi Mi Note 10 Lite',
'M1910F4S': 'Xiaomi Mi Note 10 Pro',
'M1912G7BC': 'Xiaomi Redmi K30',
'M1912G7BE': 'Xiaomi Redmi K30',
'M2001C3K3I': 'Xiaomi Redmi 8A Dual',
'M2001J1G': 'Xiaomi Mi 10 Pro 5G',
'M2001J2G': 'Xiaomi Mi 10 5G',
'M2001J2I': 'Xiaomi Mi 10 5G',
'M2002F4LG': 'Xiaomi Mi Note 10 Lite',
'M2002J9E': 'Xiaomi Mi 10 Youth 5G',
'M2002J9G': 'Xiaomi Mi 10 Lite 5G',
'M2003J15SC': 'Xiaomi Redmi 10X 4G',
'M2003J15SG': 'Xiaomi Redmi Note 9',
'M2003J15SS': 'Xiaomi Redmi Note 9',
'M2003J6A1G': 'Xiaomi Redmi Note 9S',
'M2003J6A1I': 'Xiaomi Redmi Note 9 Pro (India)',
'M2003J6B1I': 'Xiaomi Redmi Note 9 Pro Max',
'M2004C3MI': 'Xiaomi Redmi 9 (India)',
'M2004J11G': 'Xiaomi Poco F2 Pro',
'M2004J19C': 'Xiaomi Redmi 9',
'M2004J19G': 'Xiaomi Redmi 9',
'M2010J19SI': 'Xiaomi Redmi 9 Power',
'M2004J19PI': 'Xiaomi Poco M2',
'M2004J7AC': 'Xiaomi Redmi Note 10',
'M2101K6I': 'Xiaomi Redmi Note 10 Pro Max',
'M2103K19G': 'Xiaomi Redmi Note 10 5G',
'M2004J7BC': 'Xiaomi Redmi 10X Pro 5G',
'M2006C3LC': 'Xiaomi Redmi 9A',
'M2006C3LG': 'Xiaomi Redmi 9A',
'M2006C3LI': 'Xiaomi Redmi 9A',
'M2006C3LII': 'Xiaomi Redmi 9i',
'M2006C3LVG': 'Xiaomi Redmi 9AT',
'M2006C3MG': 'Xiaomi Redmi 9C',
'M2006C3MII': 'Xiaomi Redmi 9 (India)',
'M2006C3MNG': 'Xiaomi Redmi 9C NFC',
'M2006J10C': 'Xiaomi Redmi K30 Ultra',
'M2007J17C': 'Xiaomi Redmi Note 9 Pro 5G',
'M2007J17G': 'Xiaomi Mi 10T Lite 5G',
'M2007J17I': 'Xiaomi Mi 10i',
'M2007J1SC': 'Xiaomi Mi 10 Ultra',
'M2007J20CG': 'Xiaomi Poco X3 NFC',
'M2007J20CI': 'Xiaomi Poco X3',
'M2007J20CT': 'Xiaomi Poco X3 NFC',
'M2007J22C': 'Xiaomi Redmi Note 9 5G',
'M2007J3SC': 'Xiaomi Redmi K30S',
'M2007J3SG': 'Xiaomi Mi 10T Pro 5G',
'M2007J3SY': 'Xiaomi Mi 10T 5G',
'M2010J19CG': 'Xiaomi Poco M3',
'M2010J19CI': 'Xiaomi Poco M3',
'MAE136': 'Xiaomi Redmi 4 (4X)',
'MAG138': 'Xiaomi Redmi 4 (4X)',
'MCE16': 'Xiaomi Mi 6',
'MCE8': 'Xiaomi Mi Note 3',
'MCG3B': 'Xiaomi Redmi 5A',
'MCI3B': 'Xiaomi Redmi 5A',
'MDE40': 'Xiaomi Mi Max 2',
'MDE5': 'Xiaomi Mi Mix 2',
'MDG1': 'Xiaomi Redmi 5',
'MDG2': 'Xiaomi Mi A1 (Mi 5X)',
'MDI1': 'Xiaomi Redmi 5',
'MDI2': 'Xiaomi Mi A1 (Mi 5X)',
'MDI40': 'Xiaomi Mi Max 2',
'MDI6': 'Xiaomi Redmi Y1 Lite',
'MDI6S': 'Xiaomi Redmi Y1 (Note 5A)',
'MEG7': 'Xiaomi Redmi 5 Plus (Redmi Note 5)',
'MEI7': 'Xiaomi Redmi Note 5 Pro',
'MEI7S': 'Xiaomi Redmi Note 5 Pro',
'MZB07QAIN': 'Xiaomi Poco C3',
'MZB07RHIN': 'Xiaomi Poco C3',
'MZB07RIIN': 'Xiaomi Poco C3',
'MZB07RJIN': 'Xiaomi Poco C3',
'MZB07RKIN': 'Xiaomi Poco C3',
'MZB07RLIN': 'Xiaomi Poco C3',
'MZB07Z0IN': 'Xiaomi Poco X3',
'MZB07Z1IN': 'Xiaomi Poco X3',
'MZB07Z2IN': 'Xiaomi Poco X3',
'MZB07Z3IN': 'Xiaomi Poco X3',
'MZB07Z4IN': 'Xiaomi Poco X3',
'MZB7995IN': 'Xiaomi Redmi 7A',
'MZB8458IN': 'Xiaomi Redmi 8A',
'MZB8741IN': 'Xiaomi Poco X2',
'MZB8742IN': 'Xiaomi Poco X2',
'MZB8743IN': 'Xiaomi Poco X2',
'MZB8744IN': 'Xiaomi Poco X2',
'MZB8745IN': 'Xiaomi Poco X2',
'MZB8746IN': 'Xiaomi Poco X2',
'MZB9011IN': 'Xiaomi Poco X2',
'MZB9012IN': 'Xiaomi Poco X2',
'MZB9013IN': 'Xiaomi Poco X2',
'MZB9919IN': 'Xiaomi Poco M2',
'MZB9965IN': 'Xiaomi Poco X3',
'SHARK MBU-A0': 'Xiaomi Black Shark 3 Pro',
'SHARK MBU-H0': 'Xiaomi Black Shark 3 Pro',
'SKW-A0': 'Xiaomi Black Shark 2',
'SKW-H0': 'Xiaomi Black Shark 2',
# Realme devices
"RMX1931": "Realme X",
"RMX1901": "Realme X",
"RMX1941": "Realme C2",
"RMX2156": "Realme Narzo 30",
"RMX3360": "Realme GT Master Edition",
"RMX1851": "Realme 3 Pro",
"RMX2030": "Realme 5i",
# BlackBerry devices
"BBG100-1": "BlackBerry Evolve",
# Asus
"ASUS_X00TD": "ASUS Zenfone Max Pro M1",
"ASUS_Z017DB": "ASUS Zenfone 3",
"ASUS_X00HD": "Asus Zenfone 4 Max",
"ASUS_X00TDA": "ASUS Zenfone Max Pro M1",
"ASUS_I01WD": "Asus Zenfone 6",
"ASUS_Z01RD": "Asus Zenfone 5Z",
"ZS630KL": "Asus Zenfone 6",
"I01WD": "Asus Zenfone 6",
# Others
"V2037": "vivo Y20G",
"I2012": "vivo"
}
# NOTE(review): removed four stray artifact lines ("| 36.676969", "| 69",
# "| 0.58378", "|") — leaked tabular metadata, not Python, and they made
# the module syntactically invalid.
# Module authorship metadata (informal convention; not used at runtime).
__author__ = 'Prakash14'
class OS:
    """Canonical display names for the operating systems this module can report.

    Attributes are plain string constants; callers compare against or display
    these values directly, so the exact strings must not change.
    """

    ANDROID = "Android"
    CHROME_OS = "Chrome OS"
    IOS = "iOS"
    LINUX = "Linux"
    MAC_OS = "Mac Os"
    WINDOWS = "Windows"
    WINDOWS_PHONE = "Windows Phone"
class DEVICE_TYPE:
    """Broad device categories a client can be classified into.

    Plain string constants; the exact values are part of the public contract.
    """

    BOT = "Bot"
    COMPUTER = "Computer"
    MOBILE = "Mobile"
    SERVER = "Server"
class DEVICE_NAME:
    """Human-readable device family labels.

    Plain string constants; the exact values are part of the public contract.
    """

    ANDROID = "Android Phone"
    CHROME_BOOK = "Chrome Book"
    IPAD = "iPad"
    IPHONE = "iPhone"
    MAC = "Mac"
MOBILE_DEVICE_CODE_NAME = {
"AC2003": "OnePlus Nord 5G",
"EB2101": "OnePlus Nord CE 5G",
"EB2103": "OnePlus Nord CE 5G",
"DN2101": "OnePlus Nord 5G",
"DN2103": "OnePlus Nord 5G",
"AC2001": "OnePlus Nord",
"GM1901": "OnePlus 7",
"A6000": "OnePlus 6",
"A6010": "OnePlus 6T",
"A6003": "OnePlus 6",
"A5010": "OnePlus 5T",
"A5000": "OnePlus 5",
"LE2101": "OnePlus 9R",
"LE2100": "OnePlus 9R",
"LE2113": "OnePlus 9",
"LE2111": "OnePlus 9",
"LE2110": "OnePlus 9",
"LE2117": "OnePlus 9",
"LE2121": "OnePlus 9 Pro",
"LE2125": "OnePlus 9 Pro",
"LE2123": "OnePlus 9 Pro",
"LE2120": "OnePlus 9 Pro",
"LE2127": "OnePlus 9 Pro",
"GM1911": "OnePlus 7 Pro",
"GM1913": "OnePlus 7 Pro",
"GM1917": "OnePlus 7 Pro",
"GM1910": "OnePlus 7 Pro",
"GM1915": "OnePlus 7 Pro",
"HD1901": "OnePlus 7T",
"HD1903": "OnePlus 7T",
"HD1900": "OnePlus 7T",
"HD1907": "OnePlus 7T",
"HD1905": "OnePlus 7T",
"HD1911": "OnePlus 7T",
"KB2001": "OnePlus 8T",
"KB2000": "OnePlus 8T",
"KB2003": "OnePlus 8T",
"KB2005": "OnePlus 8T",
"IN2013": "OnePlus 8",
"IN2017": "OnePlus 8",
"IN2019": "OnePlus 8",
"IN2010": "OnePlus 8",
"IN2011": "OnePlus 8",
"IN2021": "OnePlus 8 Pro",
"IN2023": "OnePlus 8 Pro",
"IN2020": "OnePlus 8 Pro",
"IN2025": "OnePlus 8 Pro",
"SM-X900": "Samsung Galaxy Tab S8 Ultra",
"SM-X906": "Samsung Galaxy Tab S8 Ultra",
"SM-X800": "Samsung Galaxy Tab S8+",
"SM-X806": "Samsung Galaxy Tab S8+",
"SM-X700": "Samsung Galaxy Tab S8",
"SM-X706": "Samsung Galaxy Tab S8",
"SM-S908": "Samsung Galaxy S22 Ultra",
"SM-S906": "Samsung Galaxy S22+",
"SM-S901": "Samsung Galaxy S22",
"SM-G990": "Samsung Galaxy S21 FE",
"SM-A136": "Samsung Galaxy A13 5G",
"SM-X200": "Samsung Galaxy Tab A8 10.5",
"SM-X205": "Samsung Galaxy Tab A8 10.5",
"SM-A032": "Samsung Galaxy A03 Core",
"SM-E426": "Samsung Galaxy F42 5G",
"SM-M526": "Samsung Galaxy M52 5G",
"SM-M225": "Samsung Galaxy M22",
"SM-M326": "Samsung Galaxy M32 5G",
"SM-A037": "Samsung Galaxy A03s",
"SM-A528": "Samsung Galaxy A52s 5G",
"SM-F926B": "Samsung Galaxy Z Fold3 5G",
"SM-F711B": "Samsung Galaxy Z Flip3 5G",
"SM-E225": "Samsung Galaxy F22",
"SM-M325": "Samsung Galaxy M32",
"SM-A226": "Samsung Galaxy A22 5G",
"SM-A225": "Samsung Galaxy A22",
"SM-T730": "Samsung Galaxy Tab S7 FE",
"SM-T736B": "Samsung Galaxy Tab S7 FE",
"SM-T220": "Samsung Galaxy Tab A7 Lite",
"SM-T225": "Samsung Galaxy Tab A7 Lite",
"SM-E526": "Samsung Galaxy F52 5G",
"SM-M426": "Samsung Galaxy M42 5G",
"SM-E025": "Samsung Galaxy F02s",
"SM-F127": "Samsung Galaxy F12",
"SM-A725": "Samsung Galaxy A72",
"SM-A526": "Samsung Galaxy A52 5G",
"SM-A525": "Samsung Galaxy A52",
"SM-A325": "Samsung Galaxy A32",
"SM-M625": "Samsung Galaxy M62",
"SM-E625": "Samsung Galaxy F62",
"SM-M127": "Samsung Galaxy M12",
"SM-M022": "Samsung Galaxy M02",
"SM-A022": "Samsung Galaxy A02",
"SM-G991": "Samsung Galaxy S21",
"SM-G996": "Samsung Galaxy S21+",
"SM-G998": "Samsung Galaxy S21 Ultra",
"SM-A326": "Samsung Galaxy A32 5G",
"SM-M025": "Samsung Galaxy M02s",
"SM-A025": "Samsung Galaxy A02s",
"SM-A125": "Samsung Galaxy A12",
"SM-M217": "Samsung Galaxy M21s",
"SM-A426": "Samsung Galaxy A42 5G",
"SM-F415": "Samsung Galaxy F41",
"SM-G780": "Samsung Galaxy S20 FE",
"SM-G781": "Samsung Galaxy S20 FE",
"SM-F916B": "Samsung Galaxy Z Fold2 5G",
"SM-M515": "Samsung Galaxy M51",
"SM-N980": "Samsung Galaxy Note 20",
"SM-N981": "Samsung Galaxy Note 20",
"SM-N985": "Samsung Galaxy Note 20 Ultra",
"SM-N986": "Samsung Galaxy Note 20 Ultra",
"SM-F707": "Samsung Galaxy Z Flip 5G",
"SM-T870": "Samsung Galaxy Tab S7",
"SM-T875": "Samsung Galaxy Tab S7",
"SM-T876B": "Samsung Galaxy Tab S7",
"SM-T970": "Samsung Galaxy Tab S7+",
"SM-T976B": "Samsung Galaxy Tab S7+",
"SM-M317": "Samsung Galaxy M31s",
"SM-A013": "Samsung Galaxy A01 Core",
"SM-M017": "Samsung Galaxy M01s",
"SM-M015": "Samsung Galaxy M01",
"SM-A217": "Samsung Galaxy A21s",
"SM-A716F": "Samsung Galaxy A71 5G",
"SM-A516F": "Samsung Galaxy A51 5G",
"SM-A215": "Samsung Galaxy A21",
"SM-P610N": "Samsung Galaxy Tab S6 Lite",
"SM-P615": "Samsung Galaxy Tab S6 Lite",
"SM-G980": "Samsung Galaxy S20",
"SM-G981": "Samsung Galaxy S20",
"SM-G985": "Samsung Galaxy S20+",
"SM-G986": "Samsung Galaxy S20+",
"SM-G988": "Samsung Galaxy S20 Ultra",
"SM-M115": "Samsung Galaxy M11",
"SM-M115F": "Samsung Galaxy M11",
"SM-A315": "Samsung Galaxy A31",
"SM-A415": "Samsung Galaxy A41",
"SM-M215": "Samsung Galaxy M21",
"SM-A115": "Samsung Galaxy A11",
"SM-M315": "Samsung Galaxy M31",
"SM-F700": "Samsung Galaxy Z Flip",
"SM-T866N": "Samsung Galaxy Tab S6 5G",
"SM-G715F": "Samsung Galaxy Xcover Pro",
"SM-N770F": "Samsung Galaxy Note 10 Lite",
"SM-G770F": "Samsung Galaxy S10 Lite",
"SM-A015": "Samsung Galaxy A01",
"SM-A715": "Samsung Galaxy A71",
"SM-A515": "Samsung Galaxy A51",
"SM-M307": "Samsung Galaxy M30s",
"SM-A207": "Samsung Galaxy A20s",
"SM-M107": "Samsung Galaxy M10s",
"SM-A707": "Samsung Galaxy A70s",
"SM-A507": "Samsung Galaxy A50s",
"SM-A307": "Samsung Galaxy A30s",
"SM-A908": "Samsung Galaxy A90 5G",
"SM-F900": "Samsung Galaxy Z Fold",
"SM-F907": "Samsung Galaxy Z Fold",
"SM-A107": "Samsung Galaxy A10s",
"SM-A102": "Samsung Galaxy A10e",
"SM-N970": "Samsung Galaxy Note 10",
"SM-N971": "Samsung Galaxy Note 10",
"SM-N975F": "Samsung Galaxy Note 10+",
"SM-N976": "Samsung Galaxy Note 10+",
"SM-M405": "Samsung Galaxy M40",
"SM-G977": "Samsung Galaxy S10 5G",
"SM-T920": "Samsung Galaxy View 2",
"SM-T927": "Samsung Galaxy View 2",
"SM-T927A": "Samsung Galaxy View 2",
"SM-A606": "Samsung Galaxy A60",
"SM-A805": "Samsung Galaxy A80",
"SM-A705": "Samsung Galaxy A70",
"SM-A405": "Samsung Galaxy A40",
"SM-A205": "Samsung Galaxy A20",
"SM-A202": "Samsung Galaxy A20e",
"SM-A260": "Samsung Galaxy A2 Core",
"SM-G975": "Samsung Galaxy S10+",
"SM-G973": "Samsung Galaxy S10",
"SM-G970": "Samsung Galaxy S10e",
"SM-A505": "Samsung Galaxy A50",
"SM-A305": "Samsung Galaxy A30",
"SM-A105": "Samsung Galaxy A10",
"SM-T720": "Samsung Galaxy Tab S5e",
"SM-T725": "Samsung Galaxy Tab S5e",
"SM-T510": "Samsung Galaxy Tab A 10.1 (2019)",
"SM-T515": "Samsung Galaxy Tab A 10.1 (2019)",
"SM-M305": "Samsung Galaxy M30",
"SM-M105": "Samsung Galaxy M10",
"SM-M205": "Samsung Galaxy M20",
"SM-G887": "Samsung Galaxy A8s",
"SM-G6200": "Samsung Galaxy A6s",
"SM-A920": "Samsung Galaxy A9 (2018)",
"SM-A750": "Samsung Galaxy A7 (2018)",
"SM-J415": "Samsung Galaxy J4+",
"SM-J610": "Samsung Galaxy J6+",
"SM-N960": "Samsung Galaxy Note 9",
"SM-T590": "Samsung Galaxy Tab A 10.5 (2018)",
"; SM-T595": "Samsung Galaxy Tab A 10.5 (2018)",
"SM-T830": "Samsung Galaxy Tab S4",
"; SM-T835": "Samsung Galaxy Tab S4",
"SM-J800": "Samsung Galaxy J8 (2018)",
"SM-J600G": "Samsung Galaxy On6",
"SM-G8850": "Samsung Galaxy A8 Star[16]",
"SM-J737": "Samsung Galaxy J7 (2018)",
"SM-A600": "Samsung Galaxy A6 (2018)",
"SM-A605": "Samsung Galaxy A6+ (2018)",
"SM-J400": "Samsung Galaxy J4 (2018)",
"SM-J600": "Samsung Galaxy J6 (2018)",
"SM-J720": "Samsung Galaxy J3 Duo",
"SM-G611": "Samsung Galaxy J4 Prime 2 Samsung Galaxy J7 (2018)",
"SM-G960": "Samsung Galaxy S9",
"SM-G965": "Samsung Galaxy S9+",
"SM-J250": "Samsung Galaxy J4 Pro (2018)",
"SM-A530": "Samsung Galaxy A5 (2018)[17]",
"SM-A730": "Samsung Galaxy A5+ (2018)",
"SM-J200G": "Samsung Galaxy J2",
"SM-T380": "Samsung Galaxy Tab A 8.0 (2017)",
"SM-T385": "Samsung Galaxy Tab A 8.0 (2017)",
"SM-C710": "Samsung Galaxy C8 / C7 (2017)Samsung Galaxy J7+",
"SM-C8000": "Samsung Galaxy C8 / C7 (2017)Samsung Galaxy J7+",
"SM-N950": "Samsung Galaxy Note 8",
"SM-G892": "Samsung Galaxy S8 Active",
"SM-N935": "Samsung Galaxy Note Fan Edition (FE)",
"SM-J727": "Samsung Galaxy J3 (2017)",
"SM-J730x": "Samsung Galaxy J3 (2017)",
"SM-J530": "Samsung Galaxy J5 (2017)",
"SM-J530Y": "Samsung Galaxy J5 (2017)",
"SM-J327": "Samsung Galaxy J7 (2017)",
"SM-J330x": "Samsung Galaxy J7 (2017)",
"SM-J730": "Samsung Galaxy J7 Pro (2017)",
"SM-G615": "Samsung Galaxy J7 Max",
"SM-G390": "Samsung Galaxy Xcover 4",
"SM-G950": "Samsung Galaxy S8",
"SM-G955": "Samsung Galaxy S8+",
"SM-C5010": "Samsung Galaxy C5 Pro",
"SM-T820": "Samsung Galaxy Tab S3",
"SM-T825": "Samsung Galaxy Tab S3",
"SM-A720": "Samsung Galaxy A7 (2017)",
"SM-A520": "Samsung Galaxy A5 (2017)",
"SM-A320": "Samsung Galaxy A3 (2017)",
"SM-C7010": "Samsung Galaxy C5 Pro",
"SM-J106F": "Samsung Galaxy J1 mini Prime/Galaxy V2 (Indonesia)",
"SM-G532F": "Samsung Galaxy J2 Prime",
"SM-G532M": "Samsung Galaxy J2 Prime",
"SM-G532G": "Samsung Galaxy J2 Prime",
"SM-C900F": "Samsung Galaxy C9 Pro",
"SM-A810": "Samsung Galaxy A8 (2016)",
"SM-G570": "Samsung Galaxy On5 (2016)",
"SM-G610": "Samsung Galaxy On5 (2016)",
"SM-J710": "Samsung Galaxy On7 (2016)",
"SM-G610F": "Samsung Galaxy J7 Prime",
"SM-G610M": "Samsung Galaxy J7 Prime",
"SM-N930": "Samsung Galaxy Note 7",
"SM-G570F": "Samsung Galaxy J2 Prime",
"SM-G570M": "Samsung Galaxy J2 Prime",
"SM-G891A": "Samsung Galaxy S7 Active",
"SM-J310F": "Samsung Galaxy J3 Pro",
"SM-T585": "Samsung Galaxy Tab A 10.1 (2016)",
"SM-C5000": "Samsung Galaxy C5",
"SM-C7000": "Samsung Galaxy C7",
"SM-J5109": "Samsung Galaxy J5 (2016)",
"SM-J510F": "Samsung Galaxy J5 (2016)",
"SM-J510FN": "Samsung Galaxy J5 (2016)",
"SM-J510H": "Samsung Galaxy J5 (2016)",
"SM-J510G": "Samsung Galaxy J5 (2016)",
"SM-J510MN": "Samsung Galaxy J5 (2016)",
"SM-J510Y": "Samsung Galaxy J5 (2016)",
"SM-J5108": "Samsung Galaxy J5 (2016)",
"SM-J510K": "Samsung Galaxy J5 (2016)",
"SM-J510L": "Samsung Galaxy J5 (2016)",
"SM-J510S": "Samsung Galaxy J5 (2016)",
"SM-J510UN": "Samsung Galaxy J5 (2016)",
"SM-J7109": "Samsung Galaxy J7 (2016)",
"SM-J710F": "Samsung Galaxy J7 (2016)",
"SM-J710FN": "Samsung Galaxy J7 (2016)",
"SM-J710H": "Samsung Galaxy J7 (2016)",
"SM-J710MN": "Samsung Galaxy J7 (2016)",
"SM-J710FQ": "Samsung Galaxy J7 (2016)",
"SM-J710K": "Samsung Galaxy J7 (2016)",
"SM-J710GN": "Samsung Galaxy J7 (2016)",
"SM-J3109x": "Samsung Galaxy J3 (2016)",
"SM-J320F": "Samsung Galaxy J3 (2016)",
"SM-J320G": "Samsung Galaxy J3 (2016)",
"SM-J320P": "Samsung Galaxy J3 (2016)",
"SM-J320M": "Samsung Galaxy J3 (2016)",
"SM-T280": "Samsung Galaxy Tab A6",
"SM-T285": "Samsung Galaxy Tab A6",
"SM-A9100": "Samsung Galaxy A9 Pro (2016)",
"SM-A910F": "Samsung Galaxy A9 Pro (2016)",
"SM-J105B": "Samsung Galaxy J1 Mini",
"SM-J105DS": "Samsung Galaxy J1 Mini",
"SM-J105F": "Samsung Galaxy J1 Mini",
"SM-G935F": "Samsung Galaxy S7 Edge",
"SM-G935FD": "Samsung Galaxy S7 Edge",
"SM-G9350": "Samsung Galaxy S7 Edge",
"SM-G935A": "Samsung Galaxy S7 Edge",
"SM-G935V": "Samsung Galaxy S7 Edge",
"SM-G935U": "Samsung Galaxy S7 Edge",
"SM-G935S": "Samsung Galaxy S7 Edge",
"SM-G935K": "Samsung Galaxy S7 Edge",
"SM-G935W8": "Samsung Galaxy S7 Edge",
"SC-02H": "Samsung Galaxy S7 Edge",
"SM-G930F": "Samsung Galaxy S7",
"SM-G930FD": "Samsung Galaxy S7",
"SM-G9300": "Samsung Galaxy S7",
"SM-G930A": "Samsung Galaxy S7",
"SM-G930V": "Samsung Galaxy S7",
"SM-G930AZ": "Samsung Galaxy S7",
"SM-G930S": "Samsung Galaxy S7",
"SM-G930K": "Samsung Galaxy S7",
"SM-G930W8": "Samsung Galaxy S7",
"SM-J120F": "Samsung Galaxy J1 (2016)",
"SM-J120M": "Samsung Galaxy J1 (2016)",
"SM-A9000": "Samsung Galaxy A9 (2016)",
"SM-A7100": "Samsung Galaxy A7 (2016)",
"SM-A710F": "Samsung Galaxy A7 (2016)",
"SM-A710FD": "Samsung Galaxy A7 (2016)",
"SM-A710M": "Samsung Galaxy A7 (2016)",
"SM-A710Y": "Samsung Galaxy A7 (2016)",
"SM-A5100": "Samsung Galaxy A5 (2016)",
"SM-A510F": "Samsung Galaxy A5 (2016)",
"SM-A510FD": "Samsung Galaxy A5 (2016)",
"SM-A510M": "Samsung Galaxy A5 (2016)",
"SM-A510Y": "Samsung Galaxy A5 (2016)",
"SM-A310F": "Samsung Galaxy A3 (2016)",
"SM-A310M": "Samsung Galaxy A3 (2016)",
"SM-T670": "Samsung Galaxy View",
"SC-01H": "Samsung Galaxy Active Neo",
"SM-J200F": "Samsung Galaxy J2",
"SM-J200Y": "Samsung Galaxy J2",
"SM-J200H": "Samsung Galaxy J2",
"SM-J200M": "Samsung Galaxy J2",
"SM-G928A": "Samsung Galaxy S6 Edge+",
"SM-G928AZ": "Samsung Galaxy S6 Edge+",
"SM-G928D": "Samsung Galaxy S6 Edge+",
"SM-G928F": "Samsung Galaxy S6 Edge+",
"SM-G928FD": "Samsung Galaxy S6 Edge+",
"SM-G928I": "Samsung Galaxy S6 Edge+",
"SM-G928K": "Samsung Galaxy S6 Edge+",
"SM-G928L": "Samsung Galaxy S6 Edge+",
"SM-G928P": "Samsung Galaxy S6 Edge+",
"SM-G928PZ": "Samsung Galaxy S6 Edge+",
"SM-G928R4": "Samsung Galaxy S6 Edge+",
"SM-G928R7": "Samsung Galaxy S6 Edge+",
"SM-G928S": "Samsung Galaxy S6 Edge+",
"SM-G928T": "Samsung Galaxy S6 Edge+",
"SM-G928T1": "Samsung Galaxy S6 Edge+",
"SM-G928TR": "Samsung Galaxy S6 Edge+",
"SM-G928V": "Samsung Galaxy S6 Edge+",
"SM-G9280": "Samsung Galaxy S6 Edge+",
"SM-G9288": "Samsung Galaxy S6 Edge+",
"SM-G9289": "Samsung Galaxy S6 Edge+",
"SM-A8000": "Samsung Galaxy A8",
"SM-A800F": "Samsung Galaxy A8",
"SM-A800I": "Samsung Galaxy A8",
"SM-A800S": "Samsung Galaxy A8",
"SM-A800Y": "Samsung Galaxy A8",
"SM-N9200": "Samsung Galaxy Note 5",
"SM-N920C": "Samsung Galaxy Note 5",
"SM-N920T": "Samsung Galaxy Note 5",
"SM-N920A": "Samsung Galaxy Note 5",
"SM-N920I": "Samsung Galaxy Note 5",
"SM-N9208": "Samsung Galaxy Note 5",
"SM-G903F": "Samsung Galaxy S5 Neo",
"SM-G903W": "Samsung Galaxy S5 Neo",
"SM-G318H": "Samsung Galaxy Trend 2 Lite",
"SM-G890A": "Samsung Galaxy S6 Active",
"SM-J500F": "Samsung Galaxy J5",
"SM-J500H": "Samsung Galaxy J5",
"SM-J500M": "Samsung Galaxy J5",
"SM-J500G": "Samsung Galaxy J5",
"SM-J700F": "Samsung Galaxy J7",
"SM-J700H": "Samsung Galaxy J7",
"SM-J700M": "Samsung Galaxy J7",
"SM-J700T": "Samsung Galaxy J7",
"SM-J700P": "Samsung Galaxy J7",
"SM-G925A": "Samsung Galaxy S6 Edge",
"SM-G925AZ": "Samsung Galaxy S6 Edge",
"SM-G925F": "Samsung Galaxy S6 Edge",
"SM-G925I": "Samsung Galaxy S6 Edge",
"SM-G925K": "Samsung Galaxy S6 Edge",
"SM-G925L": "Samsung Galaxy S6 Edge",
"SM-G925P": "Samsung Galaxy S6 Edge",
"SM-G925PZ": "Samsung Galaxy S6 Edge",
"SM-G925R4": "Samsung Galaxy S6 Edge",
"SM-G925R7": "Samsung Galaxy S6 Edge",
"SM-G925S": "Samsung Galaxy S6 Edge",
"SM-G925T": "Samsung Galaxy S6 Edge",
"SM-G925T1": "Samsung Galaxy S6 Edge",
"SM-G925TR": "Samsung Galaxy S6 Edge",
"SM-G925V": "Samsung Galaxy S6 Edge",
"SM-G9250": "Samsung Galaxy S6 Edge",
"SM-G9258": "Samsung Galaxy S6 Edge",
"SM-G9259": "Samsung Galaxy S6 Edge",
"SM-G920A": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920AZ": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920D": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920F": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920FD": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920I": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920K": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920L": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920P": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920PZ": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920R4": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920R7": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920S": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920T": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920T1": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920TR": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G920V": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G9200": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G9208": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-G9209": "Samsung Galaxy S6Samsung Galaxy Gear 2",
"SM-J100H": "Samsung Galaxy J1",
"SM-J100F": "Samsung Galaxy J1",
"SM-E500H": "Samsung Galaxy E5",
"SM-E500F": "Samsung Galaxy E5",
"SM-A700F": "Samsung Galaxy A7",
"SM-A700FD": "Samsung Galaxy A7",
"SM-A700FQ": "Samsung Galaxy A7",
"SM-A700H": "Samsung Galaxy A7",
"SM-A700K": "Samsung Galaxy A7",
"SM-A700L": "Samsung Galaxy A7",
"SM-A700M": "Samsung Galaxy A7",
"SM-A700S": "Samsung Galaxy A7",
"SM-A700X": "Samsung Galaxy A7",
"SM-A700YD": "Samsung Galaxy A7",
"SM-A700YZ": "Samsung Galaxy A7",
"SM-A7000": "Samsung Galaxy A7",
"SM-A7009": "Samsung Galaxy A7",
"SM-A7009W": "Samsung Galaxy A7",
"SM-E700H": "Samsung Galaxy E7",
"SM-A500F": "Samsung Galaxy A5 (2015)",
"SM-A500F1": "Samsung Galaxy A5 (2015)",
"SM-A500FQ": "Samsung Galaxy A5 (2015)",
"SM-A500FU": "Samsung Galaxy A5 (2015)",
"SM-A500G": "Samsung Galaxy A5 (2015)",
"SM-A500H": "Samsung Galaxy A5 (2015)",
"SM-A500HQ": "Samsung Galaxy A5 (2015)",
"SM-A500K": "Samsung Galaxy A5 (2015)",
"SM-A500L": "Samsung Galaxy A5 (2015)",
"SM-A500M": "Samsung Galaxy A5 (2015)",
"SM-A500S": "Samsung Galaxy A5 (2015)",
"SM-A500X": "Samsung Galaxy A5 (2015)",
"SM-A500XZ": "Samsung Galaxy A5 (2015)",
"SM-A500Y": "Samsung Galaxy A5 (2015)",
"SM-A500YZ": "Samsung Galaxy A5 (2015)",
"SM-A5000": "Samsung Galaxy A5 (2015)",
"SM-A5009": "Samsung Galaxy A5 (2015)",
"SM-A300F": "Samsung Galaxy A3 (2015)",
"SM-A300FU": "Samsung Galaxy A3 (2015)",
"SM-A300G": "Samsung Galaxy A3 (2015)",
"SM-A300H": "Samsung Galaxy A3 (2015)",
"SM-A300HQ": "Samsung Galaxy A3 (2015)",
"SM-A300M": "Samsung Galaxy A3 (2015)",
"SM-A300X": "Samsung Galaxy A3 (2015)",
"SM-A300XU": "Samsung Galaxy A3 (2015)",
"SM-A300XZ": "Samsung Galaxy A3 (2015)",
"SM-A300Y": "Samsung Galaxy A3 (2015)",
"SM-A300YZ": "Samsung Galaxy A3 (2015)",
"SM-A3000": "Samsung Galaxy A3 (2015)",
"SM-A3009": "Samsung Galaxy A3 (2015)",
"SM-G360BT": "Samsung Galaxy Core Prime",
"SM-G360H": "Samsung Galaxy Core Prime",
"SM-N915G": "Samsung Galaxy Note Edge",
"SM-N9150": "Samsung Galaxy Note Edge",
"SM-N910G": "Samsung Galaxy Note 4",
"SM-G130H": "Samsung Galaxy Young 2",
"SM-G850F": "Samsung Galaxy Alpha",
"SM-G850FQ": "Samsung Galaxy Alpha",
"SM-G850K": "Samsung Galaxy Alpha",
"SM-G850L": "Samsung Galaxy Alpha",
"SM-G850M": "Samsung Galaxy Alpha",
"SM-G850S": "Samsung Galaxy Alpha",
"SM-G850W": "Samsung Galaxy Alpha",
"SM-G850Y": "Samsung Galaxy Alpha",
"SM-G530BT": "Samsung Galaxy Grand Prime",
"SM-G530F": "Samsung Galaxy Grand Prime",
"SM-G530FQ": "Samsung Galaxy Grand Prime",
"SM-G530FZ": "Samsung Galaxy Grand Prime",
"SM-G530H": "Samsung Galaxy Grand Prime",
"SM-G530M": "Samsung Galaxy Grand Prime",
"SM-G530MU": "Samsung Galaxy Grand Prime",
"SM-G530P": "Samsung Galaxy Grand Prime",
"SM-G530R4": "Samsung Galaxy Grand Prime",
"SM-G530R7": "Samsung Galaxy Grand Prime",
"SM-G530T": "Samsung Galaxy Grand Prime",
"SM-G530W": "Samsung Galaxy Grand Prime",
"SM-G530Y": "Samsung Galaxy Grand Prime",
"SM-G5306W": "Samsung Galaxy Grand Prime",
"SM-G5308W": "Samsung Galaxy Grand Prime",
"SM-G5309W": "Samsung Galaxy Grand Prime",
"SM-G110B": "Samsung Galaxy Pocket 2",
"SM-G750F": "Samsung Galaxy Mega 2",
"SM-G350E": "Samsung Galaxy Star 2 Plus",
"SM-G313F": "Samsung Galaxy Ace 4",
"SM-G355H": "Samsung Galaxy Core 2",
"GT-S5500": "Samsung Galaxy S5 Mini",
"GT-S5430": "Samsung Galaxy S5 Mini",
"SM-T800": "Samsung Galaxy Tab S 10.5",
"SM-T805": "Samsung Galaxy Tab S 10.5",
"SM-T807": "Samsung Galaxy Tab S 10.5",
"SM-T807P": "Samsung Galaxy Tab S 10.5",
"SM-T807V": "Samsung Galaxy Tab S 10.5",
"SM-G386F": "Samsung Galaxy Core",
"SM-C115": "Samsung Galaxy K Zoom",
"SM-G310": "Samsung Galaxy Ace Style",
"SM-G900": "Samsung Galaxy S5",
"SM-G900FD": "Samsung Galaxy S5",
"GT-I9300I": "Samsung Galaxy S3 Neo",
"GT-I9301I": "Samsung Galaxy S3 Neo",
"GT-I9303I": "Samsung Galaxy S3 Neo",
"SM-N7500": "Samsung Galaxy Note 3 Neo",
"SM-N7502": "Samsung Galaxy Note 3 Neo",
"SM-N7505": "Samsung Galaxy Note 3 Neo",
"SM-G7102": "Samsung Galaxy Grand 2 (SM-G7100)",
"GT-S7262": "Samsung Galaxy Star Pro (GT-S7260)",
"GT-S7392": "Samsung Galaxy Trend Lite (GT-S7390)",
"SM-G3502": "Samsung Galaxy Core Plus (SM-G3500)",
"SM-N9000": "Samsung Galaxy Note 3",
"SM-N9002": "Samsung Galaxy Note 3",
"SM-N9005": "Samsung Galaxy Note 3",
"SM-V700": "Samsung Galaxy Gear",
"GT-S7272": "Samsung Galaxy Ace 3 (GT-S7270)[20]",
"GT-S7275": "Samsung Galaxy Ace 3 (GT-S7270)[20]",
"GT-S5312": "Samsung Galaxy Pocket Neo (GT-S5310)",
"GT-S5282": "Samsung Galaxy Star (GT-S5280)",
"GT-S5283": "Samsung Galaxy Star (GT-S5280)",
"GT-i8262D": "Samsung Galaxy Core (GT-S8262)",
"Galaxy Grand Quattro": "Samsung Galaxy Win (GT-I8550)",
"GT-I9150": "Samsung Galaxy Mega",
"GT-I9152": "Samsung Galaxy Mega",
"GT-I9200": "Samsung Galaxy Mega",
"GT-I9205": "Samsung Galaxy Mega",
"GT-S6810P": "Samsung Galaxy Fame (GT-S6810)",
"GT-I9505": "Samsung Galaxy S4 (GT-I9500)",
"GT-I9506": "Samsung Galaxy S4 (GT-I9500)",
"GT-S6312": "Samsung Galaxy Young (GT-S6310)",
"GT-I9082": "Samsung Galaxy Grand (GT-I9080)",
"SGH-I437": "Samsung Galaxy Express",
"GT-N7100": "Samsung Galaxy Note II",
"GT-N7102": "Samsung Galaxy Note II",
"GT-N7105": "Samsung Galaxy Note II",
"GT-B5512": "Samsung Galaxy Y Pro DUOS (GT-B5510)[33]",
"GT-I5700": "Samsung Galaxy Spica[67]",
"GT-I7500": "Samsung Galaxy[68]",
'CPH1911': 'OPPO F11',
'CPH1909': 'Oppo A5s',
'CPH1913': 'OPPO F11',
'CPH1931': 'OPPO A5 2020',
'CPH1933': 'OPPO A5 2020',
'CPH1937': 'OPPO A9 2020',
'CPH1969': 'OPPO F11 Pro',
'CPH1989': 'OPPO Reno2 F',
'CPH2001': 'OPPO F15',
'CPH2015': 'OPPO A31',
'CPH2023': 'OPPO Find X2',
'CPH2035': 'OPPO Reno3 Pro',
'CPH2061': 'OPPO A52',
'CPH2071': 'OPPO A11k',
'CPH2077': 'OPPO A12',
'CPH2083': 'OPPO A11k',
'CPH2109': 'OPPO Reno4 Pro',
'CPH2127': 'OPPO A53',
'CPH2137': 'OPPO A33',
'CPH2179': 'OPPO A15s',
'CPH2185': 'OPPO A15',
'CPH2201': 'OPPO Reno5 Pro 5G',
'CPH2213': 'OPPO F19 Pro+',
'CPH2219': 'OPPO F19',
'CPH2239': 'OPPO A54',
'CPH2249': ' OPPO Reno6 Pro 5G',
'CPH2251': 'OPPO Reno6 5G',
'CPH2263': 'OPPO A74 5G',
'CPH2269': 'OPPO A16',
'CPH2285': 'OPPO F19 Pro',
'CPH2293': 'OPPO Reno7 Pro 5G',
'CPH2321': 'OPPO A53s 5G',
'CPH2325': 'OPPO A55',
'CPH2349': 'OPPO A16k',
'CPH2371': 'OPPO Reno7 5G',
'Not Available': 'OPPO F17',
"YAL-TL00": "Honor 20",
"YAL-L71": "Huawei nova 5T",
"YAL-L61": "Huawei nova 5T",
"YAL-L41": "Honor 20 Pro",
"YAL-L21": "Huawei nova 5T",
"YAL-AL50": "Honor 20S",
"YAL-AL10": "Honor 20 Pro",
"YAL-AL00I": "Honor 20",
"YAL-AL00": "Honor 20",
"Y635-TL00": "Huawei Y635",
"Y635-L21": "Huawei Y635",
"Y635-L03": "Huawei Y635",
"Y635-L02": "Huawei Y635",
"Y635-L01": "Huawei Y635",
"Y635-CL00": "Huawei Y635",
"Y625-U51": "Huawei Y625",
"Y625-U43": "Huawei Y625",
"Y625-U32": "Huawei Y625",
"Y625-U21": "Huawei Y625",
"Y625-U13": "Huawei Y625",
"Y610-U00": "Huawei Y610-U00",
"Y600-U40": "Huawei Ascend Y600",
"Y600-U351": "Huawei Ascend Y600",
"Y600-U20": "Huawei Ascend Y600",
"Y600-U151": "Huawei Ascend Y600",
"Y600-U00": "Huawei Ascend Y600",
"Y560-U23": "Huawei Y560",
"Y560-U02": "Huawei Y560",
"Y560-L23": "Huawei Y560",
"Y560-L03": "Huawei Y560",
"Y560-L02": "Huawei Y560",
"Y560-L01": "Huawei Y560",
"Y550-L03": "Huawei Ascend Y550",
"Y550-L02": "Huawei Ascend Y550",
"Y550-L01": "Huawei Ascend Y550",
"Y541-U02": "Huawei Y541",
"Y540-U01": "Huawei Ascend Y540",
"Y538": "Huawei Union Y538",
"Y536-A1": "Huawei Y536",
"Y530-U051": "Huawei Y530",
"Y530-U00": "Huawei Y530",
"Y520-U33": "Huawei Ascend Y520",
"Y520-U22": "Huawei Ascend Y520",
"Y520-U12": "Huawei Ascend Y520",
"Y520-U03": "Huawei Ascend Y520",
"Y511-U30": "Huawei Ascend Y511",
"Y511-U251": "Huawei Ascend Y511",
"Y511-U10": "Huawei Ascend Y511",
"Y511-U00": "Huawei Y511",
"Y360-U93": "Huawei Y3 lite",
"Y360-U82": "Huawei Y3 Lite",
"Y360-U61": "Huawei Y360",
"Y360-U31": "Huawei Y360",
"Y360-U23": "Huawei Y360",
"Y360-U03": "Huawei Y360",
"Y340-U081": "Huawei Y340",
"Y336-U02": "Huawei Y336",
"Y330-U17": "Huawei Ascend Y330",
"Y330-U15": "Huawei Ascend Y330",
"Y330-U11": "Huawei Ascend Y330",
"Y330-U07": "Huawei Ascend Y330",
"Y330-U05": "Huawei Ascend Y330",
"Y330-U01": "Huawei Ascend Y330",
"Y321-U051": "Huawei Ascend Y321",
"Y320-U351": "Huawei Ascend Y320",
"Y320-U30": "Huawei Ascend Y320",
"Y320-U151": "Huawei Ascend Y320",
"Y320-U10": "Huawei Ascend Y320",
"Y320-U01": "Huawei Ascend Y320",
"Y300-0151": "Huawei Ascend Y300",
"Y300-0100": "Huawei Ascend Y300",
"Y300-0000": "Huawei Ascend Y300",
"Y221-U33": "Huawei Ascend Y221",
"Y221-U22": "Huawei Ascend Y221",
"Y221-U12": "Huawei Ascend Y221",
"Y221-U03": "Huawei Ascend Y221",
"Y220-U10": "Huawei Ascend Y220",
"Y220-U05": "Huawei Ascend Y220",
"Y220-U00": "Huawei Ascend Y220",
"Y210-0200": "Huawei Ascend Y210",
"Y210-0151": "Huawei Ascend Y210",
"WLZ-AN00": "Huawei nova 6 5G",
"WLZ-AL10": "Huawei nova 6",
"WKG-TN00": "Huawei Enjoy 20 SE",
"WKG-L29": "Huawei Enjoy 20 5G",
"WKG-L09": "Huawei Enjoy 20 5G",
"WKG-AN00": "Huawei Enjoy 20 5G",
"WAS-TL10": "Huawei P10 Lite Dual",
"WAS-LX3": "Huawei P10 Lite",
"WAS-LX2": "Huawei P10 Lite",
"WAS-LX1": "Huawei P10 Lite",
"WAS-L23": "Huawei P10 Lite",
"WAS-L22J": "Huawei WAS-L22J",
"WAS-L22": "Huawei P10 Lite",
"WAS-L21": "Huawei P10 Lite",
"WAS-L03": "Huawei P10 Lite",
"WAS-L02": "Huawei P10 Lite",
"WAS-L01": "Huawei P10 Lite",
"WAS-AL00": "Huawei Nova Youth Dual",
"Warsaw-LX2": "Huawei P10",
"Warsaw-LX1": "Huawei Warsaw-LX1",
"Warsaw-L23": "Huawei P10",
"Warsaw-L22": "Huawei P10",
"Warsaw-L21": "Huawei P10",
"Warsaw-L03": "Huawei Warsaw-L03",
"Warsaw-L02": "Huawei Warsaw-L02",
"W1-U00": "Huawei W1",
"VTR-TL00": "Huawei P10",
"VTR-L29": "Huawei P10",
"VTR-L09": "Huawei P10",
"VTR-AL00": "Huawei P10",
"VRD-W10": "Huawei MediaPad M6 Turbo 8.4",
"VRD-W09": "Huawei MediaPad M6 Turbo 8.4",
"VRD-AL10": "Huawei MediaPad M6 Turbo 8.4",
"VRD-AL09": "Huawei MediaPad M6 Turbo 8.4",
"VOG-TL00": "Huawei P30 Pro",
"VOG-L29": "Huawei P30 Pro",
"VOG-L09": "Huawei P30 Pro",
"VOG-L04": "Huawei P30 Pro",
"VOG-AL10": "Huawei P30 Pro",
"VOG-AL00": "Huawei Y6 Pro",
"VNS-TL00": "Huawei G9 Lite",
"VNS-L62": "Huawei P9 Lite",
"VNS-L53": "Huawei P9 lite",
"VNS-L52C": "Huawei VNS-L52C",
"VNS-L31": "Huawei P9 Lite",
"VNS-L23": "Huawei P9 lite",
"VNS-L22": "Huawei P9 Lite",
"VNS-L21": "Huawei P9 Lite",
"VNS-DL00": "Huawei P9",
"VNS-AL00": "Huawei G9 Lite",
"VKY-TL00": "Huawei P10 Plus",
"VKY-L29": "Huawei P10 Plus",
"VKY-L09": "Huawei P10 Plus",
"VKY-AL00": "Huawei P10 Plus",
"VIE-L29": "Huawei P9 Plus",
"VIE-L09": "Huawei P9 Plus",
"VIE-C00": "Huawei P9 Plus",
"VIE-AL10": "Huawei P9 Plus",
"Victoria-L09": "Huawei P10",
"Vicky-L29": "Huawei P10 Plus",
"Vicky-L09": "Huawei P10 Plus",
"VEN-L22": "Huawei Honor 8 Smart",
"VCE-TL00": "Huawei Nova 4",
"VCE-AL00": "Huawei Nova 4",
"U9510E": "Huawei Ascend D1",
"U9508": "Huawei Honor 2",
"U9202L-1": "Huawei Ascend P1 LTE",
"U9200-1": "Huawei Ascend P1",
"TRT-TL10": "Huawei Y7 Prime",
"TRT-LX3": "Huawei Y7",
"TRT-LX1": "Huawei Y7",
"TRT-LX": "Huawei Y7 Prime",
"TRT-L53D": "Huawei Y7 Prime",
"TRT-L53": "Huawei Y7 Prime",
"TRT-L21A": "Huawei Y7 Prime",
"TRT-L21": "Huawei Y7",
"TRT-L03": "Huawei Y7",
"TRT-L02": "Huawei Y7",
"TRT-L01": "Huawei Y7",
"TRT-AL00": "Huawei Enjoy 7 Plus",
"Toronto-L23": "Huawei Y7",
"Toronto-L22": "Huawei Toronto-L22",
"Toronto-L21": "Huawei Y7",
"Toronto-L02": "Huawei Toronto-L02",
"Toronto-AL00": "Huawei Toronto-AL00",
"TNY-TL00": "Huawei Honor Magic 2",
"TNY-AL10": "Honor Magic 2",
"TNY-AL00": "Honor Magic 2",
"TNNH-AN00": "Honor Play4",
"TNN-AN00": "Huawei Enjoy 20s",
"TIT-U02": "Huawei Y6 Pro",
"TIT-L01": "Huawei Y6 Pro",
"TIT-CL10": "Huawei Enjoy 5",
"TIT-CL00": "Huawei Enjoy 5",
"TIT-AL00": "Huawei Y6 Pro",
"TET-AN00": "Huawei Mate X2",
"TEL-AN10": "Honor X10 5G",
"TEL-AN00A": "Honor X10 5G",
"TEL-AN00": "Honor X10 5G",
"TAS-TL00": "Huawei Mate 30",
"TAS-L29": "Huawei Mate 30",
"TAS-AN00": "Huawei Mate 30 5G",
"TAS-AL00": "Huawei Mate 30",
"TAH-N29M": "Huawei Mate Xs",
"TAH-AN00M": "Huawei Mate X",
"TAH-AN00": "Huawei Mate X",
"TAG-TL00": "Huawei Enjoy 5s",
"TAG-L32": "Huawei GR3",
"TAG-L23": "Huawei GR3",
"TAG-L22": "Huawei GR3",
"TAG-L21": "Huawei GR3",
"TAG-L13": "Huawei GR3",
"TAG-L03": "Huawei GR3",
"TAG-L01": "Huawei P8 Lite Smart",
"TAG-CL00": "Huawei Enjoy 5S",
"T1-A22L": "Huawei Mediapad T1",
"T1-A21w": "Huawei MediaPad T1 10",
"T1-A21L": "Huawei MediaPad T1 10",
"T1-821L": "Huawei MediaPad T1 8.0",
"T1-702u": "Huawei MediaPad T1 7.0",
"T1-702": "Huawei MediaPad T1 7.0",
"T1-701w": "Huawei MediaPad T1 7.0",
"T1-701ua": "Huawei MediaPad T1 7.0",
"T1-701u": "Huawei MediaPad T1 7.0",
"T1-701": "Huawei MediaPad T1 7.0",
"STK-TL00": "Huawei Enjoy 10 Plus",
"STK-L23BHN": "Huawei Y9 Prime (2019)",
"STK-L22HN": "Huawei Y9 Prime (2019)",
"STK-L22DV": "Huawei Y9 Prime (2019)",
"STK-L22": "Huawei Y9 Prime (2019)",
"STK-L21VHN": "Huawei Y9 Prime (2019)",
"STK-L21UDV": "Huawei Y9 Prime (2019)",
"STK-L21MDV": "Huawei Y9 Prime (2019)",
"STK-L21M": "Huawei Y9 Prime (2019)",
"STK-L21HN": "Huawei Y9 Prime (2019)",
"STK-L21": "Huawei Y9 Prime (2019)",
"STK-L03DV": "Huawei P Smart Z",
"STK-L03B": "Huawei P Smart Z",
"STK-L01MDV": "Huawei Y9 Prime (2019)",
"STK-L01M": "Huawei P Smart Z",
"STK-AL00": "Huawei P Smart Z",
"STF-TL10": "Huawei Honor 9",
"STF-L09S": "Huawei Honor 9",
"STF-L09": "Huawei Honor 9",
"STF-AL10": "Huawei Honor 9",
"STF-AL00": "Huawei Honor 9",
"SPN-AL10": "Huawei nova 5z",
"SPN-AL00": "Huawei nova 5z",
"Sophia-L12": "Huawei Ascend P7",
"Sophia-L11": "Huawei Ascend P7",
"Sophia-L10": "Huawei Ascend P7",
"Sophia-L09": "Huawei Ascend P7",
"Sophia-L07": "Huawei Ascend P7",
"Sophia-L00": "Huawei Ascend P7",
"SNE-L01": "Huawei Mate 20 lite",
"SLA-L22": "Huawei P9 Lite Mini",
"SLA-L03": "Huawei Y6 Pro 2017",
"SLA-L02": "Huawei Y6 Pro 2017",
"SLA-AL00": "Huawei Enjoy 7",
"SHT-W09": "Huawei MediaPad M5 8.4",
"SHT-AL09": "Huawei MediaPad M5 8.4",
"Selina-L03": "Huawei Y6 Pro 2017",
"SEA-AL10": "Huawei nova 5 Pro",
"SEA-AL00": "Huawei nova 5",
"SCMR-W09": "Huawei MatePad 10.8",
"SCMR-AL09": "Huawei MatePad 10.8",
"SCL-U31": "Huawei Y6",
"SCL-U23": "Huawei Y6",
"SCL-L32": "Huawei Y6",
"SCL-L21": "Huawei Y6",
"SCL-L04": "Huawei Y6",
"SCL-L03": "Huawei Y6",
"SCL-L02": "Huawei Y6",
"SCL-L01": "Huawei Y6",
"SCL-CL00": "Huawei Honor 4A",
"SCL-AL00": "Huawei Honor 4A",
"SCC-U21": "Huawei Y6",
"SC-CL00": "Huawei Ascend GX1",
"S8-701w": "Huawei MediaPad T1 8.0",
"S8-701u": "Huawei MediaPad M1 8.0",
"S8-306L": "Huawei MediaPad M1 8.0",
"S8-303L": "Huawei MediaPad M1",
"S8-301w": "Huawei MediaPad M1 8.0",
"S8-301u": "Huawei MediaPad M1 8.0",
"S8-301L": "Huawei MediaPad M1 8.0",
"S7-931w": "Huawei MediaPad 7 Lite",
"S7-931u": "Huawei MediaPad 7 Lite",
"S7-722u": "Huawei MediaPad 7 Youth 2",
"S7-721w": "Huawei MediaPad 7 Youth 2",
"S7-721u": "Huawei MediaPad 7 Youth 2",
"S7-721g": "Huawei MediaPad 7 Youth 2",
"S7-701w": "Huawei MediaPad 7 Youth",
"S7-701u": "Huawei MediaPad 7 Youth",
"S7-601w": "Huawei MediaPad 7 Vogue",
"S7-601u": "Huawei MediaPad 7 Vogue",
"S7-301u": "Huawei MediaPad 7 Vogue",
"S7-201u": "Huawei IDEOS S7 Slim",
"S10-232L": "Huawei MediaPad 10 Link Plus",
"S10-231w": "Huawei MediaPad10 Link",
"S10-231u": "Huawei MediaPad 10 Link Plus",
"S10-231L": "Huawei MediaPad 10 Link",
"S10-201u": "Huawei MediaPad 10 Link",
"S10-101w": "Huawei MediaPad 10 FHD",
"S10-101u": "Huawei MediaPad 10 FHD",
"RVL-AL09": "Honor Note 10",
"RNE-L23": "Huawei Mate 10 Lite",
"RNE-L22": "Huawei Nova 2i",
"RNE-L21": "Huawei Mate 10 Lite",
"RNE-L03": "Huawei Mate 10 Lite",
"RNE-L02": "Huawei Nova 2i",
"RNE-L01": "Huawei Mate 10 Lite",
"RNE-AL00": "Huawei Maimang 6",
"RIO-UL00": "Huawei G7 Plus",
"RIO-TL00": "Huawei G7 Plus",
"RIO-L33": "Huawei G8",
"RIO-L23": "Huawei G8",
"RIO-L11": "Huawei G8",
"RIO-L03": "Huawei GX8",
"RIO-L02": "Huawei G8",
"RIO-L01,RIO-L11": "Huawei G8",
"RIO-L01": "Huawei G8",
"RIO-CL00": "Huawei Maimang 4",
"RIO-AL00": "Huawei Maimang 4",
"Rhone-L21": "Huawei Mate 10 Lite",
"Rhone-L03": "Huawei Mate 10 Lite",
"Rhone-L01": "Huawei Mate 10 Lite",
"Prague-TL00": "Huawei Prague-TL00",
"Prague-L23": "Huawei P8 Lite 2017",
"Prague-L22": "Huawei P8 Lite 2017",
"Prague-L21": "Huawei P8 Lite 2017",
"Prague-L03": "Huawei Prague-L03",
"PRA-TL10": "Huawei Honor 8 Lite",
"PRA-LX3": "Huawei P9 Lite 2017",
"PRA-LX2": "Huawei Nova Lite",
"PRA-LX1": "Huawei P8 Lite 2017",
"PRA-LA1": "Huawei Honor 8 Lite",
"PRA-L31": "Huawei P8 Lite 2017",
"PRA-L22": "Huawei P8 Lite 2017",
"PRA-L21": "Huawei P8 Lite Dual Sim 2017",
"PRA-L11": "Huawei P8 Lite 2017",
"PRA-L03": "Huawei P8 Lite 2017",
"PRA-L02": "Huawei Nova Lite",
"PRA-L01": "Huawei P8 Lite 2017",
"PRA-AL00X": "Huawei Honor 8 Lite",
"PRA-AL00": "Huawei Honor 8 Lite",
"PPAH-TL20": "Huawei P smart 2021",
"PPAH-L22": "Huawei P smart 2021",
"PPAH-L21": "Huawei P smart 2021",
"PPAH-L02": "Huawei P smart 2021",
"PPAH-AL40": "Huawei P smart 2021",
"PPAH-AL20": "Huawei P smart 2021",
"POT-TL00": "Huawei Enjoy 9s",
"POT-LX3": "Huawei P Smart 2019",
"POT-LX2J": "Huawei Nova Lite 3",
"POT-LX1A": "Huawei P Smart 2020",
"POT-LX1": "Huawei P Smart 2019",
"POT-L41B": "Huawei P Smart S",
"POT-L21RU": "Huawei P smart 2019",
"POT-L21": "Huawei P smart 2019",
"POT-L01": "Huawei P smart 2019",
"POT-AL10C": "Huawei enjoy 9s",
"POT-AL00a": "Huawei Enjoy 9S",
"POT-AL00": "Huawei P Smart 2019",
"PLK-UL00IN": "Huawei Honor 7",
"PLK-UL00": "Huawei Honor 7",
"PLK-TL01H": "Huawei Honor 7",
"PLK-TL00": "Huawei Honor 7",
"PLK-L01": "Huawei Honor 7",
"PLK-CL00": "Huawei Honor 7",
"PLK-AL10": "Huawei Honor 7",
"PLE-703L": "Huawei MediaPad M2 Lite",
"PLE-701L": "Huawei MediaPad T2 7.0",
"Pine-UL00": "Huawei Honor 6 plus",
"Pine-TL10": "Huawei Honor 6 Plus",
"Pine-L04": "Huawei Honor 6 Plus",
"Pine-L02": "Huawei Honor 6 Plus",
"Pine-L00": "Huawei Honor 6 Plus",
"PIC-TL00": "Huawei Nova 2",
"PIC-LX9": "Huawei Nova 2",
"PIC-L29": "Huawei Nova 2",
"PIC-L09": "Huawei Nova 2",
"PIC-AL00": "Huawei Nova 2",
"PE-UL00": "Huawei Honor 6 Plus",
"PE-TL20": "Huawei Honor 6 Plus",
"PE-TL10": "Huawei Honor 6 Plus",
"PE-TL00M": "Huawei Honor 6 Plus",
"PE-CL00": "Huawei Honor 6 Plus",
"PCT-TL10": "Honor View 20",
"PCT-L29D": "Honor View 20",
"PCT-L29": "Honor View 20",
"PCT-AL10D": "Honor View 20",
"PCT-AL10": "Honor View 20",
"Paris-L09A": "Huawei Nova 3",
"PAR-TL20": "Huawei Nova 3",
"PAR-TL00": "Huawei nova 3",
"PAR-AL00": "Huawei nova 3",
"P7-L10": "Huawei Ascend P7",
"P7-L09": "Huawei Ascend P7",
"P7-L07": "Huawei Ascend P7",
"P7-L05": "Huawei Ascend P7",
"P6-U06": "Huawei Ascend P6",
"P6-S-U00": "Huawei Ascend P6 S",
"P2-6011": "Huawei Ascend P2",
"OXF-AN10L": "Honor V30 Pro",
"OXF-AN10": "Honor V30 Pro",
"OXF-AN00L": "Honor V30",
"OXF-AN00": "Honor V30",
"OCE-AN50": "Huawei Mate 40E",
"OCE-AN10": "Huawei Mate 40E",
"NXT-TL00": "Huawei Mate 8",
"NXT-L29B": "Huawei Ascend Mate8",
"NXT-L29A": "Huawei Ascend Mate8",
"NXT-L29": "Huawei Mate 8",
"NXT-L09A": "Huawei Ascend Mate8",
"NXT-L09": "Huawei Mate 8",
"NXT-DL00": "Huawei Mate 8",
"NXT-CL00": "Huawei Mate 8",
"NXT-C00": "Huawei Mate 8",
"NXT-AL10": "Huawei Mate 8",
"MAR-L01MEA": "Huawei P30 lite",
"MAR-L01B": "Huawei P30 lite",
"MAR-L01A": "Huawei P30 lite",
"MAR-AL00": "Huawei nova 4e",
"Madrid-L21": "Huawei Y6 (2019)",
"M2-A01w": "Huawei MediaPad M2 10.0",
"M2-A01L": "Huawei MediaPad M2 10.0",
"M2-803L": "Huawei MediaPad M2 8.0",
"M2-802L": "Huawei MediaPad M2 8.0",
"M2-801w": "Huawei MediaPad M2 8.0",
"M2-801L": "Huawei MediaPad M2 8.0",
"LYO-L21": "Huawei Honor 5A",
"LYO-L02": "Huawei Y6 II",
"LYO-L01": "Huawei Y6 II",
"LYA-TL00L": "Huawei Mate 20 Pro",
"LYA-TL00": "Huawei Mate 20 Pro",
"LYA-L29": "Huawei Mate 20 Pro",
"LYA-L09": "Huawei Mate 20 Pro",
"LYA-AL10": "Huawei Mate 20 Pro",
"LYA-AL00P": "Huawei Mate 20 Pro",
"LYA-AL00L": "Huawei Mate 20 Pro",
"LYA-AL00": "Huawei Mate 20 Pro",
"LUA-U23": "Huawei Y3 II",
"LUA-U22": "Huawei Y3 II",
"LUA-U03": "Huawei Y3 II",
"LUA-U02": "Huawei Y3 II",
"LUA-L23": "Huawei Y3 II",
"LUA-L22HN": "Huawei Honor Bee 2",
"LUA-L22": "Huawei Y3 II",
"LUA-L21": "Huawei Y3 II",
"LUA-L13": "Huawei Y3 II",
"LUA-L03": "Huawei Y3 II",
"LUA-L02": "Huawei Y3 II",
"LUA-L01": "Huawei Y3 II",
"LRA-L21B": "Honor 30i",
"LRA-AL00": "Honor 20 lite (China)",
"LON-L29D": "Huawei Mate 9 Pro",
"LON-L29C": "Huawei Mate 9 Pro",
"LON-L29": "Huawei Mate 9 Pro",
"LON-AL00": "Huawei Mate 9 Pro",
"LLD-TL10": "Honor 9 Lite",
"LLD-L31": "Huawei Honor 9 Lite",
"LLD-L21": "Huawei Honor 9 Lite",
"LLD-AL30": "Honor 9N (9i)",
"LLD-AL20": "Honor 9N (9i)",
"LLD-AL10": "Huawei Honor 9 Lite",
"LLD-AL00": "Huawei Honor 9 Lite",
"LIO-TL00": "Huawei Mate 30 Pro",
"LIO-N29": "Huawei Mate 30 RS Porsche Design",
"LIO-L29": "Huawei Mate 30 Pro 5G",
"LIO-AN00P": "Huawei Mate 30 RS Porsche Design",
"LIO-AN00M": "Huawei Mate 30 Pro",
"LIO-AN00": "Huawei Mate 30 Pro 5G",
"LIO-AL00": "Huawei Mate 30 Pro",
"LDN-TL10": "Huawei Y7 Prime 2018",
"LDN-TL00": "Huawei Enjoy 8",
"LDN-LX3": "Huawei Y7 2018",
"LDN-LX2": "Huawei Y7 Prime 2018",
"LDN-L22": "Huawei nova 2 lite",
"LDN-L21": "Huawei Y7 2018",
"LDN-L03": "Huawei Y7 2018",
"LDN-L01": "Huawei Y7 2018",
"LDN-AL00": "Huawei Enjoy 8",
"KSA-L29": "Honor 8S",
"KSA-L22": "Honor 8S",
"KSA-L09": "Honor 8S",
"KSA-AL10": "Honor 8S",
"KSA-AL00": "Honor 8S",
"KRJ-W09": "Honor V6",
"KRJ-AN00": "Honor V6",
"KOB2-W09B": "Huawei MatePad T8",
"KOB2-W09": "Huawei MatePad T8",
"KOB2-L09B": "Huawei MatePad T8",
"KOB2-L09": "Huawei MatePad T8",
"KOB-W09": "HUAWEI MediaPad T3",
"KOB-L09": "Huawei Mediapad T3",
"KNT-UL10": "Huawei Honor V8",
"KNT-TL10": "Huawei Honor V8",
"KNT-C00": "Huawei Honor V8",
"KNT-AL20": "Huawei Honor V8",
"KNT-AL10": "Huawei Honor V8",
"KKG-TN00": "Honor X10 Max 5G",
"KKG-AN00": "Honor X10 Max 5G",
"KIW-UL00": "Huawei Honor 5X",
"KIW-TL00": "Huawei Honor 5X",
"KIW-L24": "Huawei Honor 5X",
"KIW-L22": "Huawei Honor 5X",
"KIW-L21": "Huawei HONOR 5X",
"KIW-CL00": "Huawei Honor 5X",
"KIW-AL10": "Huawei Honor 5X",
"KII-L33": "Huawei GR5",
"KII-L23": "Huawei GR5",
"KII-L22": "Huawei GR5",
"KII-L21": "Huawei GR5",
"KII-L05": "Huawei GR5",
"JSN-TL00": "Honor 8X",
"JSN-L22X": "Honor 8X",
"JSN-L21X": "Honor 8X",
"JSN-L21": "Honor 8X",
"JSN-AL00": "Honor 8X",
"JSC-AN00A": "Huawei nova 8 SE",
"JSC-AN00": "Huawei nova 8 SE",
"JNY-L22": "Huawei nova 7i",
"JNY-L21": "Huawei nova 7i",
"JNY-L01": "Huawei nova 7i",
"JNY-AL10": "Huawei nova 6 SE",
"JMM-TL00": "Huawei Honor 6C Pro",
"JMM-L22": "Huawei Honor 6C Pro",
"JMM-AL00": "Huawei Honor 6C Pro",
"JKM-TL00": "Huawei Y9 (2019)",
"JKM-LX3": "Huawei Y9 (2019)",
"JKM-LX2": "Huawei Y9 (2019)",
"JKM-LX1": "Huawei Y9 (2019)",
"JKM-L21X": "Huawei Y9 (2019)",
"JKM-L21": "Huawei Y9 (2019)",
"JKM-L01X": "Huawei Y9 (2019)",
"JKM-AL20": "Huawei Y9 (2019)",
"JKM-AL10": "Huawei Y9 (2019)",
"JKM-AL00": "Huawei Y9 (2019)",
"Jimmy-TL00": "Huawei Jimmy TL00",
"Jimmy-AL00": "Huawei Jimmy-AL00",
"JER-TN20": "Huawei nova 7 Pro 5G",
"JER-TN10": "Huawei nova 7 Pro 5G",
"JER-AN20": "Huawei nova 7 Pro 5G",
"JER-AN10": "Huawei Nova 7 Pro",
"JEF-TN20": "Huawei nova 7 5G",
"JEF-TN00": "Huawei nova 7 5G",
"JEF-AN20": "Huawei nova 7 5G",
"JEF-AN00": "Huawei Nova 7 Pro",
"JDN2-W09HN": "Honor Tab 5",
"JDN2-W09": "Honor Tab 5",
"JDN2-L09": "Huawei MediaPad M5 Lite 8",
"JDN2-AL50HN": "Huawei MediaPad M5 lite",
"JDN2-AL50": "Huawei MediaPad M5 lite",
"JDN2-AL00HN": "Honor Pad 5 8",
"JDN2-AL00": "Honor Pad 5 8",
"JDN-W09": "Huawei Honor Pad 2",
"JDN-L01": "Huawei MediaPad T2 8.0",
"JDN-AL00": "Huawei Honor Pad 2",
"Jazz-TL10": "Huawei Ascend Mate 7",
"Jazz-L11": "Huawei Ascend Mate 7",
"Jazz-L09": "Huawei Ascend Mate 7",
"Jazz-J1": "Huawei Ascend Mate 7",
"JAT-TL00": "Huawei Honor 8A",
"JAT-L41HW": "Honor 8A Pro",
"JAT-L41": "Honor 8A Pro",
"JAT-L29HW": "Honor Play 8A",
"JAT-L29": "Honor Play 8A",
"JAT-L23HW": "Honor Play 8A",
"JAT-L21AHW": "Honor 8A Pro",
"JAT-AL00": "Honor Play 8A",
"Jakarta-LGRP2": "Huawei Y9 (2019)",
"Jackman-L22": "Huawei Y9 (2019)",
"INE-TL00": "Huawei Nova 3i",
"INE-LX2": "Huawei Nova 3i",
"INE-LX1": "Huawei Nova 3i",
"INE-LGRP1": "Huawei Nova 3i",
"INE-L22rr": "Huawei Nova 3i",
"INE-L22": "Huawei Nova 3i",
"INE-L21": "Huawei Nova 3i",
"INE-AL00": "Huawei nova 3i",
"HWI-TL00": "Huawei Nova 2S",
"HWI-LGRP1": "Huawei Nova 2S",
"HWI-AL00": "Huawei Nova 2s",
"HRY-TL00": "Honor 10 Lite",
"HRY-L21T": "Honor 10 Lite",
"HRY-L21D": "Honor 10 Lite",
"HRY-L21": "Honor 10 Lite",
"HRY-LX1": "Honor 10 Lite",
"HRY-LX2": "Honor 10 Lite",
"HRY-AL00a": "Honor 10 Lite",
"HRY-LX1MEB": "Honor 10 Lite",
"HRY-AL00TA": "Honor 20i",
"HRY-AL00T": "Honor 10 Lite",
"HRY-AL00A": "Honor 10 Lite",
"HRY-AL00": "Honor 10 Lite",
"Holly-U19": "Huawei Holly",
"Holly-U10": "Huawei Holly",
"Holly-U00": "Huawei Honor 3C",
"HMA-TL00": "Huawei Mate 20",
"HMA-L29": "Huawei Mate 20",
"HMA-L09": "Huawei Mate 20",
"HMA-AL00": "Huawei Mate 20",
"HLK-L42": "Honor 9X Pro",
"HLK-L41": "Honor 9X Pro",
"HLK-AL10": "Honor 9X",
"HLK-AL00A": "Honor 9X",
"HLK-AL00": "Honor 9X (China)",
"HDN-W09": "Huawei Honor",
"H60-L12": "Huawei Honor 6",
"H60-L04": "Huawei Honor 6",
"H60-L03": "Huawei Honor 6",
"H60-L02": "Huawei Honor 6",
"H60-L01": "Huawei Honor 6",
"H60-J1": "Huawei Honor 6",
"H30-U10": "Huawei 3C",
"H30-L02": "Huawei Honor 3C",
"H30-L01": "Huawei Honor 3C",
"GRA-UL10": "Huawei P8",
"GRA-UL00": "Huawei P8",
"GRA-TL00": "Huawei P8",
"GRA-L13": "Huawei P8",
"GRA-L09": "Huawei P8",
"GRA-L03": "Huawei P8",
"GRA-CL10": "Huawei P8",
"GRA-CL00": "Huawei P8 Standard Edition",
"GLK-TL00": "Huawei nova 5i",
"GLK-AL00": "Huawei nova 5i",
"GEM-703L": "HUAWEI Honor X2",
"GEM-703": "Huawei MediaPad X2",
"GEM-702L": "Huawei MediaPad X2",
"GEM-702": "Huawei MediaPad X2",
"GEM-701L": "Huawei MediaPad X2",
"GEM-701": "Huawei MediaPad X2",
"G760-TL00": "Huawei Ascend G7",
"G760-L03": "Huawei Ascend G7",
"G760-L01": "Huawei Ascend G7",
"G750-U10": "Huawei Honor 3X",
"G750-T20": "Huawei Honor 3X",
"G750-C00": "Huawei Honor 3X",
"G740-L00": "Huawei G740",
"G735-L23": "Huawei G Play",
"G735-L12": "Huawei G Play",
"G735-L03": "Huawei G Play",
"G730-U251": "Huawei G730",
"G730-U10": "Huawei G730",
"G700-U20": "Huawei Ascend G700",
"G700-U10": "Huawei Ascend G700",
"G7-L11": "Huawei Ascend G7",
"G7-L01": "Huawei Ascend G7",
"G630-U251": "Huawei G630",
"G630-U20": "Huawei G630",
"G630-U10": "Huawei G630",
"G630-U00": "Huawei G630",
"G629-UL00": "Huawei G629",
"G628-TL00": "Huawei Ascend G628",
"G620S-UL00": "Huawei Ascend G620s",
"G620S-L03": "Huawei Ascend G620s",
"G620S-L02": "Huawei Ascend G620s",
"G620S-L01": "Huawei Ascend G620s",
"G620-UL01": "Huawei G620",
"G620-L75": "Huawei Ascend G620s",
"G620-L72": "Huawei G620",
"G615-U10": "Huawei Ascend G615",
"G610-U20": "Huawei G610",
"G610-U15": "Huawei G610",
"G610-U00": "Huawei Ascend G6",
"G6-U251": "Huawei Ascend G6",
"G6-U10": "Huawei Ascend G6",
"G6-L33": "Huawei Ascend G6",
"G6-L22": "Huawei Ascend G6",
"G6-L11": "Huawei Ascend G6",
"G527-U081": "Huawei Ascend G527",
"G526-L33": "Huawei Ascend G526",
"G525-U00": "Huawei Ascend G525",
"G510-0251": "Huawei Ascend G510",
"G510-0200": "Huawei Ascend G510",
"G510-0100": "Huawei Ascend G510",
"FRLM-TN00": "Huawei Enjoy 20 SE",
"FRLM-L22": "Huawei Enjoy 20 SE",
"FRLM-L03": "Huawei Enjoy 20 SE",
"FRLM-AN00A": "Huawei Enjoy 20 SE",
"FRD-L19": "Huawei Honor 8",
"FRD-L14": "Huawei Honor 8",
"FRD-L09": "Huawei HONOR 8",
"FRD-L04": "Huawei Honor 8",
"FRD-L02": "Huawei HONOR 8",
"FRD-DL00": "Huawei MediaPad T2 10.0 Pro",
"FRD-C00": "Huawei Honor 8",
"FRD-AL10": "Huawei Honor 8",
"FRD-AL00": "Huawei Honor 8",
"FLA-TL10": "Huawei Y9 (2018)",
"FLA-AL20": "Huawei Y9 2018",
"FLA-AL10": "Huawei Y9 2018",
"FLA-AL00": "Huawei Y9 2018",
"Figo-L31": "Huawei P Smart",
"FIG-TL10": "Huawei Enjoy 7S Dual",
"FIG-TL00": "Huawei P smart",
"FIG-LX3": "Huawei P Smart",
"FIG-LX2": "Huawei P Smart",
"FIG-LX1": "Huawei P Smart Dual SIM",
"FIG-LA1": "Huawei P Smart",
"FIG-L31": "Huawei P Smart",
"FIG-L22": "Huawei P Smart",
"FIG-L21": "Huawei P Smart",
"FIG-L11": "Huawei P Smart",
"FIG-L03": "Huawei P Smart",
"FIG-L02": "Huawei P Smart",
"FIG-AL10": "Huawei Enjoy 7S",
"FIG-AL00": "Huawei P smart",
"FDR-A05": "Huawei MediaPad T2 10.0 Pro",
"FDR-A04": "Huawei MediaPad T2 10.0 Pro",
"FDR-A03L": "Huawei M2",
"FDR-A03": "Huawei MediaPad T2 10.0 Pro",
"FDR-A01w": "Huawei MediaPad T2 10.0 Pro",
"FDR-A01": "Huawei MediaPad T2 10.0 Pro",
"EVR-TL00": "Huawei Mate 20 X",
"EVR-N29": "Huawei Mate 20 X (5G)",
"EVR-L29": "Huawei Mate 20 X",
"EVR-AN00": "Huawei Mate 20 X (5G)",
"EVR-AL00": "Huawei Mate 20 X",
"EVA-L29": "Huawei P9",
"EVA-L19": "Huawei P9",
"EVA-L09": "Huawei P9",
"EVA-DL00": "Huawei P9",
"EVA-CL00": "Huawei P9",
"EVA-C00": "Huawei P9",
"EVA-AL10": "Huawei P9",
"EVA-AL00": "Huawei P9",
"EML-L29": "Huawei P20",
"EML-L09": "Huawei P20",
"EML-AL00": "Huawei P20",
"ELS-TN00": "Huawei P40 Pro",
"ELS-N39": "Huawei P40 Pro+",
"ELS-N29": "Huawei P40 Pro+",
"ELS-N04": "Huawei P40 Pro",
"ELS-AN10": "Huawei P40 Pro+",
"ELS-AN00": "Huawei P40 Pro",
"ELE-TL00": "Huawei P30",
"ELE-L29": "Huawei P30",
"ELE-L09": "Huawei P30",
"ELE-L04": "Huawei P30",
"ELE-AL00": "Huawei P30",
"EDI-AL10": "Huawei Honor Note 8",
"EDGE-U00": "Huawei Ascend P6",
"EDGE-C00": "Huawei Ascend P6",
"EBG-TN00": "Honor 30 Pro",
"EBG-N19": "Honor 30 Pro+",
"EBG-AN10": "Honor 30 Pro+",
"EBG-AN00": "Honor 30 Pro",
"DVCM-TN20": "",
"DVCM-AN20": "Huawei Enjoy 20 Pro",
"DVCM-AN00": "Huawei Enjoy 20 Pro",
"DUK-TL30": "Huawei Honor V9",
"DUK-L09": "Huawei Honor 8 Pro",
"DUK-AL30": "Huawei Honor V9",
"DUK-AL20": "Huawei Honor V9",
"DUB-LX3": "Huawei Y7 Prime 2019",
"DUB-LX1": "Huawei Y7 Prime 2019",
"DUB-L01": "Huawei Y7 2019",
"DUB-AL00": "Huawei Enjoy 9",
"DUA-L29": "Honor 9S",
"DRA-LX5": "Huawei Y5 Lite (2018)",
"DRA-L29": "Huawei Y5p",
"DRA-L21": "Huawei Y5 Prime 2018",
"DRA-L09": "Huawei Y5p",
"DNN-L29": "Honor 10X Lite",
"Diego-TL10": "Huawei Enjoy 6S",
"Diego-L23": "Huawei Diego-L23",
"Diego-L21": "Huawei Honor 6C",
"Diego-L03": "Huawei Diego-L03",
"Diego-L01": "Huawei Diego-L01",
"Diego-AL00": "Huawei Diego-AL00",
"Delhi-TL20": "Huawei Honor 6A",
"Delhi-L42": "Huawei Honor 6A",
"Delhi-L22": "Huawei Honor 6A",
"Delhi-AL10": "Huawei Honor 6A",
"DAV-703": "Huawei P8 MAX",
"DAV-702L": "Huawei P8 max",
"DAV-701L": "Huawei P8 max",
"D2-0082": "Huawei Ascend D2",
"CUN-U29": "Huawei Y5 II",
"CUN-TL00": "Huawei Honor 5",
"CUN-L33": "Huawei Y5 II",
"CUN-L23": "Huawei Y5 II",
"CUN-L22": "Huawei Y5 II",
"CUN-L21": "Huawei Y5 II",
"CUN-L03": "Huawei Y5 II",
"CUN-L02": "Huawei Y5 II",
"CUN-L01": "Huawei Y5 II",
"CUN-AL00": "Huawei Honor 5",
"CRR-UL20": "Huawei Mate S",
"CRR-UL00": "Huawei Mate S",
"CRR-TL00": "Huawei Mate S",
"CRR-L13": "Huawei Mate S",
"CRR-L09": "Huawei Mate S",
"CRR-CL20": "Huawei Mate S",
"CRR-CL00": "Huawei Mate S",
"CRO-UL00": "Huawei Y3 2017",
"CRO-L03": "Huawei Y3 2017",
"CRO-L02": "Huawei Y3 2017",
"CPN-W09": "Huawei M3 Lite",
"CPN-L09": "Huawei MediaPad M3 Lite",
"CPN-AL00": "Huawei M3 Lite",
"COR-TL10": "Honor Play",
"COR-AL10": "Honor Play",
"COR-AL00": "Honor Play",
"COL-TL10": "Huawei Honor 10",
"COL-TL00": "Huawei Honor 10",
"COL-L29": "Huawei Honor 10",
"COL-AL10": "Honor 10",
"CND-AN00": "Huawei nova 7 SE 5G Youth",
"CMR-W19": "Huawei MediaPad M5 Pro 10.8",
"CMR-W09TWN": "Huawei MediaPad M5",
"CMR-W09": "Huawei MediaPad M5 10.8",
"CMR-AL19": "Huawei MediaPad M5 Pro 10.8",
"CMR-AL09": "Huawei MediaPad M5 10.8",
"CM990": "Huawei CM990",
"CLT-TL00": "Huawei P20 Pro",
"CLT-L29": "Huawei P20 Pro Dual SIM",
"CLT-L09": "Huawei P20 Pro Dual SIM",
"CLT-L04": "Huawei P20 Pro Dual SIM",
"CLT-AL01": "Huawei P20 Pro Dual SIM",
"CLT-AL00": "Huawei P20 Pro Dual SIM",
"CHM-UL00": "Huawei Honor 4C",
"CHM-U01": "Huawei Honor 4C",
"CHM-TL00H": "Huawei Honor 4C",
"CHM-TL00": "Huawei Honor 4C",
"CHM-CL00": "Huawei Honor 4C",
"CHL-AL60CH": "Huawei nova 8 SE",
"CherryPlus-TL00": "Huawei Honor 4X",
"CherryPlus-L23": "Huawei Honor 4X",
"CherryPlus-L12": "Huawei Honor 4X LTE",
"CherryPlus-L11": "Huawei HONOR 4X",
"Cherry-L04": "Huawei Honor 4X",
"Cherry-CL20": "Huawei Honor 4X",
"Cherry-CL10": "Huawei Honor 4X",
"CHE2-L12": "Huawei Honor 4X",
"Che2-L11": "Huawei Honor 4X",
"CHE1-L04": "Huawei Honor 4X",
"CHE1-CL20": "Huawei Honor 4X",
"CHE1-CL10": "Huawei Honor 4X",
"CHE-TL00H": "Huawei Honor 4x",
"CHE-TL00": "Huawei Honor 4X",
"Che-L11": "Huawei Honor 4X",
"CHC-U23": "Huawei G Play Mini",
"CHC-U03": "Huawei G Play mini",
"CHC-U01": "Huawei G Play Mini",
"CDY-TN90": "Honor 30S",
"CDY-TN20": "Huawei nova 7 SE",
"CDY-TN00": "Huawei nova 7 SE",
"CDY-N29H": "Huawei nova 7 SE",
"CDY-N29B": "Huawei nova 7 SE",
"CDY-N29": "Huawei nova 7 SE",
"CDY-AN95": "Huawei nova 7 SE",
"CDY-AN90": "Honor 30S",
"CDY-AN20": "Huawei nova 7 SE",
"CDY-AN00": "Huawei nova 7 SE",
"CDL-AN50": "Huawei nova 7 SE",
"CAZ-TL20": "Huawei Nova",
"CAZ-TL10": "Huawei Nova",
"CAZ-AL10": "Huawei Nova",
"Cannes-L12": "Huawei Nova",
"Cannes-L11": "Huawei Nova",
"Cannes-L01": "Huawei Nova",
"Cannes-AL10": "Huawei Nova Cannes-AL10",
"CAN-L13": "Huawei Nova",
"CAN-L12": "Huawei Nova",
"CAN-L11": "Huawei nova",
"CAN-L03": "Huawei Nova",
"CAN-L01": "Huawei Nova",
"Cameron-W19": "Huawei MediaPad M5 Pro 10.8",
"CAM-UL00": "Huawei Honor 5A",
"CAM-TL00": "Huawei Honor 5A",
"CAM-L23": "Huawei Y6 II",
"CAM-L21": "Huawei Y6 II",
"CAM-L03": "Huawei Y6 II Compact",
"CAM-AL00": "Huawei Honor 5A",
"CairoGO-L22": "Huawei CairoGO-L22",
"CairoGO-L02": "Huawei Y3 2018",
"Cairo-U00": "Huawei Cairo-U00",
"Cairo-L23": "Huawei Cairo-L23",
"Cairo-L22": "Huawei Cairo-L22",
"Cairo-L03": "Huawei Cairo-L03",
"Cairo-L02": "Huawei Cairo-L02",
"CAG-L02": "Huawei Y3 2018",
"C8860V": "Huawei Honor",
"C8817E": "Huawei C8817E",
"C8817D": "Huawei Honor 6 Pro",
"C8816D": "Huawei C8816D",
"C8816": "Huawei C8816",
"C199s": "Huawei C199S",
"BZT3-W59": "Huawei C5 10.4",
"BZT3-W09": "",
"BZT3-AL00": "Honor 5c",
"BZT-W09": "Huawei MediaPad C5 10.1",
"BZD-W00": "Huawei MediaPad C3",
"BZD-AL00": "Huawei MediaPad C3",
"BZC-W00": "",
"BZC-AL00": "",
"BTV-W09": "Huawei M3",
"BTV-DL09": "Huawei MediaPad M3",
"BRQ-AN00CG": "Huawei nova 8 Pro 4G",
"BRQ-AN00": "Huawei nova 8 Pro 5G",
"BRQ-AL00": "Huawei nova 8 Pro 5G",
"Bond-L24": "Huawei Honor 7X",
"BOND-L21": "Huawei Honor 7X",
"BND-TL10": "Huawei Honor 7X",
"BND-L34": "Huawei Mate SE",
"BND-L31A": "Huawei Honor 7X",
"BND-L31": "Huawei Honor 7X",
"BND-L24A": "Huawei Honor 7x",
"BND-L21": "Huawei Honor 7X",
"BND-AL10": "Huawei Honor 7X",
"BND-AL00": "Huawei Honor 7X",
"BMH-TN10": "Honor 30",
"BMH-N19": "Honor 30",
"BMH-AN20": "Honor 30",
"BMH-AN10": "Honor 30",
"BLN-TL10": "Huawei Honor 6X",
"BLN-TL00": "Huawei Honor 6X",
"BLN-L24": "Huawei Honor 6X",
"BLN-L22HN": "Huawei Honor 6X",
"BLN-L22": "Huawei Honor 6X",
"BLN-L21": "Huawei Honor 6X",
"BLN-AL40": "Huawei Honor 6X",
"BLN-AL30": "Huawei Honor 6X",
"BLN-AL20": "Huawei Honor 6X",
"BLN-AL10": "Huawei Honor 6X",
"BLL-L23": "Huawei Mate 9 Lite",
"BLL-L22": "Huawei GR5 2017",
"BLL-L21": "Huawei GR5 2017",
"BLA-TL00": "Huawei Mate 10 Pro",
"BLA-L29": "Huawei Mate 10 Pro",
"BLA-L09": "Huawei Mate 10 pro",
"BLA-AL00": "Huawei Mate 10 pro",
"BLA-A09": "Huawei Mate 10 pro",
"BKL-TL10": "Huawei Honor View 10",
"BKL-L09": "Huawei Honor View 10 Global",
"BKL-L04": "Huawei Honor View 10",
"BKL-AL20": "Huawei Honor V10",
"BKL-AL00": "Huawei Honor V10",
"BKK-TL00": "Huawei Honor 8C",
"BKK-L22": "Huawei Honor 8C",
"BKK-L21": "Huawei Honor 8C",
"BKK-AL10": "Huawei Honor 8C",
"BKK-AL00": "Honor 8C",
"BGO-L03": "Huawei MediaPad T2 7.0",
"BGO-DL09": "Huawei MediaPad T2 7.0",
"BG2-W09": "Huawei MediaPad T3",
"BG2-U03": "Huawei MediaPad T3",
"BG2-U01": "Huawei MediaPad T3 7 3G",
"Berlin-L23": "Huawei Honor 6X",
"Berlin-L22": "Huawei GR5 2017",
"Berlin-L21HN": "Huawei Honor 6X",
"Berlin-L21": "Huawei Honor 6X",
"Berkeley-LGRP2": "Huawei Honor V10",
"Barca-L22": "Huawei Barca-L22",
"Barca-L21": "Huawei Nova 2 Plus",
"Barca-L03": "Huawei Nova 2 Plus",
"BAH3-W59": "Huawei MatePad 10.4",
"BAH3-W09": "Huawei MatePad 10.4",
"BAH3-L09": "Huawei MatePad 10.4",
"BAH3-AN10": "Huawei MatePad 5G",
"BAH3-AL00": "Huawei MatePad 10.4",
"BAH2-W19": "Huawei MediaPad M5 lite",
"BAH2-W09": "Huawei MediaPad M5 lite",
"BAH2-L09": "Huawei MediaPad M5 Lite",
"BAH2-AL10": "Huawei MediaPad M5 lite",
"BAH-W09": "Huawei M3 Lite",
"BAH-L09": "Huawei MediaPad M3 Lite 10",
"BAH-AL00": "Huawei M3 Lite",
"BAC-TL00": "Huawei nova 2 plus",
"BAC-L23": "Huawei nova 2 plus",
"BAC-L22": "Huawei nova 2 plus",
"BAC-L21": "Huawei nova 2 plus",
"BAC-L03": "Huawei nova 2 plus",
"BAC-AL00": "Huawei Nova 2 Plus",
"AUM-L41": "Huawei Honor 7C (Enjoy 8)",
"AUM-L29": "Huawei Honor 7A Pro",
"ATU-LX3": "Huawei Y6 2018",
"ATU-L42": "Huawei Y6 Prime 2018",
"ATU-L22": "Huawei Y6 2018",
"ATU-L21": "Huawei Y6 2018",
"ATU-L11": "Huawei Y6 2018",
"ATU-L03": "Huawei Y6 2018",
"ATU-AL10": "Huawei Enjoy 8e",
"Atomu-L21": "Huawei Y6 Prime 2018",
"Atomu-L03": "Huawei Honor 7A",
"Atomu-AL20IND": "Huawei Honor 7A",
"ATH-UL06": "Huawei ShotX",
"ATH-UL01": "Huawei ShotX",
"ATH-UL00": "Huawei Honor 7i",
"ATH-TL00": "Huawei Honor 7i",
"ATH-CL00": "Huawei Honor 7i",
"ATH-AL00": "Huawei Honor 7i",
"ASKH-TL00": "Honor Play 3",
"ASKH-AL00": "Honor Play 3",
"ARTH-TL00": "Huawei Enjoy 10",
"ARTH-L29N": "Huawei Y7p",
"ARTH-L29": "Huawei Y7p",
"ARTH-L28": "Huawei Y7p",
"ARTH-L09": "Huawei Enjoy 10",
"ARTH-L08": "Huawei Enjoy 10",
"ARTH-AL00M": "Huawei Enjoy 10",
"ARTH-AL00": "Huawei Enjoy 10",
"ARS-TL00": "Huawei Enjoy 9 Max",
"ARS-L22": "Huawei Y Max",
"Ares-L22HW": "Huawei Y Max",
"ARE-TL00": "Huawei Honor 8X Max",
"ARE-L22HN": "Huawei Honor 8X Max",
"AQM-TL00": "Huawei Enjoy 10s",
"AQM-L21A": "Huawei Y8P",
"AQM-L01": "Huawei Y8p",
"AQM-AL10HN": "Honor Play 4T Pro",
"AQM-AL00": "Huawei Enjoy 10s",
"ANG-AN00": "Huawei nova 8 5G",
"ANE-TL00": "Huawei P20 lite",
"ANE-LX3": "Huawei P20 Lite",
"ANE-LX2JOT": "Huawei P20 Lite",
"ANE-LX2J": "Huawei P20 Lite",
"ANE-LX2": "Huawei Nova 3e",
"ANE-LX1": "Huawei P20 Lite",
"ANE-LGRP1": "Huawei P20 Lite",
"ANE-L21": "Huawei P20 Lite",
"ANE-L12JPZ": "Huawei Nova 3e",
"ANE-L12": "Huawei Nova 3e",
"ANE-L03": "Huawei Nova 3e",
"ANE-L02J": "Huawei Nova 3e",
"ANE-L02": "Huawei Nova 3e",
"ANE-AL00I": "Huawei P20 Lite",
"ANE-AL00": "Huawei P20 Lite",
"ANA-TN00": "Huawei P40",
"ANA-N29": "Huawei P40",
"ANA-L04": "Huawei P40",
"ANA-AN00": "Huawei P40",
"ANA-AL00": "Huawei P40 4G",
"AMN-L29": "Huawei Y5 (2019)",
"AMN-L22": "Huawei Y5 (2019)",
"AMN-L09": "Huawei Y5 (2019)",
"ALP-TL00ZZB51": "Huawei Mate 10",
"ALP-TL00B": "Huawei Mate 10",
"ALP-TL00": "Huawei Mate 10",
"ALP-LGRP2": "Huawei Mate 10",
"ALP-LGRP1": "Huawei Mate 10",
"ALP-L29": "Huawei Mate 10",
"ALP-L09": "Huawei Mate 10",
"ALP-AL00ZZB54": "Huawei Mate 10",
"ALP-AL00ZZB02": "Huawei Mate 10",
"ALP-AL00": "Huawei Mate 10",
"ALE-TL00": "Huawei P8 Lite",
"ALE-L32": "Huawei P8 Lite",
"ALE-L23URY": "Huawei P8 Lite",
"ALE-L23": "Huawei P8 Lite",
"ALE-L21TUR": "Huawei P8 Lite",
"ALE-L21S": "Huawei P8 Lite",
"ALE-L21POL": "Huawei P8 Lite",
"ALE-L21MKD": "Huawei P8 Lite",
"ALE-L21HUN": "Huawei P8 Lite",
"ALE-L21HR": "Huawei P8 Lite",
"ALE-L21GR": "Huawei P8 Lite",
"ALE-L21FRA": "Huawei P8 Lite",
"ALE-L21DEU": "Huawei P8 Lite",
"ALE-L21AUT": "Huawei P8 Lite",
"ALE-L21": "Huawei P8 Lite",
"ALE-L03": "Huawei P8 Lite",
"ALE-L02": "Huawei P8 Lite",
"ALE-L01": "Huawei P8 Lite",
"ALE-CL00": "Huawei P8 Lite",
"AKA-L29": "Honor Play 4T",
"AKA-AL20": "Honor Play 4T",
"AKA-AL10": "Honor Play 4T",
"AGS3K-W10": "Huawei MatePad T 10s",
"AGS3K-W09": "Huawei MatePad T 10s",
"AGS3K-L09": "Huawei MatePad T 10s",
"AGS3-W09HN": "Huawei Enjoy Tablet 2",
"AGS3-W09": "Huawei MatePad T 10s",
"AGS3-W00E": "Huawei Enjoy Tablet 2",
"AGS3-W00D": "Huawei Enjoy Tablet 2",
"AGS3-W00B": "Huawei Enjoy Tablet 2",
"AGS3-L09": "Huawei MatePad T 10s",
"AGS3-AL09HN": "Huawei Enjoy Tablet 2",
"AGS3-AL00": "Huawei Enjoy Tablet 2",
"AGS2-W09HN": "Huawei MediaPad T5",
"AGS2-W09AUS": "Huawei MediaPad T5",
"AGS2-W09": "Huawei MediaPad T5",
"AGS2-L09": "Huawei MediaPad T5",
"AGS2-L03": "Huawei MediaPad T5",
"AGS2-AL00HN": "Huawei MediaPad T5",
"AGS2-AL00": "Honor Pad 5 10.1",
"AGS-W09": "Huawei MediaPad T3 10",
"AGS-L09": "Huawei MediaPad T3 10",
"AGRK-W09K": "Huawei MatePad T 10s",
"AGRK-W09": "Huawei AGRK-W09",
"AGRK-L09K": "Huawei MatePad T 10s",
"AGRK-L09": "Huawei MatePad T 10s",
"AGR-W09K": "Honor Pad X6",
"AGR-W09HN": "Huawei Enjoy Tablet 2",
"AGR-W09": "Honor Pad X6",
"AGR-L09": "Huawei MatePad T 10s",
"AGR-AL09HN": "Honor Pad X6",
"7D-504L": "Huawei MediaPad X1 7.0",
"7D-501u": "Huawei MediaPad X1 7.0",
"7D-501L": "Huawei MediaPad X1 7.0",
"704HW": "Huawei Nova Lite 2",
"608HW": "Huawei nova lite",
"NOP-AN01P": "Huawei Mate 40 Pro+",
"NOP-AN00P": "Huawei Mate 40 Pro+",
"NOP-AN00": "Huawei Mate 40 Pro+",
"NOH-N29": "Huawei Mate 40 Pro",
"NOH-AN01": "Huawei Mate 40 Pro",
"NOH-AN00": "Huawei Mate 40 Pro",
"NMO-L31": "Huawei GT3",
"NMO-L22": "Huawei GT3",
"NMO-L02": "Huawei NMO-L02",
"NICE-TL10": "Huawei Nice-TL10",
"NICE-AL10": "Huawei Nice-AL10",
"NICE-AL00": "Huawei Nice-AL00",
"NEO-L29": "Huawei Mate RS",
"NEN-L23CQ": "Huawei nova 8 5G",
"NEN-L22CQ": "Huawei nova 8 5G",
"NEN-L21CQ": "Huawei nova 8 5G",
"NEN-L03CQ": "Huawei nova 8 5G",
"NEN-L01CQ": "Huawei nova 8 5G",
"NEM-UL10": "Huawei Honor 5C",
"NEM-TL00": "Huawei Honor 5C",
"NEM-L51": "Huawei Honor 5C",
"NEM-L22": "Huawei Honor 5C",
"NEM-L21": "Huawei HONOR 7 Lite",
"NEM-AL10": "Huawei Honor 5C",
"MXWM-TN00": "Honor 30 Youth",
"MXWM-AN00": "Honor 30 Youth",
"MT7-UL00": "Huawei Ascend Mate 7",
"MT7-TL10": "Huawei Ascend Mate7",
"MT7-TL00": "Huawei Mate 7",
"MT7-L11": "Huawei Ascend Mate7",
"MT7-L09": "Huawei Ascend Mate7",
"MT7-J1": "Huawei Ascend Mate 7",
"MT7-CL00": "Huawei Ascend Mate 7",
"MT2-L05": "Huawei Ascend Mate2",
"MT1-U06": "Huawei Ascend Mate7",
"MT-L09": "Huawei Ascend Mate7",
"MRX-W39": "Huawei MatePad Pro",
"MRX-W29": "Huawei MatePad Pro",
"MRX-W19": "Huawei MatePad Pro",
"MRX-W09": "Huawei MatePad Pro",
"MRX-AN19": "Huawei MatePad Pro 5G",
"MRX-AL19": "Huawei MatePad Pro",
"MRX-AL09": "Huawei MatePad Pro",
"MRD-TL00": "Huawei Enjoy 9e",
"MRD-LX3": "Huawei Y6 2019",
"MRD-L41A": "Huawei Y6 (2019)",
"MRD-L41": "Huawei Y6 2019",
"MRD-L23": "Huawei Y6 2019",
"MRD-L22": "Huawei Y6 Pro (2019)",
"MRD-L21A": "Huawei Y6 Pro (2019)",
"MRD-L21": "Huawei Y6 2019",
"MRD-L11": "Huawei Y6 2019",
"MRD-L01": "Huawei Y6 2019",
"MRD-AL00": "Huawei Enjoy 9e",
"MOA-TL00": "Honor Play 9A",
"MOA-L49I": "Honor Play 9A",
"MOA-L49": "Honor 9A",
"MOA-AL20": "Honor Play 9A",
"MOA-AL00": "Honor Play 9A",
"MLA-UL00": "Huawei G9 Plus",
"MLA-TL10": "Huawei G9 Plus",
"MLA-TL00": "Huawei G9 Plus",
"MLA-L13": "Huawei nova plus",
"MLA-L12": "Huawei nova plus",
"MLA-L11": "Huawei nova plus",
"MLA-L03": "Huawei Nova plus",
"MLA-L02": "Huawei Nova Plus",
"MLA-L01": "Huawei Nova Plus",
"MLA-AL10": "Huawei Nova Plus",
"MLA-AL00": "Huawei Maimang 5",
"MHA-TL00": "Huawei Mate 9",
"MHA-L29": "Huawei Mate 9",
"MHA-L09": "Huawei Mate 9",
"MHA-AL00": "Huawei Mate 9 Pro",
"MED-TL00": "Huawei Enjoy 10",
"MED-L49": "Huawei Y6p",
"MED-L29II": "Honor 9A",
"MED-L29": "Honor 9A",
"MED-L09": "Huawei Y6p",
"MED-AL20": "Honor Play 9A",
"MED-AL10": "Honor Play 9A",
"MED-AL00": "Honor Play 9A",
"Maya-U29": "Huawei Honor Maya",
"Maya-TL10": "Huawei Honor Maya",
"Maya-L41": "Huawei Y6 2017",
"Maya-L13": "Huawei Honor Maya",
"Maya-L11": "Huawei Y6 2017",
"Maya-L03": "Huawei Maya L03",
"Maya-AL10": "Huawei Honor Maya",
"MAR-TL00": "Huawei nova 4e",
"MAR-L22BX": "Huawei P30 lite",
"MAR-L22B": "Huawei P30 lite",
"MAR-L22A": "Huawei P30 lite",
"MAR-L21MEB": "Huawei P30 lite",
"MAR-L21MEA": "Huawei P30 lite",
"MAR-L21H": "Huawei P30 lite",
"MAR-L21B": "Huawei P30 lite",
"MAR-L21A": "Huawei P30 lite",
"MAR-L03A": "Huawei P30 lite",
"MAR-L01MEB": "Huawei P30 lite",
'2014215': 'Xiaomi Mi 4',
'2014712': 'Xiaomi Redmi Note',
'2014817': 'Xiaomi Redmi 2',
'2014818': 'Xiaomi Redmi 2',
'2015015': 'Xiaomi Mi 4i',
'2015051': 'Xiaomi Redmi Note 2',
'2015105': 'Xiaomi Mi 5',
'2015116': 'Xiaomi Redmi Note 3',
'2015161': 'Xiaomi Redmi Note 3',
'2015213': 'Xiaomi Mi Note 2',
'2015711': 'Xiaomi Mi 5s',
'2015816': 'Xiaomi Redmi 3',
'2016001': 'Xiaomi Mi Max',
'2016002': 'Xiaomi Mi Max',
'2016007': 'Xiaomi Mi Max',
'2016031': 'Xiaomi Redmi 3s',
'2016060': 'Xiaomi Redmi 4 (4X)',
'2016070': 'Xiaomi Mi 5s Plus',
'2016090': 'Xiaomi Redmi 4 (4X)',
'2016100': 'Xiaomi Redmi Note 4',
'2016117': 'Xiaomi Redmi 4A',
'AWM-A0': 'Xiaomi Black Shark Helo',
'DLT-A0': 'Xiaomi Black Shark 2 Pro',
'DLT-H0': 'Xiaomi Black Shark 2 Pro',
'M1803D5XA': 'Xiaomi Mi Mix 2S',
'M1803E1A': 'Xiaomi Mi 8',
'M1803E6G': 'Xiaomi Redmi S2 (Redmi Y2)',
'M1803E6H': 'Xiaomi Redmi S2 (Redmi Y2)',
'M1803E6I': 'Xiaomi Redmi S2 (Redmi Y2)',
'M1803E7SG': 'Xiaomi Redmi Note 5 AI Dual Camera',
'M1803E7SH': 'Xiaomi Redmi Note 5 AI Dual Camera',
'M1804C3CG': 'Xiaomi Redmi 6A',
'M1804C3CH': 'Xiaomi Redmi 6A',
'M1804C3CI': 'Xiaomi Redmi 6A',
'M1804C3DG': 'Xiaomi Redmi 6',
'M1804C3DH': 'Xiaomi Redmi 6',
'M1804C3DI;': 'Xiaomi Redmi 6',
'M1804D2SG': 'Xiaomi Mi A2 (Mi 6X)',
'M1804D2SI': 'Xiaomi Mi A2 (Mi 6X)',
'M1804E4A': 'Xiaomi Mi Max 3',
'M1805D1SG': 'Xiaomi Mi A2 Lite (Redmi 6 Pro)',
'M1805E10A': 'Xiaomi Pocophone F1',
'M1806E7TG': 'Xiaomi Redmi Note 6 Pro',
'M1806E7TH': 'Xiaomi Redmi Note 6 Pro',
'M1806E7TI': 'Xiaomi Redmi Note 6 Pro',
'M1807E8A': 'Xiaomi Mi 8 Pro',
'M1808D2TG': 'Xiaomi Mi 8 Lite',
'M1810F6LG': 'Xiaomi Redmi 7',
'M1810F6LH': 'Xiaomi Redmi 7',
'M1810F6LI': 'Xiaomi Redmi 7',
'M1901F71': 'Xiaomi Redmi Note 7S',
'M1901F7G': 'Xiaomi Redmi Note 7',
'M1901F7H': 'Xiaomi Redmi Note 7',
'M1901F7I': 'Xiaomi Redmi Note 7',
'M1901F7S': 'Xiaomi Redmi Note 7 Pro',
'M1901F9E': 'Xiaomi Mi Play',
'M1902F1G': 'Xiaomi Mi 9',
'M1903C3EG': 'Xiaomi Redmi 7A',
'M1903C3EH': 'Xiaomi Redmi 7A',
'M1903C3EI': 'Xiaomi Redmi 7A',
'M1903C3GG': 'Xiaomi Redmi Go',
'M1903C3GH': 'Xiaomi Redmi Go',
'M1903C3GI': 'Xiaomi Redmi Go',
'M1903F10G': 'Xiaomi Mi 9T',
'M1903F10I': 'Xiaomi Redmi K20',
'M1903F11G': 'Xiaomi Mi 9T Pro',
'M1903F2G': 'Xiaomi Mi 9 SE',
'M1904F3BG': 'Xiaomi Mi 9 Lite',
'M1906F9SH': 'Xiaomi Mi A3',
'M1906F9SI': 'Xiaomi Mi A3',
'M1906G7G': 'Xiaomi Redmi Note 8 Pro',
'M1906G7I': 'Xiaomi Redmi Note 8 Pro',
'M1908C3JG': 'Xiaomi Redmi Note 8',
'M1908C3JH': 'Xiaomi Redmi Note 8',
'M1908C3JI': 'Xiaomi Redmi Note 8',
'M1908C3KG': 'Xiaomi Redmi 8A',
'M1908C3KH': 'Xiaomi Redmi 8A',
'M1908C3XG': 'Xiaomi Redmi Note 8T',
'M1910F4E': 'Xiaomi Mi CC9 Pro',
'M1910F4G': 'Xiaomi Mi Note 10 Lite',
'M1910F4S': 'Xiaomi Mi Note 10 Pro',
'M1912G7BC': 'Xiaomi Redmi K30',
'M1912G7BE': 'Xiaomi Redmi K30',
'M2001C3K3I': 'Xiaomi Redmi 8A Dual',
'M2001J1G': 'Xiaomi Mi 10 Pro 5G',
'M2001J2G': 'Xiaomi Mi 10 5G',
'M2001J2I': 'Xiaomi Mi 10 5G',
'M2002F4LG': 'Xiaomi Mi Note 10 Lite',
'M2002J9E': 'Xiaomi Mi 10 Youth 5G',
'M2002J9G': 'Xiaomi Mi 10 Lite 5G',
'M2003J15SC': 'Xiaomi Redmi 10X 4G',
'M2003J15SG': 'Xiaomi Redmi Note 9',
'M2003J15SS': 'Xiaomi Redmi Note 9',
'M2003J6A1G': 'Xiaomi Redmi Note 9S',
'M2003J6A1I': 'Xiaomi Redmi Note 9 Pro (India)',
'M2003J6B1I': 'Xiaomi Redmi Note 9 Pro Max',
'M2004C3MI': 'Xiaomi Redmi 9 (India)',
'M2004J11G': 'Xiaomi Poco F2 Pro',
'M2004J19C': 'Xiaomi Redmi 9',
'M2004J19G': 'Xiaomi Redmi 9',
'M2010J19SI': 'Xiaomi Redmi 9 Power',
'M2004J19PI': 'Xiaomi Poco M2',
'M2004J7AC': 'Xiaomi Redmi Note 10',
'M2101K6I': 'Xiaomi Redmi Note 10 Pro Max',
'M2103K19G': 'Xiaomi Redmi Note 10 5G',
'M2004J7BC': 'Xiaomi Redmi 10X Pro 5G',
'M2006C3LC': 'Xiaomi Redmi 9A',
'M2006C3LG': 'Xiaomi Redmi 9A',
'M2006C3LI': 'Xiaomi Redmi 9A',
'M2006C3LII': 'Xiaomi Redmi 9i',
'M2006C3LVG': 'Xiaomi Redmi 9AT',
'M2006C3MG': 'Xiaomi Redmi 9C',
'M2006C3MII': 'Xiaomi Redmi 9 (India)',
'M2006C3MNG': 'Xiaomi Redmi 9C NFC',
'M2006J10C': 'Xiaomi Redmi K30 Ultra',
'M2007J17C': 'Xiaomi Redmi Note 9 Pro 5G',
'M2007J17G': 'Xiaomi Mi 10T Lite 5G',
'M2007J17I': 'Xiaomi Mi 10i',
'M2007J1SC': 'Xiaomi Mi 10 Ultra',
'M2007J20CG': 'Xiaomi Poco X3 NFC',
'M2007J20CI': 'Xiaomi Poco X3',
'M2007J20CT': 'Xiaomi Poco X3 NFC',
'M2007J22C': 'Xiaomi Redmi Note 9 5G',
'M2007J3SC': 'Xiaomi Redmi K30S',
'M2007J3SG': 'Xiaomi Mi 10T Pro 5G',
'M2007J3SY': 'Xiaomi Mi 10T 5G',
'M2010J19CG': 'Xiaomi Poco M3',
'M2010J19CI': 'Xiaomi Poco M3',
'MAE136': 'Xiaomi Redmi 4 (4X)',
'MAG138': 'Xiaomi Redmi 4 (4X)',
'MCE16': 'Xiaomi Mi 6',
'MCE8': 'Xiaomi Mi Note 3',
'MCG3B': 'Xiaomi Redmi 5A',
'MCI3B': 'Xiaomi Redmi 5A',
'MDE40': 'Xiaomi Mi Max 2',
'MDE5': 'Xiaomi Mi Mix 2',
'MDG1': 'Xiaomi Redmi 5',
'MDG2': 'Xiaomi Mi A1 (Mi 5X)',
'MDI1': 'Xiaomi Redmi 5',
'MDI2': 'Xiaomi Mi A1 (Mi 5X)',
'MDI40': 'Xiaomi Mi Max 2',
'MDI6': 'Xiaomi Redmi Y1 Lite',
'MDI6S': 'Xiaomi Redmi Y1 (Note 5A)',
'MEG7': 'Xiaomi Redmi 5 Plus (Redmi Note 5)',
'MEI7': 'Xiaomi Redmi Note 5 Pro',
'MEI7S': 'Xiaomi Redmi Note 5 Pro',
'MZB07QAIN': 'Xiaomi Poco C3',
'MZB07RHIN': 'Xiaomi Poco C3',
'MZB07RIIN': 'Xiaomi Poco C3',
'MZB07RJIN': 'Xiaomi Poco C3',
'MZB07RKIN': 'Xiaomi Poco C3',
'MZB07RLIN': 'Xiaomi Poco C3',
'MZB07Z0IN': 'Xiaomi Poco X3',
'MZB07Z1IN': 'Xiaomi Poco X3',
'MZB07Z2IN': 'Xiaomi Poco X3',
'MZB07Z3IN': 'Xiaomi Poco X3',
'MZB07Z4IN': 'Xiaomi Poco X3',
'MZB7995IN': 'Xiaomi Redmi 7A',
'MZB8458IN': 'Xiaomi Redmi 8A',
'MZB8741IN': 'Xiaomi Poco X2',
'MZB8742IN': 'Xiaomi Poco X2',
'MZB8743IN': 'Xiaomi Poco X2',
'MZB8744IN': 'Xiaomi Poco X2',
'MZB8745IN': 'Xiaomi Poco X2',
'MZB8746IN': 'Xiaomi Poco X2',
'MZB9011IN': 'Xiaomi Poco X2',
'MZB9012IN': 'Xiaomi Poco X2',
'MZB9013IN': 'Xiaomi Poco X2',
'MZB9919IN': 'Xiaomi Poco M2',
'MZB9965IN': 'Xiaomi Poco X3',
'SHARK MBU-A0': 'Xiaomi Black Shark 3 Pro',
'SHARK MBU-H0': 'Xiaomi Black Shark 3 Pro',
'SKW-A0': 'Xiaomi Black Shark 2',
'SKW-H0': 'Xiaomi Black Shark 2',
"RMX1931": "Realme X",
"RMX1901": "Realme X",
"RMX1941": "Realme C2",
"RMX2156": "Realme Narzo 30",
"RMX3360": "Realme GT Master Edition",
"RMX1851": "Realme 3 Pro",
"RMX2030": "Realme 5i",
"BBG100-1": "BlackBerry Evolve",
"ASUS_X00TD": "ASUS Zenfone Max Pro M1",
"ASUS_Z017DB": "ASUS Zenfone 3",
"ASUS_X00HD": "Asus Zenfone 4 Max",
"ASUS_X00TDA": "ASUS Zenfone Max Pro M1",
"ASUS_I01WD": "Asus Zenfone 6",
"ASUS_Z01RD": "Asus Zenfone 5Z",
"ZS630KL": "Asus Zenfone 6",
"I01WD": "Asus Zenfone 6",
"V2037": "vivo Y20G",
"I2012": "vivo"
}
| true
| true
|
1c4a31324e044f38931eecf708b21caa5af9f43f
| 1,130
|
py
|
Python
|
Esophagus/genMaskedImg.py
|
mintanwei/artificial-intelligence-for-classification-and-segmentation-of-esophagus-precancerous-lesions
|
bb7bdc06f2426e99fb16f17bc081993e55db9a81
|
[
"Apache-2.0"
] | null | null | null |
Esophagus/genMaskedImg.py
|
mintanwei/artificial-intelligence-for-classification-and-segmentation-of-esophagus-precancerous-lesions
|
bb7bdc06f2426e99fb16f17bc081993e55db9a81
|
[
"Apache-2.0"
] | null | null | null |
Esophagus/genMaskedImg.py
|
mintanwei/artificial-intelligence-for-classification-and-segmentation-of-esophagus-precancerous-lesions
|
bb7bdc06f2426e99fb16f17bc081993e55db9a81
|
[
"Apache-2.0"
] | null | null | null |
# Overlay predicted lesion-mask contours on endoscopy validation images.
#
# For every image in `input_img_dir`, loads the matching predicted mask from
# `input_mask_dir` (same basename plus the suffix in L), binarizes it,
# traces its contours, draws them in blue on the image, and writes the
# result to `output_dir`.
import skimage.io  # kept per file convention (do not drop file-level imports); unused below
import os
import numpy as np
import cv2

input_img_dir = './dataset/endoscope/val/img'
input_mask_dir = './result/multi_task_99/mask_dialated3_6_1/128.0'
output_dir = './128.0_output'
L = '_2'  # filename suffix of the mask file and of the written overlay

if not os.path.exists(output_dir):
    os.mkdir(output_dir)

overlay_color_mask = [255, 0, 0]  # BGR: contours drawn in blue

for name in os.listdir(input_img_dir):
    stem, ext = os.path.splitext(name)
    # np.fromfile + cv2.imdecode instead of cv2.imread so paths containing
    # non-ASCII characters load correctly (notably on Windows).
    # Fix: the original also did a skimage.io.imread of the same file whose
    # result was immediately overwritten — that redundant read is removed.
    img = cv2.imdecode(np.fromfile(os.path.join(input_img_dir, stem + ext), dtype=np.uint8), -1)
    mask = cv2.imdecode(np.fromfile(os.path.join(input_mask_dir, stem + L + ext), dtype=np.uint8), -1)
    # Binarize the mask at 127, then extract its contours.
    _, binary_mask = cv2.threshold(mask, 127, 255, cv2.THRESH_BINARY)
    # NOTE(review): 3-tuple return is the OpenCV 3.x API; OpenCV 4.x returns
    # (contours, hierarchy) only — confirm the pinned cv2 version.
    binary_mask, contours_mask, hierarchy_mask = cv2.findContours(
        binary_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    cv2.drawContours(img, contours_mask, -1, overlay_color_mask, 3)
    # imencode + tofile mirrors the unicode-safe reading path above.
    cv2.imencode('.png', img)[1].tofile(os.path.join(output_dir, stem + L + ext))
| 34.242424
| 119
| 0.69646
|
# Overlay predicted lesion-mask contours on endoscopy validation images.
import skimage.io
import os
import numpy as np
import cv2
input_img_dir = './dataset/endoscope/val/img'
input_mask_dir = './result/multi_task_99/mask_dialated3_6_1/128.0'
output_dir = './128.0_output'
T = 0.5  # NOTE(review): unused — presumably a leftover threshold; confirm and remove.
L = '_2'  # filename suffix of the mask file and of the written overlay
if not os.path.exists(output_dir):
    os.mkdir(output_dir)
overlay_color_mask = [255, 0, 0]  # BGR: contours drawn in blue
transparency = 0
transparency = 1 - transparency  # NOTE(review): never read afterwards — dead code?
names = os.listdir(input_img_dir)
for name in names:
    name = os.path.splitext(name)  # rebinds to the (stem, extension) tuple
    # NOTE(review): this skimage read is immediately overwritten by the
    # cv2.imdecode below — redundant I/O; consider removing.
    img = skimage.io.imread(os.path.join(input_img_dir, name[0] + name[1]))
    # np.fromfile + cv2.imdecode handles non-ASCII paths that cv2.imread may not.
    img = cv2.imdecode(np.fromfile(os.path.join(input_img_dir, name[0] + name[1]), dtype=np.uint8), -1)
    mask = cv2.imdecode(np.fromfile(os.path.join(input_mask_dir, name[0] + L + name[1]), dtype=np.uint8), -1)
    # Binarize the mask at 127, trace its contours, draw them on the image.
    ret_mask, binary_mask = cv2.threshold(mask, 127, 255, cv2.THRESH_BINARY)
    # NOTE(review): 3-tuple return matches the OpenCV 3.x findContours API.
    binary_mask, contours_mask, hierarchy_mask = cv2.findContours(binary_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    cv2.drawContours(img, contours_mask, -1, overlay_color_mask, 3)
    cv2.imencode('.png', img)[1].tofile(os.path.join(output_dir, name[0] + L + name[1]))
| true
| true
|
1c4a321cc9d033f7439e19db2a7247d1847aba3d
| 3,334
|
py
|
Python
|
api/group.py
|
D3AdCa7/CTF-Platform
|
1b4b66f3a5f25f69dcd53d233718276607bed8ac
|
[
"MIT"
] | 4
|
2016-03-15T14:29:13.000Z
|
2019-03-26T09:39:19.000Z
|
api/group.py
|
D3AdCa7/CTF-Platform
|
1b4b66f3a5f25f69dcd53d233718276607bed8ac
|
[
"MIT"
] | null | null | null |
api/group.py
|
D3AdCa7/CTF-Platform
|
1b4b66f3a5f25f69dcd53d233718276607bed8ac
|
[
"MIT"
] | 3
|
2016-03-15T14:28:32.000Z
|
2019-01-28T06:05:56.000Z
|
__author__ = "Collin Petty"
__copyright__ = "Carnegie Mellon University"
__license__ = "MIT"
__maintainer__ = ["Collin Petty", "Peter Chapman"]
__credits__ = ["David Brumely", "Collin Petty", "Peter Chapman", "Tyler Nighswander", "Garrett Barboza"]
__email__ = ["collin@cmu.edu", "peter@cmu.edu"]
__status__ = "Production"
from common import db
import common
def get_group_membership(tid):
    """Return the groups a team belongs to.

    Groups the team owns come first (flagged ``owner: True``); groups where
    the team is only a member follow (``owner: False``), with owned groups
    excluded so no group appears twice.
    """
    projection = {'name': 1, 'gid': 1}
    owned = [{'name': g['name'], 'gid': g['gid'], 'owner': True}
             for g in db.groups.find({'owners': tid}, projection)]
    owned_gids = {entry['gid'] for entry in owned}
    # Member-only groups: skip any gid already reported as owned.
    member_only = [{'name': g['name'], 'gid': g['gid'], 'owner': False}
                   for g in db.groups.find({'members': tid}, projection)
                   if g['gid'] not in owned_gids]
    return owned + member_only
def create_group(tid, gname):
    """Create a new group named *gname* owned by team *tid*.

    Rejects an empty name (status 0) and a name that already exists
    (status 2); otherwise inserts the group with the creator as its sole
    owner and member and returns status 1.
    """
    if gname == '':
        return {'status': 0, 'message': "The group name cannot be empty!"}
    existing = db.groups.find_one({'name': gname})
    if existing is not None:
        return {'status': 2, 'message': "This group exists, would you like to join it?"}
    new_group = {"name": gname,
                 "owners": [tid],
                 "members": [tid],
                 "gid": common.token()}
    db.groups.insert(new_group)
    return {'status': 1, 'message': "Successfully created the group"}
def join_group(tid, gname):
    """Add team *tid* as a member of the group named *gname*.

    Returns status 0 for an empty name, 3 when no such group exists,
    2 when the team is already an owner or member, and 1 on success.
    """
    if gname == '':
        return {'status': 0, 'message': "The group name cannot be empty!"}
    group = db.groups.find_one({'name': gname})
    if group is None:
        return {'status': 3, 'message': "Cannot find group '%s', create it?" % gname}
    # Already an owner or a member of this gid? Then refuse the join.
    membership_query = {'gid': group['gid'],
                        '$or': [{'owners': tid}, {'members': tid}]}
    if db.groups.find(membership_query).count() != 0:
        return {'status': 2, 'message': "You are already in '%s'." % gname}
    db.groups.update({'gid': group['gid']}, {'$push': {'members': tid}})
    return {'status': 1, 'message': "Success! You have been added to '%s'." % gname}
def leave_group(tid, gid):
    """Remove the current team from both role lists of group *gid*."""
    if gid is None:
        return {'status': 0, 'message': "No group id passed."}
    if db.groups.find_one({'gid': gid}) is None:
        return {'status': 0, 'message': "Internal error, group not found."}
    # Pull the team from both owners and members; absent entries are no-ops.
    for role in ('owners', 'members'):
        db.groups.update({'gid': gid}, {'$pull': {role: tid}})
    return {'status': 1, 'message': "You have successfully been removed from the group."}
| 43.868421
| 119
| 0.595081
|
__author__ = "Collin Petty"
__copyright__ = "Carnegie Mellon University"
__license__ = "MIT"
__maintainer__ = ["Collin Petty", "Peter Chapman"]
__credits__ = ["David Brumely", "Collin Petty", "Peter Chapman", "Tyler Nighswander", "Garrett Barboza"]
__email__ = ["collin@cmu.edu", "peter@cmu.edu"]
__status__ = "Production"
from common import db
import common
def get_group_membership(tid):
    """Get the group membership for a team.

    Owned groups are listed first (owner=True); member-only groups follow
    (owner=False), with owned gids filtered out so no group appears twice.
    """
    groups = list()
    owners = set()
    # Owned groups first; remember their gids to avoid duplicates below.
    for g in list(db.groups.find({'owners': tid}, {'name': 1, 'gid': 1})):
        groups.append({'name': g['name'],
                       'gid': g['gid'],
                       'owner': True})
        owners.add(g['gid'])
    # Append member-only groups, skipping any gid already listed as owned.
    groups += filter(lambda g: g['gid'] not in owners,
                     ({'name': g['name'],
                       'gid': g['gid'],
                       'owner': False} for g in list(db.groups.find({'members': tid}, {'name': 1, 'gid': 1}))))
    return groups
def create_group(tid, gname):
    """Create a group named *gname* with team *tid* as owner and member.

    Rejects an empty name (status 0) or an existing name (status 2);
    on success inserts the group with a fresh gid and returns status 1.
    """
    if gname == '':
        return {'status': 0, 'message': "The group name cannot be empty!"}
    if db.groups.find_one({'name': gname}) is not None:
        return {'status': 2, 'message': "This group exists, would you like to join it?"}
    db.groups.insert({"name": gname, "owners": [tid], "members": [tid], "gid": common.token()})
    return {'status': 1, 'message': "Successfully created the group"}
def join_group(tid, gname):
    """Add team *tid* to the members of the group named *gname*.

    Returns status 0 for an empty name, 3 when the group does not exist,
    2 when the team is already an owner or member, and 1 on success.
    """
    if gname == '':
        return {'status': 0, 'message': "The group name cannot be empty!"}
    group = db.groups.find_one({'name': gname})
    if group is None:
        return {'status': 3, 'message': "Cannot find group '%s', create it?" % gname}
    # Already an owner or a member of this gid? Then refuse the join.
    if db.groups.find({'gid': group['gid'], '$or': [{'owners': tid},
                                                    {'members': tid}]}).count() != 0:
        return {'status': 2, 'message': "You are already in '%s'." % gname}
    db.groups.update({'gid': group['gid']}, {'$push': {'members': tid}})
    return {'status': 1, 'message': "Success! You have been added to '%s'." % gname}
def leave_group(tid, gid):
    """Remove team *tid* from both the owners and members of group *gid*."""
    if gid is None:
        return {'status': 0, 'message': "No group id passed."}
    if db.groups.find_one({'gid': gid}) is None:
        return {'status': 0, 'message': "Internal error, group not found."}
    # Pull the team from both role lists; absent entries are no-ops.
    db.groups.update({'gid': gid}, {'$pull': {'owners': tid}})
    db.groups.update({'gid': gid}, {'$pull': {'members': tid}})
    return {'status': 1, 'message': "You have successfully been removed from the group."}
| true
| true
|
1c4a353f8eb312454c2ccc0840d0e804a826813f
| 1,475
|
py
|
Python
|
tests/stores/test_ssh_tunnel.py
|
materialsproject/maggflow
|
9f8d7a0865ec13212a3fd00d5edebd3cb7b40e7d
|
[
"BSD-3-Clause-LBNL"
] | 15
|
2017-06-15T16:35:23.000Z
|
2022-03-05T09:57:02.000Z
|
tests/stores/test_ssh_tunnel.py
|
materialsproject/maggflow
|
9f8d7a0865ec13212a3fd00d5edebd3cb7b40e7d
|
[
"BSD-3-Clause-LBNL"
] | 573
|
2017-06-14T15:54:27.000Z
|
2022-03-31T23:20:55.000Z
|
tests/stores/test_ssh_tunnel.py
|
rkingsbury/maggma
|
53def068df1cb410bfe91e7045903997813e173a
|
[
"BSD-3-Clause-LBNL"
] | 28
|
2017-06-14T20:50:26.000Z
|
2022-03-04T16:56:40.000Z
|
import asyncio
import paramiko
import pymongo
import pytest
from monty.serialization import dumpfn, loadfn
from paramiko.ssh_exception import (
AuthenticationException,
NoValidConnectionsError,
SSHException,
)
from maggma.stores.mongolike import MongoStore, SSHTunnel
@pytest.fixture
def ssh_server_available():
    """Skip the calling test when no SSH server answers on 127.0.0.1:22."""
    probe = paramiko.SSHClient()
    # Auto-accept the unknown host key of the local test server.
    probe.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        probe.connect("127.0.0.1", 22)
    except (AuthenticationException, NoValidConnectionsError, SSHException):
        pytest.skip("No SSH server to test tunnel against")
    else:
        probe.close()
def test_mongostore_connect_via_ssh(ssh_server_available):
    """Round-trip one document through a MongoStore connected via an SSH tunnel."""
    tunnel = SSHTunnel("127.0.0.1:22", "127.0.0.1:27017")
    store = MongoStore("maggma_test", "test", ssh_tunnel=tunnel)
    store.connect()
    assert isinstance(store._collection, pymongo.collection.Collection)
    # Start from an empty collection, insert a single doc, then clean up.
    store.remove_docs({})
    assert store.count() == 0
    store.update([{"task_id": 0}])
    assert store.count() == 1
    store.remove_docs({})
    store.close()
def test_serialization(tmpdir, ssh_server_available):
    """An SSHTunnel dumped to JSON loads back as an SSHTunnel instance."""
    path = tmpdir / "tunnel.json"
    dumpfn(SSHTunnel("127.0.0.1:22", "127.0.0.1:27017"), path)
    assert isinstance(loadfn(path), SSHTunnel)
| 27.314815
| 76
| 0.721356
|
import asyncio
import paramiko
import pymongo
import pytest
from monty.serialization import dumpfn, loadfn
from paramiko.ssh_exception import (
AuthenticationException,
NoValidConnectionsError,
SSHException,
)
from maggma.stores.mongolike import MongoStore, SSHTunnel
@pytest.fixture
def ssh_server_available():
    """Skip the calling test when no SSH server answers on 127.0.0.1:22."""
    client = paramiko.SSHClient()
    # Auto-accept the unknown host key of the local test server.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect("127.0.0.1", 22)
        client.close()
    except (AuthenticationException, NoValidConnectionsError, SSHException):
        pytest.skip("No SSH server to test tunnel against")
def test_mongostore_connect_via_ssh(ssh_server_available):
    """Round-trip one document through a MongoStore connected via an SSH tunnel."""
    server = SSHTunnel("127.0.0.1:22", "127.0.0.1:27017")
    mongostore = MongoStore("maggma_test", "test", ssh_tunnel=server)
    mongostore.connect()
    assert isinstance(mongostore._collection, pymongo.collection.Collection)
    # Start from an empty collection, insert a single doc, then clean up.
    mongostore.remove_docs({})
    assert mongostore.count() == 0
    mongostore.update([{"task_id": 0}])
    assert mongostore.count() == 1
    mongostore.remove_docs({})
    mongostore.close()
def test_serialization(tmpdir, ssh_server_available):
    """An SSHTunnel dumped to JSON loads back as an SSHTunnel instance."""
    tunnel = SSHTunnel("127.0.0.1:22", "127.0.0.1:27017")
    dumpfn(tunnel, tmpdir / "tunnel.json")
    new_tunnel = loadfn(tmpdir / "tunnel.json")
    assert isinstance(new_tunnel, SSHTunnel)
| true
| true
|
1c4a35ff9382304504947b41d126b9b2e6bc6f14
| 4,332
|
py
|
Python
|
ray_exp.py
|
vakker/spg-experiments
|
4824861c3ac66387078023c14ead47ba9a9e6c72
|
[
"MIT"
] | null | null | null |
ray_exp.py
|
vakker/spg-experiments
|
4824861c3ac66387078023c14ead47ba9a9e6c72
|
[
"MIT"
] | null | null | null |
ray_exp.py
|
vakker/spg-experiments
|
4824861c3ac66387078023c14ead47ba9a9e6c72
|
[
"MIT"
] | 2
|
2021-02-15T11:12:27.000Z
|
2021-04-20T17:15:10.000Z
|
import argparse
from datetime import datetime
import ray
from ray import tune
from ray.tune import CLIReporter
from ray.tune.suggest.variant_generator import grid_search
from spg_experiments import models
from spg_experiments.gym_env import PlaygroundEnv
def exp_name(prefix):
    """Return *prefix* plus a dotted timestamp, e.g. 'PPO.2021-01-01.12:00:00'."""
    stamp = datetime.now().strftime("%Y-%m-%d.%H:%M:%S")
    return f"{prefix}.{stamp}"
class E(dict):
    """A dict that pretends to have no keys.

    Passed as ``parameter_columns`` to tune's CLIReporter so that no
    parameter columns are printed, while still behaving as a dict otherwise.
    """

    def keys(self):
        # Report an empty key set regardless of actual contents.
        return []
def trial_str_creator(trial):
    """Build a readable trial name from its evaluated hyperparameters.

    Each key keeps only the part after the last '/', list values keep their
    last element, everything else is stringified — e.g. 'trial-lr:0.001'.
    """
    pieces = []
    for key, value in trial.evaluated_params.items():
        short_key = key.split('/')[-1]
        short_val = value[-1] if isinstance(value, list) else str(value)
        pieces.append(f'{short_key}:{short_val}')
    return 'trial-' + '-'.join(pieces)
def main(args):
    """Launch a PPO grid search over playgrounds, sensor suites and hyperparameters.

    Builds an RLlib/Tune config whose grid_search entries multiply into the
    full experiment matrix, then runs it with checkpointing under args.logdir.
    """
    ray.init(local_mode=args.local)
    config = {
        "num_workers": args.num_workers,  # parallelism
        "num_envs_per_worker": 2,
        "num_cpus_per_worker": 0.5,
        "evaluation_num_workers": args.num_workers,
        # "evaluation_config": {
        # },
        "evaluation_interval": 10,
        "env": PlaygroundEnv,
        "output": "logdir",
        "env_config": {
            "agent_type": "base",
            # "index_exp": grid_search([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),
            # Each [task, variant] pair is one playground to sweep over.
            "playground_name": grid_search([
                ["foraging", "candy_collect"],
                ["foraging", "candy_fireballs"],
                ['navigation', 'endgoal_cue'],
                ['sequential', 'door_dispenser_coin'],
            ]),
            "sensors_name": grid_search([
                "blind",
                "rgb",
                "depth",
                "rgb_depth",
                "rgb_touch",
                "rgb_depth_touch",
            ]),
            # "multisteps": grid_search([0, 2, 3, 4])
            # "multisteps": 0
        },
        "num_gpus": 0.5 if args.gpu else 0,
        "framework": "torch",
        "gamma": grid_search([0.1, 0.2, 0.5, 0.8, 0.99]),  # checked
        "lr": grid_search([0.001, 0.0001, 0.00001]),
        "lambda": 0.95,  # checked
        # "kl_coeff": 0.5,  # ?
        "clip_rewards": False,
        "clip_param": 0.2,  # checked?
        "grad_clip": 0.5,  # checked
        # "vf_clip_param": 10,  # checked, it's None in SB, 10 in RLlib
        "vf_loss_coeff": 0.0001,  # checked
        "entropy_coeff": grid_search([0.05, 0.01, 0.005, 0.001]),  # checked
        "train_batch_size": 128 * 10 * 8,  # checked, but check the *4*2
        "sgd_minibatch_size": 128,  # could be larger
        "num_sgd_iter": 4,  # checked?
        "batch_mode": "truncate_episodes",
        "observation_filter": "NoFilter",
        "model": {
            "custom_model": "vision-1d",  # registered in spg_experiments.models
            "conv_filters": [
                [64, 5, 3],
                [64, 3, 2],
                [64, 3, 2],
                [128, 3, 2],
                [128, 3, 2],
                # [128, 3, 2],
            ],
            "use_lstm": grid_search([True, False]),
        },
    }
    # Stopping criteria: timesteps always; iterations/reward only when given.
    stop = {"timesteps_total": args.stop_timesteps}
    if args.stop_iters:
        stop.update({"training_iteration": args.stop_iters})
    if args.stop_reward:
        stop.update({"episode_reward_mean": args.stop_reward})
    name = exp_name('PPO')
    # E reports no keys, so the reporter prints no parameter columns.
    reporter = CLIReporter(parameter_columns=E({"_": "_"}))
    results = tune.run(
        args.run,
        config=config,
        stop=stop,
        local_dir=args.logdir,
        checkpoint_at_end=True,
        checkpoint_freq=1,
        keep_checkpoints_num=2,
        trial_name_creator=trial_str_creator,
        trial_dirname_creator=trial_str_creator,
        progress_reporter=reporter,
        name=name,
        max_failures=3,
        verbose=1)
if __name__ == "__main__":
    # CLI entry point: parse experiment settings, then launch the grid search.
    parser = argparse.ArgumentParser()
    parser.add_argument("--logdir", type=str, default="logs")
    parser.add_argument("--run", type=str, default="PPO")  # Tune trainable name
    parser.add_argument("--as-test", action="store_true")
    parser.add_argument("--local", action="store_true")  # ray local_mode for debugging
    parser.add_argument("--gpu", action="store_true")
    parser.add_argument("--monitor", action="store_true")
    parser.add_argument("--num-workers", type=int, default=5)
    parser.add_argument("--stop-timesteps", type=int, default=1000000)
    parser.add_argument("--stop-iters", type=int)
    parser.add_argument("--stop-reward", type=float)
    args = parser.parse_args()
    main(args)
| 32.328358
| 76
| 0.557248
|
import argparse
from datetime import datetime
import ray
from ray import tune
from ray.tune import CLIReporter
from ray.tune.suggest.variant_generator import grid_search
from spg_experiments import models
from spg_experiments.gym_env import PlaygroundEnv
def exp_name(prefix):
    """Return *prefix* followed by a dot and the current timestamp."""
    return prefix + '.' + datetime.now().strftime("%Y-%m-%d.%H:%M:%S")
class E(dict):
    """Dict subclass whose keys() is empty; hides CLIReporter parameter columns."""
    def keys(self):
        # Report an empty key set regardless of actual contents.
        return []
def trial_str_creator(trial):
    """Build a short trial name from the trial's evaluated hyperparameters."""
    params = {
        # Keep only the leaf of nested keys; list values keep their last element.
        k.split('/')[-1]: p[-1] if isinstance(p, list) else str(p)
        for k, p in trial.evaluated_params.items()
    }
    name = '-'.join([f'{k}:{p}' for k, p in params.items()])
    return f'trial-{name}'
def main(args):
    """Launch a PPO grid search over playgrounds, sensor suites and hyperparameters.

    Builds an RLlib/Tune config whose grid_search entries multiply into the
    full experiment matrix, then runs it with checkpointing under args.logdir.
    """
    ray.init(local_mode=args.local)
    config = {
        "num_workers": args.num_workers,  # rollout-worker parallelism
        "num_envs_per_worker": 2,
        "num_cpus_per_worker": 0.5,
        "evaluation_num_workers": args.num_workers,
        "evaluation_interval": 10,
        "env": PlaygroundEnv,
        "output": "logdir",
        "env_config": {
            "agent_type": "base",
            # Each [task, variant] pair is one playground to sweep over.
            "playground_name": grid_search([
                ["foraging", "candy_collect"],
                ["foraging", "candy_fireballs"],
                ['navigation', 'endgoal_cue'],
                ['sequential', 'door_dispenser_coin'],
            ]),
            "sensors_name": grid_search([
                "blind",
                "rgb",
                "depth",
                "rgb_depth",
                "rgb_touch",
                "rgb_depth_touch",
            ]),
        },
        "num_gpus": 0.5 if args.gpu else 0,
        "framework": "torch",
        "gamma": grid_search([0.1, 0.2, 0.5, 0.8, 0.99]),
        "lr": grid_search([0.001, 0.0001, 0.00001]),
        "lambda": 0.95,
        "clip_rewards": False,
        "clip_param": 0.2,
        "grad_clip": 0.5,
        # Fix: this entry was corrupted to the bare fragment "ecked" in this
        # copy; restored to the value used by the original source.
        "vf_loss_coeff": 0.0001,
        "entropy_coeff": grid_search([0.05, 0.01, 0.005, 0.001]),
        "train_batch_size": 128 * 10 * 8,
        "sgd_minibatch_size": 128,
        "num_sgd_iter": 4,
        "batch_mode": "truncate_episodes",
        "observation_filter": "NoFilter",
        "model": {
            "custom_model": "vision-1d",  # registered in spg_experiments.models
            "conv_filters": [
                [64, 5, 3],
                [64, 3, 2],
                [64, 3, 2],
                [128, 3, 2],
                [128, 3, 2],
            ],
            "use_lstm": grid_search([True, False]),
        },
    }
    # Stopping criteria: timesteps always; iterations/reward only when given.
    stop = {"timesteps_total": args.stop_timesteps}
    if args.stop_iters:
        stop.update({"training_iteration": args.stop_iters})
    if args.stop_reward:
        stop.update({"episode_reward_mean": args.stop_reward})
    name = exp_name('PPO')
    # E reports no keys, so the reporter prints no parameter columns.
    reporter = CLIReporter(parameter_columns=E({"_": "_"}))
    results = tune.run(
        args.run,
        config=config,
        stop=stop,
        local_dir=args.logdir,
        checkpoint_at_end=True,
        checkpoint_freq=1,
        keep_checkpoints_num=2,
        trial_name_creator=trial_str_creator,
        trial_dirname_creator=trial_str_creator,
        progress_reporter=reporter,
        name=name,
        max_failures=3,
        verbose=1)
if __name__ == "__main__":
    # CLI entry point: parse experiment settings, then launch the grid search.
    parser = argparse.ArgumentParser()
    parser.add_argument("--logdir", type=str, default="logs")
    parser.add_argument("--run", type=str, default="PPO")  # Tune trainable name
    parser.add_argument("--as-test", action="store_true")
    parser.add_argument("--local", action="store_true")  # ray local_mode for debugging
    parser.add_argument("--gpu", action="store_true")
    parser.add_argument("--monitor", action="store_true")
    parser.add_argument("--num-workers", type=int, default=5)
    parser.add_argument("--stop-timesteps", type=int, default=1000000)
    parser.add_argument("--stop-iters", type=int)
    parser.add_argument("--stop-reward", type=float)
    args = parser.parse_args()
    main(args)
| true
| true
|
1c4a375ec65d60b933b68a9c2cccd74c1d4c9d3c
| 2,168
|
py
|
Python
|
test_scripts/B3_VanillaBSM_tests.py
|
Abhi1588/PricingToolBox
|
2c0bded1a6374c481113c972c819101df043d9f2
|
[
"MIT"
] | null | null | null |
test_scripts/B3_VanillaBSM_tests.py
|
Abhi1588/PricingToolBox
|
2c0bded1a6374c481113c972c819101df043d9f2
|
[
"MIT"
] | null | null | null |
test_scripts/B3_VanillaBSM_tests.py
|
Abhi1588/PricingToolBox
|
2c0bded1a6374c481113c972c819101df043d9f2
|
[
"MIT"
] | null | null | null |
def main():
    """Sanity checks for vanilla Black-Scholes pricing helpers.

    Verifies put-call parity, monotonicity of the call price in strike and
    volatility, the no-arbitrage bounds, and digital-option replication.

    NOTE(review): relies on europeanPutOptionPrice, europeanCallOptionPrice,
    forwardPrice, digitalCall, digitalPut, zerocouponbond and the math/np/plt
    modules being provided elsewhere in this module — confirm before running.
    """
    spot = 100
    strike = 100
    maturity = 1
    rate = 0.02
    dividend = 0
    vol = .05
    # Put Call Parity: C - P should equal the forward value.
    put = europeanPutOptionPrice(spot,strike,maturity,rate,dividend,vol)
    call = europeanCallOptionPrice(spot,strike,maturity,rate,dividend,vol)
    fwd = forwardPrice(spot,strike,maturity,rate,dividend)
    print("Put Call Parity \nCall :{} - Put :{} = {} \nForward: {}".format(call,put,call-put,fwd))
    print("+"*20)
    # Price of call is monotonically decreasing in strike
    lStrike = []
    lcallPrice = []
    for i in range(strike-90, strike+110, 10):
        lStrike.append(i)
        lcallPrice.append(europeanCallOptionPrice(spot,i,maturity,rate,dividend,vol))
    fig, ax = plt.subplots()
    ax.plot(lStrike, lcallPrice, label = "call price")
    ax.set_xlabel('strikes')  # Add an x-label to the axes.
    ax.set_ylabel('option price')  # Add a y-label to the axes.
    ax.set_title("Call Option Price vs Strike")  # Add a title to the axes.
    ax.legend()
    plt.show()
    print("+"*20)
    # Price of call is between S and S - K e^(-rT) (no-arbitrage bounds)
    f = spot - strike*math.exp(-rate*maturity)
    if f < call and call < spot:
        print("True")
    print("+"*20)
    # Price of call is monotonically increasing in vol
    lvol = []
    lcallPrice = []
    for i in np.arange(vol*.5, vol*1.5, 0.005):
        lvol.append(i)
        lcallPrice.append(europeanCallOptionPrice(spot,strike,maturity,rate,dividend,i))
    fig, ax = plt.subplots()
    ax.plot(lvol, lcallPrice, label = "call price")
    ax.set_xlabel('vol')  # Add an x-label to the axes.
    ax.set_ylabel('option price')  # Add a y-label to the axes.
    ax.set_title("Call Option Price vs vol")  # Add a title to the axes.
    ax.legend()
    plt.show()
    print("+"*20)
    # Digital call + digital put should replicate a zero-coupon bond;
    # a one-point call spread approximates the digital call.
    Dcall = digitalCall(spot,strike,maturity,rate,dividend,vol)
    Dput = digitalPut(spot,strike,maturity,rate,dividend,vol)
    zcb = zerocouponbond(rate,maturity)
    call_short = europeanCallOptionPrice(spot,strike+1,maturity,rate,dividend,vol)
    spread = call - call_short
    print(Dcall+Dput, "ZCB : {}".format(zcb))
    print(Dcall,"Spread: {}".format(spread))
| 30.971429
| 98
| 0.642528
|
def main():
    """Sanity checks for vanilla Black-Scholes pricing helpers.

    Verifies put-call parity, monotonicity of the call price in strike and
    volatility, the no-arbitrage bounds, and digital-option replication.

    NOTE(review): relies on europeanPutOptionPrice, europeanCallOptionPrice,
    forwardPrice, digitalCall, digitalPut, zerocouponbond and the math/np/plt
    modules being provided elsewhere in this module — confirm before running.
    """
    spot = 100
    strike = 100
    maturity = 1
    rate = 0.02
    dividend = 0
    vol = .05
    # Put-call parity: C - P should equal the forward value.
    put = europeanPutOptionPrice(spot,strike,maturity,rate,dividend,vol)
    call = europeanCallOptionPrice(spot,strike,maturity,rate,dividend,vol)
    fwd = forwardPrice(spot,strike,maturity,rate,dividend)
    print("Put Call Parity \nCall :{} - Put :{} = {} \nForward: {}".format(call,put,call-put,fwd))
    print("+"*20)
    # Call price should be monotonically decreasing in strike.
    lStrike = []
    lcallPrice = []
    for i in range(strike-90, strike+110, 10):
        lStrike.append(i)
        lcallPrice.append(europeanCallOptionPrice(spot,i,maturity,rate,dividend,vol))
    fig, ax = plt.subplots()
    ax.plot(lStrike, lcallPrice, label = "call price")
    ax.set_xlabel('strikes')
    ax.set_ylabel('option price')
    ax.set_title("Call Option Price vs Strike")
    ax.legend()
    plt.show()
    print("+"*20)
    # No-arbitrage bounds: S - K e^{-rT} < C < S.
    f = spot - strike*math.exp(-rate*maturity)
    if f < call and call < spot:
        print("True")
    print("+"*20)
    # Call price should be monotonically increasing in volatility.
    lvol = []
    lcallPrice = []
    for i in np.arange(vol*.5, vol*1.5, 0.005):
        lvol.append(i)
        lcallPrice.append(europeanCallOptionPrice(spot,strike,maturity,rate,dividend,i))
    fig, ax = plt.subplots()
    ax.plot(lvol, lcallPrice, label = "call price")
    ax.set_xlabel('vol')
    ax.set_ylabel('option price')
    ax.set_title("Call Option Price vs vol")
    ax.legend()
    plt.show()
    print("+"*20)
    # Digital call + digital put should replicate a zero-coupon bond;
    # a one-point call spread approximates the digital call.
    Dcall = digitalCall(spot,strike,maturity,rate,dividend,vol)
    Dput = digitalPut(spot,strike,maturity,rate,dividend,vol)
    zcb = zerocouponbond(rate,maturity)
    call_short = europeanCallOptionPrice(spot,strike+1,maturity,rate,dividend,vol)
    spread = call - call_short
    print(Dcall+Dput, "ZCB : {}".format(zcb))
    print(Dcall,"Spread: {}".format(spread))
| true
| true
|
1c4a3ad80df40d707d4b56018ddf73806353244b
| 1,790
|
py
|
Python
|
config/settings/test.py
|
iamjdcollins/districtwebsite
|
89e2aea47ca3d221665bc23586a4374421be5800
|
[
"MIT"
] | null | null | null |
config/settings/test.py
|
iamjdcollins/districtwebsite
|
89e2aea47ca3d221665bc23586a4374421be5800
|
[
"MIT"
] | null | null | null |
config/settings/test.py
|
iamjdcollins/districtwebsite
|
89e2aea47ca3d221665bc23586a4374421be5800
|
[
"MIT"
] | null | null | null |
from .base import *

# Settings for the test environment: start from the shared base settings
# and override only what differs for the "-test" deployment.
ENVIRONMENT_MODE = 'test'

# Hostnames served by the test deployment, appended to the hosts the base
# settings already allow.
# Fix: removed the duplicated 'horizonte-test.slcschools.org' and
# 'innovations-test.slcschools.org' entries so each host appears once.
ALLOWED_HOSTS += [
  'backman-test.slcschools.org',
  'beaconheights-test.slcschools.org',
  'bennion-test.slcschools.org',
  'bonneville-test.slcschools.org',
  'bryant-test.slcschools.org',
  'clayton-test.slcschools.org',
  'dilworth-test.slcschools.org',
  'east-test.slcschools.org',
  'edison-test.slcschools.org',
  'emerson-test.slcschools.org',
  'ensign-test.slcschools.org',
  'escalante-test.slcschools.org',
  'franklin-test.slcschools.org',
  'glendale-test.slcschools.org',
  'hawthorne-test.slcschools.org',
  'highland-test.slcschools.org',
  'highlandpark-test.slcschools.org',
  'hillside-test.slcschools.org',
  'horizonte-test.slcschools.org',
  'indianhills-test.slcschools.org',
  'innovations-test.slcschools.org',
  'liberty-test.slcschools.org',
  'maryjackson-test.slcschools.org',
  'meadowlark-test.slcschools.org',
  'mountainview-test.slcschools.org',
  'newman-test.slcschools.org',
  'nibleypark-test.slcschools.org',
  'northstar-test.slcschools.org',
  'northwest-test.slcschools.org',
  'parkview-test.slcschools.org',
  'riley-test.slcschools.org',
  'rosepark-test.slcschools.org',
  'uintah-test.slcschools.org',
  'wasatch-test.slcschools.org',
  'washington-test.slcschools.org',
  'websites-test.slcschools.org',
  'west-test.slcschools.org',
  'whittier-test.slcschools.org',
  'www-test.ocslc.org',
  'www-test.saltlakespa.org',
  'www-test.slcschools.org',
  'www-test.slcse.org',
]

# During tests, write outgoing mail to files under /tmp instead of sending it.
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = '/tmp'

# Static assets are served from the shared test websites host.
STATIC_URL = 'https://websites-test.slcschools.org/static/'
| 31.964286
| 66
| 0.698324
|
from .base import *
ENVIRONMENT_MODE = 'test'
ALLOWED_HOSTS += [
'backman-test.slcschools.org',
'beaconheights-test.slcschools.org',
'bennion-test.slcschools.org',
'bonneville-test.slcschools.org',
'bryant-test.slcschools.org',
'clayton-test.slcschools.org',
'dilworth-test.slcschools.org',
'east-test.slcschools.org',
'edison-test.slcschools.org',
'emerson-test.slcschools.org',
'ensign-test.slcschools.org',
'escalante-test.slcschools.org',
'franklin-test.slcschools.org',
'glendale-test.slcschools.org',
'hawthorne-test.slcschools.org',
'highland-test.slcschools.org',
'highlandpark-test.slcschools.org',
'hillside-test.slcschools.org',
'horizonte-test.slcschools.org',
'horizonte-test.slcschools.org',
'indianhills-test.slcschools.org',
'innovations-test.slcschools.org',
'innovations-test.slcschools.org',
'liberty-test.slcschools.org',
'maryjackson-test.slcschools.org',
'meadowlark-test.slcschools.org',
'mountainview-test.slcschools.org',
'newman-test.slcschools.org',
'nibleypark-test.slcschools.org',
'northstar-test.slcschools.org',
'northwest-test.slcschools.org',
'parkview-test.slcschools.org',
'riley-test.slcschools.org',
'rosepark-test.slcschools.org',
'uintah-test.slcschools.org',
'wasatch-test.slcschools.org',
'washington-test.slcschools.org',
'websites-test.slcschools.org',
'west-test.slcschools.org',
'whittier-test.slcschools.org',
'www-test.ocslc.org',
'www-test.saltlakespa.org',
'www-test.slcschools.org',
'www-test.slcse.org',
]
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = '/tmp'
STATIC_URL = 'https://websites-test.slcschools.org/static/'
| true
| true
|
1c4a3b9240115b8c64d64f4014238d4942020122
| 3,525
|
py
|
Python
|
textbox/model/Seq2Seq/t5.py
|
StevenTang1998/TextBox
|
acd8298c7e6618384d585146f799d02cc475520c
|
[
"MIT"
] | 347
|
2021-01-09T07:55:55.000Z
|
2022-03-27T00:46:36.000Z
|
textbox/model/Seq2Seq/t5.py
|
StevenTang1998/TextBox
|
acd8298c7e6618384d585146f799d02cc475520c
|
[
"MIT"
] | 18
|
2021-01-12T07:37:06.000Z
|
2022-01-11T02:26:49.000Z
|
textbox/model/Seq2Seq/t5.py
|
StevenTang1998/TextBox
|
acd8298c7e6618384d585146f799d02cc475520c
|
[
"MIT"
] | 67
|
2021-01-09T07:23:52.000Z
|
2022-03-27T12:02:12.000Z
|
# @Time : 2021/3/15
# @Author : Zhuohao Yu
# @Email : zhuohao@ruc.edu.cn
r"""
T5
################################################
Reference:
Colin et al. "Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer" at JMLR 2020.
"""
import torch
import torch.nn as nn
import torch.functional as F
from textbox.model.abstract_generator import Seq2SeqGenerator
from transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config
class T5(Seq2SeqGenerator):
    """T5 conditional-generation wrapper.

    Loads a pretrained T5 checkpoint and exposes per-batch loss computation
    (``forward``) and beam-search generation (``generate``) for the
    summarization and translation tasks.
    """

    def __init__(self, config, dataset):
        super(T5, self).__init__(config, dataset)

        self.pretrained_model_path = config['pretrained_model_path']
        self.tokenizer = T5Tokenizer.from_pretrained(self.pretrained_model_path)
        self.configuration = T5Config.from_pretrained(self.pretrained_model_path)
        self.model = T5ForConditionalGeneration.from_pretrained(self.pretrained_model_path, config=self.configuration)
        self.padding_token_idx = self.tokenizer.pad_token_id
        # Per-token loss: padding is masked out and reduction is done manually
        # in forward() so each sequence is normalized by its own length.
        self.loss = nn.CrossEntropyLoss(ignore_index=self.padding_token_idx, reduction='none')
        # T5 expects a task prefix prepended to every source text.
        if config['task_type'] == "summarization":
            self.t5_task_text = "summarize: "
        elif config['task_type'] == "translation":
            self.t5_task_text = "translate German to English: "
        else:
            raise NotImplementedError("Only summarization and translation are supported.")

    def generate(self, batch_data, eval_data):
        """Beam-search decode the batch and return lowercased token lists."""
        source_text = batch_data['source_text']
        input_ids, attn_masks = self.tokenize_text(source_text)
        sample_outputs = self.model.generate(
            input_ids, attention_mask=attn_masks, num_beams=5, max_length=self.target_max_length, early_stopping=True
        )
        generated_text = self.tokenizer.batch_decode(sample_outputs, skip_special_tokens=True)
        generate_corpus = [text.lower().split() for text in generated_text]
        return generate_corpus

    def tokenize_text(self, text, is_target=False):
        """Tokenize a batch of token lists into padded tensors on ``self.device``.

        Source texts (``is_target=False``) get the task prefix; targets do not.
        Returns ``(input_ids, attention_mask)``.
        """
        # Fix: removed dead ``input_ids = []`` / ``attn_masks = []`` locals that
        # were immediately overwritten by the tokenizer output.
        texts = [(self.t5_task_text if not is_target else '') + ' '.join(t) for t in text]
        encoding_dict = self.tokenizer(
            texts, max_length=self.source_max_length, padding=True, truncation=True, return_tensors="pt"
        )
        input_ids = encoding_dict['input_ids'].to(self.device)
        attn_masks = encoding_dict['attention_mask'].to(self.device)
        return input_ids, attn_masks

    def forward(self, corpus, epoch_idx=-1):
        """Compute the mean length-normalized cross-entropy loss for a batch."""
        source_text = corpus['source_text']
        target_text = corpus['target_text']
        input_ids, attn_masks = self.tokenize_text(source_text)
        target_ids, decoder_attn_masks = self.tokenize_text(target_text, is_target=True)
        # Teacher forcing: decoder input is the target sequence shifted right.
        decoder_input_ids = target_ids[:, :-1].contiguous()
        decoder_attn_masks = decoder_attn_masks[:, :-1].contiguous()
        decoder_target_ids = target_ids[:, 1:].contiguous()
        outputs = self.model(
            input_ids,
            attention_mask=attn_masks,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attn_masks,
            use_cache=False
        )
        token_logits = outputs.logits
        # Per-token loss, then average over each sequence's non-padding length.
        loss = self.loss(token_logits.view(-1, token_logits.size(-1)), decoder_target_ids.view(-1))
        loss = loss.reshape_as(decoder_target_ids)
        length = (decoder_target_ids != self.padding_token_idx).sum(dim=1).float()
        loss = loss.sum(dim=1) / length
        return loss.mean()
| 38.736264
| 118
| 0.680284
|
import torch
import torch.nn as nn
import torch.functional as F
from textbox.model.abstract_generator import Seq2SeqGenerator
from transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config
class T5(Seq2SeqGenerator):
def __init__(self, config, dataset):
super(T5, self).__init__(config, dataset)
self.pretrained_model_path = config['pretrained_model_path']
self.tokenizer = T5Tokenizer.from_pretrained(self.pretrained_model_path)
self.configuration = T5Config.from_pretrained(self.pretrained_model_path)
self.model = T5ForConditionalGeneration.from_pretrained(self.pretrained_model_path, config=self.configuration)
self.padding_token_idx = self.tokenizer.pad_token_id
self.loss = nn.CrossEntropyLoss(ignore_index=self.padding_token_idx, reduction='none')
if config['task_type'] == "summarization":
self.t5_task_text = "summarize: "
elif config['task_type'] == "translation":
self.t5_task_text = "translate German to English: "
else:
raise NotImplementedError("Only summarization and translation are supported.")
def generate(self, batch_data, eval_data):
source_text = batch_data['source_text']
input_ids, attn_masks = self.tokenize_text(source_text)
sample_outputs = self.model.generate(
input_ids, attention_mask=attn_masks, num_beams=5, max_length=self.target_max_length, early_stopping=True
)
generated_text = self.tokenizer.batch_decode(sample_outputs, skip_special_tokens=True)
generate_corpus = [text.lower().split() for text in generated_text]
return generate_corpus
def tokenize_text(self, text, is_target=False):
input_ids = []
attn_masks = []
texts = [(self.t5_task_text if not is_target else '') + ' '.join(t) for t in text]
encoding_dict = self.tokenizer(
texts, max_length=self.source_max_length, padding=True, truncation=True, return_tensors="pt"
)
input_ids = encoding_dict['input_ids'].to(self.device)
attn_masks = encoding_dict['attention_mask'].to(self.device)
return input_ids, attn_masks
def forward(self, corpus, epoch_idx=-1):
source_text = corpus['source_text']
target_text = corpus['target_text']
input_ids, attn_masks = self.tokenize_text(source_text)
target_ids, decoder_attn_masks = self.tokenize_text(target_text, is_target=True)
decoder_input_ids = target_ids[:, :-1].contiguous()
decoder_attn_masks = decoder_attn_masks[:, :-1].contiguous()
decoder_target_ids = target_ids[:, 1:].contiguous()
outputs = self.model(
input_ids,
attention_mask=attn_masks,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attn_masks,
use_cache=False
)
token_logits = outputs.logits
loss = self.loss(token_logits.view(-1, token_logits.size(-1)), decoder_target_ids.view(-1))
loss = loss.reshape_as(decoder_target_ids)
length = (decoder_target_ids != self.padding_token_idx).sum(dim=1).float()
loss = loss.sum(dim=1) / length
return loss.mean()
| true
| true
|
1c4a3c1daeaf23bbe73948eea1a5de1332e04d05
| 2,766
|
py
|
Python
|
examples/complex/karman.py
|
david-moravec/classy_examples
|
f57e8b77db6f3e536a5367fb00ec850c6d901333
|
[
"MIT"
] | null | null | null |
examples/complex/karman.py
|
david-moravec/classy_examples
|
f57e8b77db6f3e536a5367fb00ec850c6d901333
|
[
"MIT"
] | null | null | null |
examples/complex/karman.py
|
david-moravec/classy_examples
|
f57e8b77db6f3e536a5367fb00ec850c6d901333
|
[
"MIT"
] | null | null | null |
from classy_blocks.classes.mesh import Mesh
from classy_blocks.classes.shapes import ExtrudedRing, Box
def get_mesh():
    """Build the 2D mesh for flow around a cylinder.

    A boundary-layer ring hugs the cylinder; eight rectangular background
    blocks fill the rest of the rectangular domain around it.
    """
    cylinder_diameter = 20e-3  # [m]
    ring_thickness = 5e-3  # [m]

    # domain size
    domain_height = 0.05  # [m] (increase for "proper" simulation)
    upstream_length = 0.03  # [m]
    downstream_length = 0.05  # [m]

    # size to roughly match cells outside ring
    cell_size = 0.3 * ring_thickness

    bl_thickness = 1e-4
    c2c_expansion = 1.2  # cell-to-cell expansion ratio

    # it's a 2-dimensional case
    z = 0.01

    mesh = Mesh()

    # a layer of cells on the cylinder
    d = 2 ** 0.5 / 2
    ring_point = d * cylinder_diameter / 2
    outer_point = d * (cylinder_diameter / 2 + ring_thickness)

    wall_ring = ExtrudedRing(
        [0, 0, 0],
        [0, 0, z],
        [ring_point, ring_point, 0],
        cylinder_diameter / 2 + ring_thickness
    )
    wall_ring.chop_axial(count=1)
    wall_ring.chop_tangential(start_size=cell_size)
    wall_ring.chop_radial(start_size=bl_thickness, c2c_expansion=c2c_expansion)
    wall_ring.set_inner_patch('cylinder')
    mesh.add(wall_ring)

    def add_box(p1, p2, size_axes, patches):
        # One rectangular background block: chop the requested axes to
        # cell_size and assign the given side -> patch-name mapping.
        box = Box(
            [p1[0], p1[1], 0],
            [p2[0], p2[1], z])
        for axis in size_axes:
            box.chop(axis, start_size=cell_size)
        for side, name in patches.items():
            box.set_patch(side, name)
        mesh.add(box)

    # Background blocks that fill up the whole domain, listed top row,
    # middle row (left/right of the cylinder), then bottom row:
    # (lower-left corner, upper-right corner, chopped axes, patches).
    box_specs = [
        ([-upstream_length, outer_point], [-outer_point, domain_height / 2],
         [0, 1], {'back': 'upper_wall', 'left': 'inlet'}),
        ([-outer_point, outer_point], [outer_point, domain_height / 2],
         [], {'back': 'upper_wall'}),
        ([outer_point, outer_point], [downstream_length, domain_height / 2],
         [0, 1], {'back': 'upper_wall', 'right': 'outlet'}),
        ([-upstream_length, -outer_point], [-outer_point, outer_point],
         [], {'left': 'inlet'}),
        ([outer_point, -outer_point], [downstream_length, outer_point],
         [], {'right': 'outlet'}),
        ([-upstream_length, -domain_height / 2], [-outer_point, -outer_point],
         [0, 1], {'front': 'lower_wall', 'left': 'inlet'}),
        ([-outer_point, -domain_height / 2], [outer_point, -outer_point],
         [], {'front': 'lower_wall'}),
        ([outer_point, -domain_height / 2], [downstream_length, -outer_point],
         [0, 1], {'front': 'lower_wall', 'right': 'outlet'}),
    ]
    for p1, p2, size_axes, patches in box_specs:
        add_box(p1, p2, size_axes, patches)

    return mesh
| 27.117647
| 79
| 0.58026
|
from classy_blocks.classes.mesh import Mesh
from classy_blocks.classes.shapes import ExtrudedRing, Box
def get_mesh():
cylinder_diameter = 20e-3
ring_thickness = 5e-3
domain_height = 0.05
upstream_length = 0.03
downstream_length = 0.05
cell_size = 0.3*ring_thickness
bl_thickness = 1e-4
c2c_expansion = 1.2
z = 0.01
mesh = Mesh()
# a layer of cells on the cylinder
d = 2**0.5/2
ring_point = d*cylinder_diameter/2
outer_point = d*(cylinder_diameter/2 + ring_thickness)
wall_ring = ExtrudedRing(
[0, 0, 0],
[0, 0, z],
[ring_point, ring_point, 0],
cylinder_diameter/2 + ring_thickness
)
wall_ring.chop_axial(count=1)
wall_ring.chop_tangential(start_size=cell_size)
wall_ring.chop_radial(start_size=bl_thickness, c2c_expansion=c2c_expansion)
wall_ring.set_inner_patch('cylinder')
mesh.add(wall_ring)
# boxes that fill up the whole domain
def make_box(p1, p2, size_axes, patches):
box = Box(
[p1[0], p1[1], 0],
[p2[0], p2[1], z])
for axis in size_axes:
box.chop(axis, start_size=cell_size)
for side, name in patches.items():
box.set_patch(side, name)
mesh.add(box)
# top 3 boxes
make_box(
[-upstream_length, outer_point],
[-outer_point, domain_height/2],
[0, 1],
{'back': 'upper_wall', 'left': 'inlet'})
make_box(
[-outer_point, outer_point],
[outer_point, domain_height/2],
[],
{'back': 'upper_wall'})
make_box(
[outer_point, outer_point],
[downstream_length, domain_height/2],
[0, 1],
{'back': 'upper_wall', 'right': 'outlet'})
# left and right of the cylinder
make_box(
[-upstream_length, -outer_point],
[-outer_point, outer_point],
[],
{'left': 'inlet'})
make_box(
[outer_point, -outer_point],
[downstream_length, outer_point],
[],
{'right': 'outlet'})
# bottom 3 boxes
make_box(
[-upstream_length, -domain_height/2],
[-outer_point, -outer_point],
[0, 1],
{'front': 'lower_wall', 'left': 'inlet'})
make_box(
[-outer_point, -domain_height/2],
[outer_point, -outer_point],
[],
{'front': 'lower_wall'})
make_box(
[outer_point, -domain_height/2],
[downstream_length, -outer_point],
[0, 1],
{'front': 'lower_wall', 'right': 'outlet'})
return mesh
| true
| true
|
1c4a3cc7dcf16ecf0590cb9b2204ae30f0f7f58c
| 32,215
|
py
|
Python
|
arcade/tilemap/tilemap.py
|
EnlNovius/arcade
|
020d3aafecb6c202dd76cfdf1dbd576117a608c2
|
[
"MIT"
] | null | null | null |
arcade/tilemap/tilemap.py
|
EnlNovius/arcade
|
020d3aafecb6c202dd76cfdf1dbd576117a608c2
|
[
"MIT"
] | null | null | null |
arcade/tilemap/tilemap.py
|
EnlNovius/arcade
|
020d3aafecb6c202dd76cfdf1dbd576117a608c2
|
[
"MIT"
] | null | null | null |
"""
This module provides functionality to load in JSON map files from
the Tiled Map Editor. This is achieved using the pytiled-parser
library.
For more info on Tiled see: https://www.mapeditor.org/
For more info on pytiled-parser see: https://github.com/Beefy-Swain/pytiled_parser
"""
import copy
import math
import os
from collections import OrderedDict
from pathlib import Path
from typing import Any, Dict, List, Optional, OrderedDict, Tuple, Union, cast
import pytiled_parser
import pytiled_parser.tiled_object
from arcade import (
AnimatedTimeBasedSprite,
AnimationKeyframe,
Sprite,
SpriteList,
load_texture,
)
from arcade.arcade_types import Point, TiledObject
from arcade.resources import resolve_resource_path
_FLIPPED_HORIZONTALLY_FLAG = 0x80000000
_FLIPPED_VERTICALLY_FLAG = 0x40000000
_FLIPPED_DIAGONALLY_FLAG = 0x20000000
def _get_image_info_from_tileset(tile: pytiled_parser.Tile):
    """Locate a tile within its tileset sheet.

    Returns ``(image_x, image_y, width, height)``: the pixel offset of the
    tile inside the tileset image (0, 0 when the tileset has no sheet) and
    the tile's size — taken from the tileset for sheet-based tilesets, or
    from the individual tile image otherwise.
    """
    image_x, image_y = 0, 0
    tileset = tile.tileset
    if tileset.image is not None:
        # Tiles are laid out row-major in the sheet, with an optional margin
        # around the sheet and spacing between adjacent tiles.
        margin = tileset.margin or 0
        spacing = tileset.spacing or 0
        row, col = divmod(tile.id, tileset.columns)
        image_y = margin + row * (tileset.tile_height + spacing)
        image_x = margin + col * (tileset.tile_width + spacing)

    if tileset.image:
        width, height = tileset.tile_width, tileset.tile_height
    else:
        width, height = tile.image_width, tile.image_height

    return image_x, image_y, width, height
def _get_image_source(
    tile: pytiled_parser.Tile,
    map_directory: Optional[str],
) -> Optional[Path]:
    """Resolve the image file backing a tile.

    The tile's own image takes precedence over the tileset sheet.  The path
    is tried as-is first, then relative to ``map_directory``.  Returns
    ``None`` (after printing a warning) when no image is listed or found.
    """
    image_file = tile.image or tile.tileset.image
    if not image_file:
        print(
            f"Warning for tile {tile.id}, no image source listed either for individual tile, or as a tileset."
        )
        return None

    if os.path.exists(image_file):
        return image_file

    if map_directory:
        candidate = Path(map_directory, image_file)
        if os.path.exists(candidate):
            return candidate

    print(f"Warning, can't find image {image_file} for tile {tile.id}")
    return None
class TileMap:
"""
Class that represents a fully parsed and loaded map from Tiled.
For examples on how to use this class, see:
https://arcade.academy/examples/index.html#using-tiled-map-editor-to-create-maps
Attributes:
:tiled_map: The pytiled-parser map object. This can be useful for implementing features
that aren't supported by this class by accessing the raw map data directly.
:width: The width of the map in tiles. This is the number of tiles, not pixels.
:height: The height of the map in tiles. This is the number of tiles, not pixels.
:tile_width: The width in pixels of each tile.
:tile_height: The height in pixels of each tile.
:background_color: The background color of the map.
:scaling: A global scaling value to be applied to all Sprites in the map.
:sprite_lists: A dictionary mapping SpriteLists to their layer names. This is used
for all tile layers of the map.
:object_lists: A dictionary mapping TiledObjects to their layer names. This is used
for all object layers of the map.
"""
def __init__(
self,
map_file: Union[str, Path],
scaling: float = 1.0,
layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
use_spatial_hash: Optional[bool] = None,
hit_box_algorithm: str = "Simple",
hit_box_detail: float = 4.5,
) -> None:
"""
Given a .json file, this will read in a Tiled map file, and
initialize a new TileMap object.
The `layer_options` parameter can be used to specify per layer arguments.
The available options for this are:
use_spatial_hash - A boolean to enable spatial hashing on this layer's SpriteList.
scaling - A float providing layer specific Sprite scaling.
hit_box_algorithm - A string for the hit box algorithm to use for the Sprite's in this layer.
hit_box_detail - A float specifying the level of detail for each Sprite's hitbox
For example:
code-block::
layer_options = {
"Platforms": {
"use_spatial_hash": True,
"scaling": 2.5,
},
}
The keys and their values in each layer are passed to the layer processing functions
using the `**` operator on the dictionary.
:param Union[str, Path] map_file: The JSON map file.
:param float scaling: Global scaling to apply to all Sprites.
:param Dict[str, Dict[str, Any]] layer_options: Extra parameters for each layer.
:param Optional[bool] use_spatial_hash: If set to True, this will make moving a sprite
in the SpriteList slower, but it will speed up collision detection
with items in the SpriteList. Great for doing collision detection
with static walls/platforms.
:param str hit_box_algorithm: One of 'None', 'Simple' or 'Detailed'.
:param float hit_box_detail: Float, defaults to 4.5. Used with 'Detailed' to hit box.
"""
# If we should pull from local resources, replace with proper path
map_file = resolve_resource_path(map_file)
# This attribute stores the pytiled-parser map object
self.tiled_map = pytiled_parser.parse_map(map_file)
# Set Map Attributes
self.width = self.tiled_map.map_size.width
self.height = self.tiled_map.map_size.height
self.tile_width = self.tiled_map.tile_size.width
self.tile_height = self.tiled_map.tile_size.height
self.background_color = self.tiled_map.background_color
# Global Layer Defaults
self.scaling = scaling
self.use_spatial_hash = use_spatial_hash
self.hit_box_algorithm = hit_box_algorithm
self.hit_box_detail = hit_box_detail
# Dictionaries to store the SpriteLists for processed layers
self.sprite_lists: OrderedDict[str, SpriteList] = OrderedDict[str, SpriteList]()
self.object_lists: OrderedDict[str, List[TiledObject]] = OrderedDict[
str, SpriteList
]()
self.properties = self.tiled_map.properties
global_options = {
"scaling": self.scaling,
"use_spatial_hash": self.use_spatial_hash,
"hit_box_algorithm": self.hit_box_algorithm,
"hit_box_detail": self.hit_box_detail,
}
for layer in self.tiled_map.layers:
if (layer.name in self.sprite_lists) or (layer.name in self.object_lists):
raise AttributeError(
f"You have a duplicate layer name '{layer.name}' in your Tiled map. "
"Please use unique names for all layers and tilesets in your map."
)
self._process_layer(layer, global_options, layer_options)
    def _process_layer(
        self,
        layer: pytiled_parser.Layer,
        global_options: Dict[str, Any],
        layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
    ) -> None:
        """Dispatch one pytiled-parser layer to its processing method.

        Results are stored under the layer's name in ``self.sprite_lists``
        and/or ``self.object_lists``.  Layer groups are recursed into.
        """
        processed: Union[
            SpriteList, Tuple[Optional[SpriteList], Optional[List[TiledObject]]]
        ]
        options = global_options
        # Per-layer options override the global defaults key by key; keys not
        # present for this layer keep their global value.
        if layer_options:
            if layer.name in layer_options:
                new_options = {
                    key: layer_options[layer.name].get(key, global_options[key])
                    for key in global_options
                }
                options = new_options
        if isinstance(layer, pytiled_parser.TileLayer):
            processed = self._process_tile_layer(layer, **options)
            self.sprite_lists[layer.name] = processed
        elif isinstance(layer, pytiled_parser.ObjectLayer):
            # Object layers can yield both a sprite list and a raw object list;
            # store whichever parts are non-empty.
            processed = self._process_object_layer(layer, **options)
            if processed[0]:
                sprite_list = processed[0]
                if sprite_list:
                    self.sprite_lists[layer.name] = sprite_list
            if processed[1]:
                object_list = processed[1]
                if object_list:
                    self.object_lists[layer.name] = object_list
        elif isinstance(layer, pytiled_parser.ImageLayer):
            processed = self._process_image_layer(layer, **options)
            self.sprite_lists[layer.name] = processed
        elif isinstance(layer, pytiled_parser.LayerGroup):
            # Each sub-layer re-resolves its own options from the originals.
            for sub_layer in layer.layers:
                self._process_layer(sub_layer, global_options, layer_options)
def get_cartesian(
self,
x: float,
y: float,
) -> Tuple[float, float]:
"""
Given a set of coordinates in pixel units, this returns the cartesian coordinates.
This assumes the supplied coordinates are pixel coordinates, and bases the cartesian
grid off of the Map's tile size.
If you have a map with 128x128 pixel Tiles, and you supply coordinates 500, 250 to
this function you'll receive back 3, 2
:param float x: The X Coordinate to convert
:param float y: The Y Coordinate to convert
"""
x = math.floor(x / (self.tile_width * self.scaling))
y = math.floor(y / (self.tile_height * self.scaling))
return x, y
def get_tilemap_layer(self, layer_path: str) -> Optional[pytiled_parser.Layer]:
assert isinstance(layer_path, str)
def _get_tilemap_layer(my_path, layers):
layer_name = my_path.pop(0)
for my_layer in layers:
if my_layer.name == layer_name:
if isinstance(my_layer, pytiled_parser.LayerGroup):
if len(my_path) != 0:
return _get_tilemap_layer(my_path, my_layer.layers)
else:
return my_layer
return None
path = layer_path.strip("/").split("/")
layer = _get_tilemap_layer(path, self.tiled_map.layers)
return layer
    def _get_tile_by_gid(self, tile_gid: int) -> Optional[pytiled_parser.Tile]:
        """Resolve a global tile id (gid) to a copy of its Tile.

        The top three bits of a gid encode horizontal/vertical/diagonal
        flips; they are stripped here and recorded on the returned copy.
        Tilesets are keyed by their first gid, so a gid belongs to the
        tileset whose key range ``[key, key + tile_count)`` contains it.
        Returns None (after printing a warning) when no tileset matches.
        """
        flipped_diagonally = False
        flipped_horizontally = False
        flipped_vertically = False
        # Strip the flip flags from the gid, remembering each one.
        if tile_gid & _FLIPPED_HORIZONTALLY_FLAG:
            flipped_horizontally = True
            tile_gid -= _FLIPPED_HORIZONTALLY_FLAG
        if tile_gid & _FLIPPED_DIAGONALLY_FLAG:
            flipped_diagonally = True
            tile_gid -= _FLIPPED_DIAGONALLY_FLAG
        if tile_gid & _FLIPPED_VERTICALLY_FLAG:
            flipped_vertically = True
            tile_gid -= _FLIPPED_VERTICALLY_FLAG
        for tileset_key, tileset in self.tiled_map.tilesets.items():
            if tile_gid < tileset_key:
                continue
            # No specific tile info, but there is a tile sheet
            # print(f"data {tileset_key} {tileset.tiles} {tileset.image} {tileset_key} {tile_gid} {tileset.tile_count}")
            if (
                tileset.image is not None
                and tileset_key <= tile_gid < tileset_key + tileset.tile_count
            ):
                # No specific tile info, but there is a tile sheet
                tile_ref = pytiled_parser.Tile(
                    id=(tile_gid - tileset_key), image=tileset.image
                )
            elif tileset.tiles is None and tileset.image is not None:
                # Not in this tileset, move to the next
                continue
            else:
                if tileset.tiles is None:
                    return None
                # Look up the tile by its tileset-local id.
                tile_ref = tileset.tiles.get(tile_gid - tileset_key)
            if tile_ref:
                # Return a copy so the flip flags don't mutate the shared tile.
                my_tile = copy.copy(tile_ref)
                my_tile.tileset = tileset
                my_tile.flipped_vertically = flipped_vertically
                my_tile.flipped_diagonally = flipped_diagonally
                my_tile.flipped_horizontally = flipped_horizontally
                return my_tile
        print(f"Returning NO tile for {tile_gid}.")
        return None
def _get_tile_by_id(
self, tileset: pytiled_parser.Tileset, tile_id: int
) -> Optional[pytiled_parser.Tile]:
for tileset_key, cur_tileset in self.tiled_map.tilesets.items():
if cur_tileset is tileset:
for tile_key, tile in cur_tileset.tiles.items():
if tile_id == tile.id:
return tile
return None
    def _create_sprite_from_tile(
        self,
        tile: pytiled_parser.Tile,
        scaling: float = 1.0,
        hit_box_algorithm: str = "Simple",
        hit_box_detail: float = 4.5,
    ) -> Sprite:
        """Given a tile from the parser, try and create a Sprite from it.

        Resolves the tile's image (its own image or a region of the tileset
        sheet), applies flips and custom properties, builds a custom hit box
        from any tile collision objects, and attaches animation keyframes
        for animated tiles.
        """
        # --- Step 1, Find a reference to an image this is going to be based off of
        map_source = self.tiled_map.map_file
        map_directory = os.path.dirname(map_source)
        image_file = _get_image_source(tile, map_directory)
        if tile.animation:
            my_sprite: Sprite = AnimatedTimeBasedSprite(image_file, scaling)
        else:
            image_x, image_y, width, height = _get_image_info_from_tileset(tile)
            my_sprite = Sprite(
                image_file,
                scaling,
                image_x,
                image_y,
                width,
                height,
                flipped_horizontally=tile.flipped_horizontally,
                flipped_vertically=tile.flipped_vertically,
                flipped_diagonally=tile.flipped_diagonally,
                hit_box_algorithm=hit_box_algorithm,
                hit_box_detail=hit_box_detail,
            )
        # Copy custom Tiled properties (and the tile's type) onto the sprite.
        if tile.properties is not None and len(tile.properties) > 0:
            for key, value in tile.properties.items():
                my_sprite.properties[key] = value
        if tile.type:
            my_sprite.properties["type"] = tile.type
        # Build a custom hit box from the tile's collision objects.  Points
        # are shifted so the origin is the sprite's center, with y negated
        # (Tiled uses y-down, arcade uses y-up).
        if tile.objects is not None:
            if not isinstance(tile.objects, pytiled_parser.ObjectLayer):
                print("Warning, tile.objects is not an ObjectLayer as expected.")
                return my_sprite
            if len(tile.objects.tiled_objects) > 1:
                if tile.image:
                    print(
                        f"Warning, only one hit box supported for tile with image {tile.image}."
                    )
                else:
                    print(f"Warning, only one hit box supported for tile.")
            # Only the points of the last processed hitbox survive the loop.
            for hitbox in tile.objects.tiled_objects:
                points: List[Point] = []
                if isinstance(hitbox, pytiled_parser.tiled_object.Rectangle):
                    if hitbox.size is None:
                        print(
                            f"Warning: Rectangle hitbox created for without a "
                            f"height or width Ignoring."
                        )
                        continue
                    sx = hitbox.coordinates.x - (my_sprite.width / (scaling * 2))
                    sy = -(hitbox.coordinates.y - (my_sprite.height / (scaling * 2)))
                    ex = (hitbox.coordinates.x + hitbox.size.width) - (
                        my_sprite.width / (scaling * 2)
                    )
                    # NOTE(review): unlike sy, the half-height offset here is
                    # subtracted *outside* the negation — verify the sign is
                    # intentional.
                    ey = -(hitbox.coordinates.y + hitbox.size.height) - (
                        my_sprite.height / (scaling * 2)
                    )
                    points = [[sx, sy], [ex, sy], [ex, ey], [sx, ey]]
                elif isinstance(
                    hitbox, pytiled_parser.tiled_object.Polygon
                ) or isinstance(hitbox, pytiled_parser.tiled_object.Polyline):
                    for point in hitbox.points:
                        adj_x = (
                            point.x
                            + hitbox.coordinates.x
                            - my_sprite.width / (scaling * 2)
                        )
                        adj_y = -(
                            point.y
                            + hitbox.coordinates.y
                            - my_sprite.height / (scaling * 2)
                        )
                        adj_point = [adj_x, adj_y]
                        points.append(adj_point)
                    # Drop a closing point that duplicates the first one.
                    if points[0][0] == points[-1][0] and points[0][1] == points[-1][1]:
                        points.pop()
                elif isinstance(hitbox, pytiled_parser.tiled_object.Ellipse):
                    if not hitbox.size:
                        print(
                            f"Warning: Ellipse hitbox created without a height "
                            f" or width for {tile.image}. Ignoring."
                        )
                        continue
                    # Approximate the ellipse with 8 points around its center.
                    hw = hitbox.size.width / 2
                    hh = hitbox.size.height / 2
                    cx = hitbox.coordinates.x + hw
                    cy = hitbox.coordinates.y + hh
                    acx = cx - (my_sprite.width / (scaling * 2))
                    acy = cy - (my_sprite.height / (scaling * 2))
                    total_steps = 8
                    angles = [
                        step / total_steps * 2 * math.pi for step in range(total_steps)
                    ]
                    for angle in angles:
                        x = hw * math.cos(angle) + acx
                        y = -(hh * math.sin(angle) + acy)
                        points.append([x, y])
                else:
                    print(f"Warning: Hitbox type {type(hitbox)} not supported.")
            my_sprite.hit_box = points
        # Resolve each animation frame's texture and attach the keyframes.
        if tile.animation:
            key_frame_list = []
            for frame in tile.animation:
                frame_tile = self._get_tile_by_id(tile.tileset, frame.tile_id)
                if frame_tile:
                    image_file = _get_image_source(frame_tile, map_directory)
                    if frame_tile.image and image_file:
                        texture = load_texture(image_file)
                    elif not frame_tile.image and image_file:
                        # No image for tile, pull from tilesheet
                        (
                            image_x,
                            image_y,
                            width,
                            height,
                        ) = _get_image_info_from_tileset(frame_tile)
                        texture = load_texture(
                            image_file, image_x, image_y, width, height
                        )
                    else:
                        print(
                            f"Warning: failed to load image for animation frame for tile {frame_tile.id}"
                        )
                        texture = None
                    key_frame = AnimationKeyframe(
                        frame.tile_id, frame.duration, texture
                    )
                    key_frame_list.append(key_frame)
                # The first keyframe's texture becomes the sprite's initial texture.
                if len(key_frame_list) == 1:
                    my_sprite.texture = key_frame.texture
                cast(AnimatedTimeBasedSprite, my_sprite).frames = key_frame_list
        return my_sprite
    def _process_image_layer(
        self,
        layer: pytiled_parser.ImageLayer,
        scaling: float = 1.0,
        use_spatial_hash: Optional[bool] = None,
        hit_box_algorithm: str = "Simple",
        hit_box_detail: float = 4.5,
    ) -> SpriteList:
        """Turn a Tiled image layer into a SpriteList holding one Sprite.

        Resolves the layer's image (as-is, then relative to the map file's
        directory), applies the layer's transparent color, tint, and
        opacity, and positions the sprite using the layer offset.
        """
        sprite_list: SpriteList = SpriteList(use_spatial_hash=use_spatial_hash)
        map_source = self.tiled_map.map_file
        map_directory = os.path.dirname(map_source)
        image_file = layer.image
        # Fall back to a path relative to the map file if needed.
        if not os.path.exists(image_file) and (map_directory):
            try2 = Path(map_directory, image_file)
            if not os.path.exists(try2):
                print(
                    f"Warning, can't find image {image_file} for Image Layer {layer.name}"
                )
            image_file = try2
        my_texture = load_texture(
            image_file,
            hit_box_algorithm=hit_box_algorithm,
            hit_box_detail=hit_box_detail,
        )
        # Rewrite pixels matching the layer's transparent color (RGB match)
        # to fully transparent.
        if layer.transparent_color:
            data = my_texture.image.getdata()
            target = layer.transparent_color
            new_data = []
            for item in data:
                if (
                    item[0] == target[0]
                    and item[1] == target[1]
                    and item[2] == target[2]
                ):
                    new_data.append((255, 255, 255, 0))
                else:
                    new_data.append(item)
            my_texture.image.putdata(new_data)
        my_sprite = Sprite(
            image_file,
            scaling,
            texture=my_texture,
            hit_box_algorithm=hit_box_algorithm,
            hit_box_detail=hit_box_detail,
        )
        # Copy custom Tiled properties onto the sprite.
        if layer.properties:
            for key, value in layer.properties.items():
                my_sprite.properties[key] = value
        if layer.tint_color:
            my_sprite.color = layer.tint_color
        # Layer opacity (0..1) maps to sprite alpha (0..255).
        if layer.opacity:
            my_sprite.alpha = int(layer.opacity * 255)
        # NOTE(review): center_x applies scaling to the offset but center_y
        # does not — confirm the asymmetry is intentional.
        my_sprite.center_x = (layer.offset[0] * scaling) + my_sprite.width / 2
        my_sprite.center_y = layer.offset[1]
        sprite_list.append(my_sprite)
        return sprite_list
    def _process_tile_layer(
        self,
        layer: pytiled_parser.TileLayer,
        scaling: float = 1.0,
        use_spatial_hash: Optional[bool] = None,
        hit_box_algorithm: str = "Simple",
        hit_box_detail: float = 4.5,
    ) -> SpriteList:
        """Turn a Tiled tile layer into a SpriteList, one Sprite per tile.

        Empty cells (gid 0) are skipped; an unresolvable gid raises
        ValueError.  Row 0 of the layer data is the top of the map, so rows
        are flipped into arcade's y-up coordinate system when positioning.
        """
        sprite_list: SpriteList = SpriteList(use_spatial_hash=use_spatial_hash)
        map_array = layer.data
        # Loop through the layer and add in the list
        for row_index, row in enumerate(map_array):
            for column_index, item in enumerate(row):
                # Check for an empty tile
                if item == 0:
                    continue
                tile = self._get_tile_by_gid(item)
                if tile is None:
                    raise ValueError(
                        (
                            f"Couldn't find tile for item {item} in layer "
                            f"'{layer.name}' in file '{self.tiled_map.map_file}'"
                            f"at ({column_index}, {row_index})."
                        )
                    )
                my_sprite = self._create_sprite_from_tile(
                    tile,
                    scaling=scaling,
                    hit_box_algorithm=hit_box_algorithm,
                    hit_box_detail=hit_box_detail,
                )
                if my_sprite is None:
                    print(
                        f"Warning: Could not create sprite number {item} in layer '{layer.name}' {tile.image}"
                    )
                else:
                    # Position the sprite's center from its grid cell; the row
                    # index is inverted because arcade's y axis points up.
                    my_sprite.center_x = (
                        column_index * (self.tiled_map.tile_size[0] * scaling)
                        + my_sprite.width / 2
                    )
                    my_sprite.center_y = (
                        self.tiled_map.map_size.height - row_index - 1
                    ) * (self.tiled_map.tile_size[1] * scaling) + my_sprite.height / 2
                    # Tint
                    if layer.tint_color:
                        my_sprite.color = layer.tint_color
                    # Opacity
                    opacity = layer.opacity
                    if opacity:
                        my_sprite.alpha = int(opacity * 255)
                    sprite_list.append(my_sprite)
        return sprite_list
def _process_object_layer(
    self,
    layer: pytiled_parser.ObjectLayer,
    scaling: float = 1.0,
    use_spatial_hash: Optional[bool] = None,
    hit_box_algorithm: str = "Simple",
    hit_box_detail: float = 4.5,
) -> Tuple[Optional[SpriteList], Optional[List[TiledObject]]]:
    """Process a Tiled object layer.

    Tile objects become Sprites (collected in a SpriteList); geometric
    objects (points, rectangles, polygons/polylines, ellipses) become
    TiledObjects with their shape converted into arcade's up-pointing
    coordinate system. Either tuple element is None when empty.

    :param layer: The pytiled-parser object layer to process.
    :param scaling: Scale for positions/sizes; falls back to self.scaling if falsy.
    :param use_spatial_hash: Passed to the created SpriteList.
    :param hit_box_algorithm: 'None', 'Simple' or 'Detailed'.
    :param hit_box_detail: Detail level used with 'Detailed'.
    """
    if not scaling:
        scaling = self.scaling
    sprite_list: Optional[SpriteList] = None
    objects_list: Optional[List[TiledObject]] = []
    for cur_object in layer.tiled_objects:
        if isinstance(cur_object, pytiled_parser.tiled_object.Tile):
            # Lazily create the sprite list on the first tile object.
            if not sprite_list:
                sprite_list = SpriteList(use_spatial_hash=use_spatial_hash)
            tile = self._get_tile_by_gid(cur_object.gid)
            my_sprite = self._create_sprite_from_tile(
                tile,
                scaling=scaling,
                hit_box_algorithm=hit_box_algorithm,
                hit_box_detail=hit_box_detail,
            )
            # Tiled's y axis points down; flip into arcade's up-pointing axis.
            x = cur_object.coordinates.x * scaling
            y = (
                self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
                - cur_object.coordinates.y
            ) * scaling
            my_sprite.width = width = cur_object.size[0] * scaling
            my_sprite.height = height = cur_object.size[1] * scaling
            center_x = width / 2
            center_y = height / 2
            if cur_object.rotation:
                rotation = -math.radians(cur_object.rotation)
            else:
                rotation = 0
            # Rotate the (center_x, center_y) offset about the anchor point.
            cos_rotation = math.cos(rotation)
            sin_rotation = math.sin(rotation)
            rotated_center_x = center_x * cos_rotation - center_y * sin_rotation
            # FIX: the y component of a 2D rotation is x*sin + y*cos; the
            # first term previously used center_y instead of center_x,
            # mis-positioning rotated non-square tile objects.
            rotated_center_y = center_x * sin_rotation + center_y * cos_rotation
            my_sprite.position = (x + rotated_center_x, y + rotated_center_y)
            my_sprite.angle = math.degrees(rotation)
            if layer.tint_color:
                my_sprite.color = layer.tint_color
            opacity = layer.opacity
            if opacity:
                my_sprite.alpha = int(opacity * 255)
            # Well-known custom properties map onto sprite movement fields.
            if cur_object.properties and "change_x" in cur_object.properties:
                my_sprite.change_x = float(cur_object.properties["change_x"])
            if cur_object.properties and "change_y" in cur_object.properties:
                my_sprite.change_y = float(cur_object.properties["change_y"])
            if cur_object.properties and "boundary_bottom" in cur_object.properties:
                my_sprite.boundary_bottom = float(
                    cur_object.properties["boundary_bottom"]
                )
            if cur_object.properties and "boundary_top" in cur_object.properties:
                my_sprite.boundary_top = float(
                    cur_object.properties["boundary_top"]
                )
            if cur_object.properties and "boundary_left" in cur_object.properties:
                my_sprite.boundary_left = float(
                    cur_object.properties["boundary_left"]
                )
            if cur_object.properties and "boundary_right" in cur_object.properties:
                my_sprite.boundary_right = float(
                    cur_object.properties["boundary_right"]
                )
            if cur_object.properties:
                my_sprite.properties.update(cur_object.properties)
            if cur_object.type:
                my_sprite.properties["type"] = cur_object.type
            if cur_object.name:
                my_sprite.properties["name"] = cur_object.name
            sprite_list.append(my_sprite)
            continue
        elif isinstance(cur_object, pytiled_parser.tiled_object.Point):
            x = cur_object.coordinates.x * scaling
            y = (
                self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
                - cur_object.coordinates.y
            ) * scaling
            shape = [x, y]
        elif isinstance(cur_object, pytiled_parser.tiled_object.Rectangle):
            sx = cur_object.coordinates.x
            sy = -cur_object.coordinates.y
            ex = cur_object.coordinates.x + cur_object.size.width
            ey = -(cur_object.coordinates.y + cur_object.size.height)
            p1 = [sx, sy]
            p2 = [ex, sy]
            p3 = [ex, ey]
            p4 = [sx, ey]
            shape = [p1, p2, p3, p4]
        elif isinstance(
            cur_object, pytiled_parser.tiled_object.Polygon
        ) or isinstance(cur_object, pytiled_parser.tiled_object.Polyline):
            shape = []
            for point in cur_object.points:
                x = point.x + cur_object.coordinates.x
                y = (self.height * self.tile_height) - (
                    point.y + cur_object.coordinates.y
                )
                point = (x, y)
                shape.append(point)
            # If shape is a polyline, and it is closed, we need to remove the duplicate end point
            if shape[0][0] == shape[-1][0] and shape[0][1] == shape[-1][1]:
                shape.pop()
        elif isinstance(cur_object, pytiled_parser.tiled_object.Ellipse):
            # Approximate the ellipse with an 8-point polygon.
            hw = cur_object.size.width / 2
            hh = cur_object.size.height / 2
            cx = cur_object.coordinates.x + hw
            cy = cur_object.coordinates.y + hh
            total_steps = 8
            angles = [
                step / total_steps * 2 * math.pi for step in range(total_steps)
            ]
            shape = []
            for angle in angles:
                x = hw * math.cos(angle) + cx
                y = -(hh * math.sin(angle) + cy)
                point = [x, y]
                shape.append(point)
        else:
            continue
        if shape:
            tiled_object = TiledObject(
                shape, cur_object.properties, cur_object.name, cur_object.type
            )
            if not objects_list:
                objects_list = []
            objects_list.append(tiled_object)
    return sprite_list or None, objects_list or None
def load_tilemap(
    map_file: Union[str, Path],
    scaling: float = 1.0,
    layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
    use_spatial_hash: Optional[bool] = None,
    hit_box_algorithm: str = "Simple",
    hit_box_detail: float = 4.5,
) -> TileMap:
    """
    Convenience wrapper: parse a .json Tiled map file into a `TileMap`.

    Identical to constructing `TileMap` directly; see that class's
    `__init__` for the full semantics of `layer_options`.

    :param Union[str, Path] map_file: The JSON map file.
    :param float scaling: Global scaling applied to all Sprites in the map.
    :param Optional[bool] use_spatial_hash: When True, moving a sprite in the
        SpriteList becomes slower but collision detection against it becomes
        faster — ideal for static walls/platforms.
    :param str hit_box_algorithm: One of 'None', 'Simple' or 'Detailed'.
    :param float hit_box_detail: Detail level (default 4.5) used with 'Detailed'.
    :param Dict[str, Dict[str, Any]] layer_options: Per-layer option overrides.
    """
    return TileMap(
        map_file=map_file,
        scaling=scaling,
        layer_options=layer_options,
        use_spatial_hash=use_spatial_hash,
        hit_box_algorithm=hit_box_algorithm,
        hit_box_detail=hit_box_detail,
    )
def read_tmx(map_file: Union[str, Path]) -> pytiled_parser.TiledMap:
    """
    Deprecated function to raise a warning that it has been removed.
    Exists to provide info for outdated code bases.
    """
    # Unconditionally raise so stale callers get a clear migration hint.
    message = "The read_tmx function has been replaced by the new TileMap class."
    raise DeprecationWarning(message)
| 38.673469
| 120
| 0.556418
|
import copy
import math
import os
from collections import OrderedDict
from pathlib import Path
from typing import Any, Dict, List, Optional, OrderedDict, Tuple, Union, cast
import pytiled_parser
import pytiled_parser.tiled_object
from arcade import (
AnimatedTimeBasedSprite,
AnimationKeyframe,
Sprite,
SpriteList,
load_texture,
)
from arcade.arcade_types import Point, TiledObject
from arcade.resources import resolve_resource_path
_FLIPPED_HORIZONTALLY_FLAG = 0x80000000
_FLIPPED_VERTICALLY_FLAG = 0x40000000
_FLIPPED_DIAGONALLY_FLAG = 0x20000000
def _get_image_info_from_tileset(tile: pytiled_parser.Tile):
    """Return (x, y, width, height) of this tile's region in its tileset.

    When the tileset is a sprite sheet, the offsets are computed from the
    tile id, the sheet's column count, margin and spacing; otherwise the
    tile's own standalone image dimensions are returned with zero offsets.
    """
    tileset = tile.tileset
    image_x = image_y = 0
    if tileset.image is not None:
        margin = tileset.margin or 0
        spacing = tileset.spacing or 0
        # Row-major layout: id maps to (row, column) within the sheet.
        row, col = divmod(tile.id, tileset.columns)
        image_y = margin + row * (tileset.tile_height + spacing)
        image_x = margin + col * (tileset.tile_width + spacing)
    if tileset.image:
        width, height = tileset.tile_width, tileset.tile_height
    else:
        width, height = tile.image_width, tile.image_height
    return image_x, image_y, width, height
def _get_image_source(
    tile: pytiled_parser.Tile,
    map_directory: Optional[str],
) -> Optional[Path]:
    """Locate the image file for a tile, preferring its own image over the tileset's.

    The path is tried as-is first, then relative to *map_directory*.
    Returns None (after printing a warning) when nothing usable is found.
    """
    # Tile-specific image wins; fall back to the shared tileset sheet.
    image_file = tile.image or tile.tileset.image
    if not image_file:
        print(
            f"Warning for tile {tile.id}, no image source listed either for individual tile, or as a tileset."
        )
        return None
    if os.path.exists(image_file):
        return image_file
    if map_directory:
        try2 = Path(map_directory, image_file)
        if os.path.exists(try2):
            return try2
    print(f"Warning, can't find image {image_file} for tile {tile.id}")
    return None
class TileMap:
    """In-memory representation of a Tiled map rendered into arcade objects.

    Tile, image and object layers are converted into ``sprite_lists`` and
    ``object_lists`` keyed by layer name; layer groups are recursed into.
    """

    def __init__(
        self,
        map_file: Union[str, Path],
        scaling: float = 1.0,
        layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
        use_spatial_hash: Optional[bool] = None,
        hit_box_algorithm: str = "Simple",
        hit_box_detail: float = 4.5,
    ) -> None:
        """Parse *map_file* and process every layer in it.

        :param map_file: Path (or arcade resource handle) to the Tiled map.
        :param scaling: Global scaling applied to all sprites.
        :param layer_options: Per-layer-name overrides of the global options.
        :param use_spatial_hash: Speeds up static collision detection.
        :param hit_box_algorithm: 'None', 'Simple' or 'Detailed'.
        :param hit_box_detail: Detail level used with 'Detailed'.
        :raises AttributeError: if two layers in the map share a name.
        """
        # If we should pull from local resources, replace with proper path
        map_file = resolve_resource_path(map_file)
        # This attribute stores the pytiled-parser map object
        self.tiled_map = pytiled_parser.parse_map(map_file)
        # Set Map Attributes
        self.width = self.tiled_map.map_size.width
        self.height = self.tiled_map.map_size.height
        self.tile_width = self.tiled_map.tile_size.width
        self.tile_height = self.tiled_map.tile_size.height
        self.background_color = self.tiled_map.background_color
        # Global Layer Defaults
        self.scaling = scaling
        self.use_spatial_hash = use_spatial_hash
        self.hit_box_algorithm = hit_box_algorithm
        self.hit_box_detail = hit_box_detail
        # Dictionaries to store the SpriteLists for processed layers
        self.sprite_lists: OrderedDict[str, SpriteList] = OrderedDict[str, SpriteList]()
        # FIX: the constructor's type arguments previously said
        # OrderedDict[str, SpriteList] while the annotation (correctly)
        # said List[TiledObject]; they now agree.
        self.object_lists: OrderedDict[str, List[TiledObject]] = OrderedDict[
            str, List[TiledObject]
        ]()
        self.properties = self.tiled_map.properties
        global_options = {
            "scaling": self.scaling,
            "use_spatial_hash": self.use_spatial_hash,
            "hit_box_algorithm": self.hit_box_algorithm,
            "hit_box_detail": self.hit_box_detail,
        }
        for layer in self.tiled_map.layers:
            if (layer.name in self.sprite_lists) or (layer.name in self.object_lists):
                raise AttributeError(
                    f"You have a duplicate layer name '{layer.name}' in your Tiled map. "
                    "Please use unique names for all layers and tilesets in your map."
                )
            self._process_layer(layer, global_options, layer_options)
def _process_layer(
    self,
    layer: pytiled_parser.Layer,
    global_options: Dict[str, Any],
    layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
) -> None:
    """Dispatch one layer to the matching processor and store the results.

    Per-layer entries in *layer_options* override the global defaults
    key-by-key. LayerGroups recurse with the ORIGINAL global options, so
    one layer's overrides never leak into its siblings or children.
    """
    processed: Union[
        SpriteList, Tuple[Optional[SpriteList], Optional[List[TiledObject]]]
    ]
    options = global_options
    if layer_options:
        if layer.name in layer_options:
            # Merge: start from the global defaults, override with this
            # layer's explicit entries only.
            new_options = {
                key: layer_options[layer.name].get(key, global_options[key])
                for key in global_options
            }
            options = new_options
    if isinstance(layer, pytiled_parser.TileLayer):
        processed = self._process_tile_layer(layer, **options)
        self.sprite_lists[layer.name] = processed
    elif isinstance(layer, pytiled_parser.ObjectLayer):
        # Object layers can yield both sprites (tile objects) and shapes.
        processed = self._process_object_layer(layer, **options)
        if processed[0]:
            sprite_list = processed[0]
            if sprite_list:
                self.sprite_lists[layer.name] = sprite_list
        if processed[1]:
            object_list = processed[1]
            if object_list:
                self.object_lists[layer.name] = object_list
    elif isinstance(layer, pytiled_parser.ImageLayer):
        processed = self._process_image_layer(layer, **options)
        self.sprite_lists[layer.name] = processed
    elif isinstance(layer, pytiled_parser.LayerGroup):
        for sub_layer in layer.layers:
            self._process_layer(sub_layer, global_options, layer_options)
def get_cartesian(
    self,
    x: float,
    y: float,
) -> Tuple[float, float]:
    """Convert pixel coordinates into (column, row) tile-grid coordinates.

    Accounts for the map's global scaling; results are floored.
    """
    scaled_tile_w = self.tile_width * self.scaling
    scaled_tile_h = self.tile_height * self.scaling
    return math.floor(x / scaled_tile_w), math.floor(y / scaled_tile_h)
def get_tilemap_layer(self, layer_path: str) -> Optional[pytiled_parser.Layer]:
    """Look up a raw pytiled-parser layer by a '/'-separated path.

    e.g. ``"group/sub_group/layer"``. Returns None when nothing matches.

    NOTE(review): two fall-through quirks to confirm are intended:
    a matching non-group layer is returned even if path components remain,
    and a matching LayerGroup with NO remaining components returns None.
    """
    assert isinstance(layer_path, str)

    def _get_tilemap_layer(my_path, layers):
        # Consume one path component and search this level's layers.
        layer_name = my_path.pop(0)
        for my_layer in layers:
            if my_layer.name == layer_name:
                if isinstance(my_layer, pytiled_parser.LayerGroup):
                    if len(my_path) != 0:
                        return _get_tilemap_layer(my_path, my_layer.layers)
                else:
                    return my_layer
        return None

    path = layer_path.strip("/").split("/")
    layer = _get_tilemap_layer(path, self.tiled_map.layers)
    return layer
def _get_tile_by_gid(self, tile_gid: int) -> Optional[pytiled_parser.Tile]:
    """Resolve a global tile ID (GID) to a copy of its pytiled_parser.Tile.

    The top three bits of a GID encode horizontal / vertical / diagonal
    flips; they are stripped here and recorded on the returned copy.
    Returns None when no tileset contains the GID.
    """
    flipped_diagonally = False
    flipped_horizontally = False
    flipped_vertically = False
    # Strip the Tiled flip flags from the high bits of the GID.
    if tile_gid & _FLIPPED_HORIZONTALLY_FLAG:
        flipped_horizontally = True
        tile_gid -= _FLIPPED_HORIZONTALLY_FLAG
    if tile_gid & _FLIPPED_DIAGONALLY_FLAG:
        flipped_diagonally = True
        tile_gid -= _FLIPPED_DIAGONALLY_FLAG
    if tile_gid & _FLIPPED_VERTICALLY_FLAG:
        flipped_vertically = True
        tile_gid -= _FLIPPED_VERTICALLY_FLAG
    # tilesets maps each tileset's first GID to the tileset itself.
    for tileset_key, tileset in self.tiled_map.tilesets.items():
        if tile_gid < tileset_key:
            continue
        # No specific tile info, but there is a tile sheet
        # print(f"data {tileset_key} {tileset.tiles} {tileset.image} {tileset_key} {tile_gid} {tileset.tile_count}")
        if (
            tileset.image is not None
            and tileset_key <= tile_gid < tileset_key + tileset.tile_count
        ):
            # No specific tile info, but there is a tile sheet
            tile_ref = pytiled_parser.Tile(
                id=(tile_gid - tileset_key), image=tileset.image
            )
        elif tileset.tiles is None and tileset.image is not None:
            # Not in this tileset, move to the next
            continue
        else:
            if tileset.tiles is None:
                return None
            tile_ref = tileset.tiles.get(tile_gid - tileset_key)
        if tile_ref:
            # Copy so per-placement flip flags don't mutate the shared
            # tileset entry.
            my_tile = copy.copy(tile_ref)
            my_tile.tileset = tileset
            my_tile.flipped_vertically = flipped_vertically
            my_tile.flipped_diagonally = flipped_diagonally
            my_tile.flipped_horizontally = flipped_horizontally
            return my_tile
    print(f"Returning NO tile for {tile_gid}.")
    return None
def _get_tile_by_id(
    self, tileset: pytiled_parser.Tileset, tile_id: int
) -> Optional[pytiled_parser.Tile]:
    """Find the tile with local id *tile_id* inside *tileset*.

    The tileset is matched by identity against the map's registered
    tilesets. Returns None when the tileset or tile is not found.
    """
    for cur_tileset in self.tiled_map.tilesets.values():
        if cur_tileset is tileset:
            # Robustness: a sheet-only tileset may have no per-tile entries.
            if cur_tileset.tiles is None:
                continue
            for tile in cur_tileset.tiles.values():
                if tile_id == tile.id:
                    return tile
    return None
def _create_sprite_from_tile(
    self,
    tile: pytiled_parser.Tile,
    scaling: float = 1.0,
    hit_box_algorithm: str = "Simple",
    hit_box_detail: float = 4.5,
) -> Sprite:
    """Build an arcade Sprite (or AnimatedTimeBasedSprite) from a tile.

    Applies the tile's image, flip flags, custom properties, collision
    objects (hit box) and animation frames, when present.
    """
    # --- Step 1, Find a reference to an image this is going to be based off of
    map_source = self.tiled_map.map_file
    map_directory = os.path.dirname(map_source)
    image_file = _get_image_source(tile, map_directory)
    if tile.animation:
        my_sprite: Sprite = AnimatedTimeBasedSprite(image_file, scaling)
    else:
        image_x, image_y, width, height = _get_image_info_from_tileset(tile)
        my_sprite = Sprite(
            image_file,
            scaling,
            image_x,
            image_y,
            width,
            height,
            flipped_horizontally=tile.flipped_horizontally,
            flipped_vertically=tile.flipped_vertically,
            flipped_diagonally=tile.flipped_diagonally,
            hit_box_algorithm=hit_box_algorithm,
            hit_box_detail=hit_box_detail,
        )
    # Copy custom properties (and the tile's type) onto the sprite.
    if tile.properties is not None and len(tile.properties) > 0:
        for key, value in tile.properties.items():
            my_sprite.properties[key] = value
    if tile.type:
        my_sprite.properties["type"] = tile.type
    # Collision objects drawn in the Tiled editor define the hit box.
    if tile.objects is not None:
        if not isinstance(tile.objects, pytiled_parser.ObjectLayer):
            print("Warning, tile.objects is not an ObjectLayer as expected.")
            return my_sprite
        if len(tile.objects.tiled_objects) > 1:
            if tile.image:
                print(
                    f"Warning, only one hit box supported for tile with image {tile.image}."
                )
            else:
                print(f"Warning, only one hit box supported for tile.")
        # NOTE(review): `points` is rebuilt per hitbox but assigned after the
        # loop, so only the LAST collision object wins — consistent with the
        # "only one hit box supported" warning above.
        for hitbox in tile.objects.tiled_objects:
            points: List[Point] = []
            if isinstance(hitbox, pytiled_parser.tiled_object.Rectangle):
                if hitbox.size is None:
                    print(
                        f"Warning: Rectangle hitbox created for without a "
                        f"height or width Ignoring."
                    )
                    continue
                # Shift so the hit box is centered on the sprite; y is
                # negated because Tiled's y axis points down.
                sx = hitbox.coordinates.x - (my_sprite.width / (scaling * 2))
                sy = -(hitbox.coordinates.y - (my_sprite.height / (scaling * 2)))
                ex = (hitbox.coordinates.x + hitbox.size.width) - (
                    my_sprite.width / (scaling * 2)
                )
                ey = -(hitbox.coordinates.y + hitbox.size.height) - (
                    my_sprite.height / (scaling * 2)
                )
                points = [[sx, sy], [ex, sy], [ex, ey], [sx, ey]]
            elif isinstance(
                hitbox, pytiled_parser.tiled_object.Polygon
            ) or isinstance(hitbox, pytiled_parser.tiled_object.Polyline):
                for point in hitbox.points:
                    adj_x = (
                        point.x
                        + hitbox.coordinates.x
                        - my_sprite.width / (scaling * 2)
                    )
                    adj_y = -(
                        point.y
                        + hitbox.coordinates.y
                        - my_sprite.height / (scaling * 2)
                    )
                    adj_point = [adj_x, adj_y]
                    points.append(adj_point)
                # Closed polylines duplicate the first point; drop the copy.
                if points[0][0] == points[-1][0] and points[0][1] == points[-1][1]:
                    points.pop()
            elif isinstance(hitbox, pytiled_parser.tiled_object.Ellipse):
                if not hitbox.size:
                    print(
                        f"Warning: Ellipse hitbox created without a height "
                        f" or width for {tile.image}. Ignoring."
                    )
                    continue
                # Approximate the ellipse with an 8-point polygon.
                hw = hitbox.size.width / 2
                hh = hitbox.size.height / 2
                cx = hitbox.coordinates.x + hw
                cy = hitbox.coordinates.y + hh
                acx = cx - (my_sprite.width / (scaling * 2))
                acy = cy - (my_sprite.height / (scaling * 2))
                total_steps = 8
                angles = [
                    step / total_steps * 2 * math.pi for step in range(total_steps)
                ]
                for angle in angles:
                    x = hw * math.cos(angle) + acx
                    y = -(hh * math.sin(angle) + acy)
                    points.append([x, y])
            else:
                print(f"Warning: Hitbox type {type(hitbox)} not supported.")
        my_sprite.hit_box = points
    # Animation frames: resolve each frame's tile and load its texture.
    if tile.animation:
        key_frame_list = []
        for frame in tile.animation:
            frame_tile = self._get_tile_by_id(tile.tileset, frame.tile_id)
            if frame_tile:
                image_file = _get_image_source(frame_tile, map_directory)
                if frame_tile.image and image_file:
                    texture = load_texture(image_file)
                elif not frame_tile.image and image_file:
                    # No image for tile, pull from tilesheet
                    (
                        image_x,
                        image_y,
                        width,
                        height,
                    ) = _get_image_info_from_tileset(frame_tile)
                    texture = load_texture(
                        image_file, image_x, image_y, width, height
                    )
                else:
                    print(
                        f"Warning: failed to load image for animation frame for tile {frame_tile.id}"
                    )
                    texture = None
                key_frame = AnimationKeyframe(
                    frame.tile_id, frame.duration, texture
                )
                key_frame_list.append(key_frame)
                # Show the first frame immediately for one-frame animations.
                if len(key_frame_list) == 1:
                    my_sprite.texture = key_frame.texture
        cast(AnimatedTimeBasedSprite, my_sprite).frames = key_frame_list
    return my_sprite
def _process_image_layer(
    self,
    layer: pytiled_parser.ImageLayer,
    scaling: float = 1.0,
    use_spatial_hash: Optional[bool] = None,
    hit_box_algorithm: str = "Simple",
    hit_box_detail: float = 4.5,
) -> SpriteList:
    """Turn a Tiled image layer into a SpriteList holding one Sprite.

    Honors the layer's transparent color key (matching pixels become
    fully transparent), custom properties, tint, opacity and offset.
    """
    sprite_list: SpriteList = SpriteList(use_spatial_hash=use_spatial_hash)
    map_source = self.tiled_map.map_file
    map_directory = os.path.dirname(map_source)
    image_file = layer.image
    # Resolve the image path relative to the map file if needed.
    if not os.path.exists(image_file) and (map_directory):
        try2 = Path(map_directory, image_file)
        if not os.path.exists(try2):
            print(
                f"Warning, can't find image {image_file} for Image Layer {layer.name}"
            )
        image_file = try2
    my_texture = load_texture(
        image_file,
        hit_box_algorithm=hit_box_algorithm,
        hit_box_detail=hit_box_detail,
    )
    if layer.transparent_color:
        # Rewrite every pixel matching the RGB key as fully transparent.
        data = my_texture.image.getdata()
        target = layer.transparent_color
        new_data = []
        for item in data:
            if (
                item[0] == target[0]
                and item[1] == target[1]
                and item[2] == target[2]
            ):
                new_data.append((255, 255, 255, 0))
            else:
                new_data.append(item)
        my_texture.image.putdata(new_data)
    my_sprite = Sprite(
        image_file,
        scaling,
        texture=my_texture,
        hit_box_algorithm=hit_box_algorithm,
        hit_box_detail=hit_box_detail,
    )
    if layer.properties:
        for key, value in layer.properties.items():
            my_sprite.properties[key] = value
    if layer.tint_color:
        my_sprite.color = layer.tint_color
    if layer.opacity:
        my_sprite.alpha = int(layer.opacity * 255)
    # NOTE(review): center_x uses scaled offset + width/2, but center_y is
    # the raw, unscaled offset with no height/2 — looks asymmetric; confirm
    # against upstream before changing.
    my_sprite.center_x = (layer.offset[0] * scaling) + my_sprite.width / 2
    my_sprite.center_y = layer.offset[1]
    sprite_list.append(my_sprite)
    return sprite_list
def _process_tile_layer(
    self,
    layer: pytiled_parser.TileLayer,
    scaling: float = 1.0,
    use_spatial_hash: Optional[bool] = None,
    hit_box_algorithm: str = "Simple",
    hit_box_detail: float = 4.5,
) -> SpriteList:
    """Turn a Tiled tile layer into a SpriteList, one Sprite per non-empty cell.

    :param layer: The pytiled-parser tile layer to process.
    :param scaling: Scale applied to every sprite's size and position.
    :param use_spatial_hash: Passed to the created SpriteList.
    :param hit_box_algorithm: 'None', 'Simple' or 'Detailed'.
    :param hit_box_detail: Detail level used with 'Detailed'.
    :raises ValueError: if a cell holds a GID no tileset can resolve.
    """
    sprite_list: SpriteList = SpriteList(use_spatial_hash=use_spatial_hash)
    map_array = layer.data
    for row_index, row in enumerate(map_array):
        for column_index, item in enumerate(row):
            # GID 0 means "no tile in this cell".
            if item == 0:
                continue
            tile = self._get_tile_by_gid(item)
            if tile is None:
                raise ValueError(
                    (
                        f"Couldn't find tile for item {item} in layer "
                        f"'{layer.name}' in file '{self.tiled_map.map_file}'"
                        f"at ({column_index}, {row_index})."
                    )
                )
            my_sprite = self._create_sprite_from_tile(
                tile,
                scaling=scaling,
                hit_box_algorithm=hit_box_algorithm,
                hit_box_detail=hit_box_detail,
            )
            if my_sprite is None:
                print(
                    f"Warning: Could not create sprite number {item} in layer '{layer.name}' {tile.image}"
                )
            else:
                # Tiled rows grow downward; arcade's y axis grows upward,
                # hence the map-height flip when computing center_y.
                my_sprite.center_x = (
                    column_index * (self.tiled_map.tile_size[0] * scaling)
                    + my_sprite.width / 2
                )
                my_sprite.center_y = (
                    self.tiled_map.map_size.height - row_index - 1
                ) * (self.tiled_map.tile_size[1] * scaling) + my_sprite.height / 2
                # Tint
                if layer.tint_color:
                    my_sprite.color = layer.tint_color
                # Opacity
                opacity = layer.opacity
                if opacity:
                    my_sprite.alpha = int(opacity * 255)
                sprite_list.append(my_sprite)
    return sprite_list
def _process_object_layer(
    self,
    layer: pytiled_parser.ObjectLayer,
    scaling: float = 1.0,
    use_spatial_hash: Optional[bool] = None,
    hit_box_algorithm: str = "Simple",
    hit_box_detail: float = 4.5,
) -> Tuple[Optional[SpriteList], Optional[List[TiledObject]]]:
    """Process a Tiled object layer.

    Tile objects become Sprites (collected in a SpriteList); geometric
    objects (points, rectangles, polygons/polylines, ellipses) become
    TiledObjects with their shape converted into arcade's up-pointing
    coordinate system. Either tuple element is None when empty.

    :param layer: The pytiled-parser object layer to process.
    :param scaling: Scale for positions/sizes; falls back to self.scaling if falsy.
    :param use_spatial_hash: Passed to the created SpriteList.
    :param hit_box_algorithm: 'None', 'Simple' or 'Detailed'.
    :param hit_box_detail: Detail level used with 'Detailed'.
    """
    if not scaling:
        scaling = self.scaling
    sprite_list: Optional[SpriteList] = None
    objects_list: Optional[List[TiledObject]] = []
    for cur_object in layer.tiled_objects:
        if isinstance(cur_object, pytiled_parser.tiled_object.Tile):
            # Lazily create the sprite list on the first tile object.
            if not sprite_list:
                sprite_list = SpriteList(use_spatial_hash=use_spatial_hash)
            tile = self._get_tile_by_gid(cur_object.gid)
            my_sprite = self._create_sprite_from_tile(
                tile,
                scaling=scaling,
                hit_box_algorithm=hit_box_algorithm,
                hit_box_detail=hit_box_detail,
            )
            # Tiled's y axis points down; flip into arcade's up-pointing axis.
            x = cur_object.coordinates.x * scaling
            y = (
                self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
                - cur_object.coordinates.y
            ) * scaling
            my_sprite.width = width = cur_object.size[0] * scaling
            my_sprite.height = height = cur_object.size[1] * scaling
            center_x = width / 2
            center_y = height / 2
            if cur_object.rotation:
                rotation = -math.radians(cur_object.rotation)
            else:
                rotation = 0
            # Rotate the (center_x, center_y) offset about the anchor point.
            cos_rotation = math.cos(rotation)
            sin_rotation = math.sin(rotation)
            rotated_center_x = center_x * cos_rotation - center_y * sin_rotation
            # FIX: the y component of a 2D rotation is x*sin + y*cos; the
            # first term previously used center_y instead of center_x,
            # mis-positioning rotated non-square tile objects.
            rotated_center_y = center_x * sin_rotation + center_y * cos_rotation
            my_sprite.position = (x + rotated_center_x, y + rotated_center_y)
            my_sprite.angle = math.degrees(rotation)
            if layer.tint_color:
                my_sprite.color = layer.tint_color
            opacity = layer.opacity
            if opacity:
                my_sprite.alpha = int(opacity * 255)
            # Well-known custom properties map onto sprite movement fields.
            if cur_object.properties and "change_x" in cur_object.properties:
                my_sprite.change_x = float(cur_object.properties["change_x"])
            if cur_object.properties and "change_y" in cur_object.properties:
                my_sprite.change_y = float(cur_object.properties["change_y"])
            if cur_object.properties and "boundary_bottom" in cur_object.properties:
                my_sprite.boundary_bottom = float(
                    cur_object.properties["boundary_bottom"]
                )
            if cur_object.properties and "boundary_top" in cur_object.properties:
                my_sprite.boundary_top = float(
                    cur_object.properties["boundary_top"]
                )
            if cur_object.properties and "boundary_left" in cur_object.properties:
                my_sprite.boundary_left = float(
                    cur_object.properties["boundary_left"]
                )
            if cur_object.properties and "boundary_right" in cur_object.properties:
                my_sprite.boundary_right = float(
                    cur_object.properties["boundary_right"]
                )
            if cur_object.properties:
                my_sprite.properties.update(cur_object.properties)
            if cur_object.type:
                my_sprite.properties["type"] = cur_object.type
            if cur_object.name:
                my_sprite.properties["name"] = cur_object.name
            sprite_list.append(my_sprite)
            continue
        elif isinstance(cur_object, pytiled_parser.tiled_object.Point):
            x = cur_object.coordinates.x * scaling
            y = (
                self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
                - cur_object.coordinates.y
            ) * scaling
            shape = [x, y]
        elif isinstance(cur_object, pytiled_parser.tiled_object.Rectangle):
            sx = cur_object.coordinates.x
            sy = -cur_object.coordinates.y
            ex = cur_object.coordinates.x + cur_object.size.width
            ey = -(cur_object.coordinates.y + cur_object.size.height)
            p1 = [sx, sy]
            p2 = [ex, sy]
            p3 = [ex, ey]
            p4 = [sx, ey]
            shape = [p1, p2, p3, p4]
        elif isinstance(
            cur_object, pytiled_parser.tiled_object.Polygon
        ) or isinstance(cur_object, pytiled_parser.tiled_object.Polyline):
            shape = []
            for point in cur_object.points:
                x = point.x + cur_object.coordinates.x
                y = (self.height * self.tile_height) - (
                    point.y + cur_object.coordinates.y
                )
                point = (x, y)
                shape.append(point)
            # If shape is a polyline, and it is closed, we need to remove the duplicate end point
            if shape[0][0] == shape[-1][0] and shape[0][1] == shape[-1][1]:
                shape.pop()
        elif isinstance(cur_object, pytiled_parser.tiled_object.Ellipse):
            # Approximate the ellipse with an 8-point polygon.
            hw = cur_object.size.width / 2
            hh = cur_object.size.height / 2
            cx = cur_object.coordinates.x + hw
            cy = cur_object.coordinates.y + hh
            total_steps = 8
            angles = [
                step / total_steps * 2 * math.pi for step in range(total_steps)
            ]
            shape = []
            for angle in angles:
                x = hw * math.cos(angle) + cx
                y = -(hh * math.sin(angle) + cy)
                point = [x, y]
                shape.append(point)
        else:
            continue
        if shape:
            tiled_object = TiledObject(
                shape, cur_object.properties, cur_object.name, cur_object.type
            )
            if not objects_list:
                objects_list = []
            objects_list.append(tiled_object)
    return sprite_list or None, objects_list or None
def load_tilemap(
    map_file: Union[str, Path],
    scaling: float = 1.0,
    layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
    use_spatial_hash: Optional[bool] = None,
    hit_box_algorithm: str = "Simple",
    hit_box_detail: float = 4.5,
) -> TileMap:
    """Load a Tiled .json map file and return a TileMap.

    Thin convenience wrapper around the TileMap constructor; all
    arguments are forwarded unchanged.
    """
    return TileMap(
        map_file,
        scaling,
        layer_options,
        use_spatial_hash,
        hit_box_algorithm,
        hit_box_detail,
    )
def read_tmx(map_file: Union[str, Path]) -> pytiled_parser.TiledMap:
    """Removed API stub kept so outdated code bases get a clear error.

    Always raises DeprecationWarning pointing callers at TileMap.
    """
    raise DeprecationWarning(
        "The read_tmx function has been replaced by the new TileMap class."
    )
| true
| true
|
1c4a3d45e3fd189c77bba8dbb394542a0543752f
| 303
|
py
|
Python
|
Achive/rpyc_MyService.py
|
rscd27p/DockerTest
|
aee56356f7cdaded1c6ef787e6cdf8415308c8c3
|
[
"MIT"
] | 1
|
2021-08-30T14:22:15.000Z
|
2021-08-30T14:22:15.000Z
|
Achive/rpyc_MyService.py
|
rscd27p/DockerTest
|
aee56356f7cdaded1c6ef787e6cdf8415308c8c3
|
[
"MIT"
] | null | null | null |
Achive/rpyc_MyService.py
|
rscd27p/DockerTest
|
aee56356f7cdaded1c6ef787e6cdf8415308c8c3
|
[
"MIT"
] | null | null | null |
import rpyc
import nidmm
class MyService(rpyc.Service):
    """RPyC service exposing the whole nidmm module to remote clients."""

    # The `exposed_` prefix makes this attribute reachable over RPyC.
    exposed_nidmm = nidmm


if __name__ == "__main__":
    from rpyc.utils.server import ThreadedServer

    # NOTE(review): allow_public_attrs/allow_all_attrs disable RPyC's
    # attribute protection — acceptable only on a trusted network.
    t = ThreadedServer(MyService, port = 18861, protocol_config = {"allow_public_attrs" : True, "allow_all_attrs" : True})
    t.start()
| 27.545455
| 122
| 0.712871
|
import rpyc
import nidmm
class MyService(rpyc.Service):
    """RPyC service exposing the whole nidmm module to remote clients."""

    # The `exposed_` prefix makes this attribute reachable over RPyC.
    exposed_nidmm = nidmm


if __name__ == "__main__":
    from rpyc.utils.server import ThreadedServer

    # NOTE(review): allow_public_attrs/allow_all_attrs disable RPyC's
    # attribute protection — acceptable only on a trusted network.
    t = ThreadedServer(MyService, port = 18861, protocol_config = {"allow_public_attrs" : True, "allow_all_attrs" : True})
    t.start()
| true
| true
|
1c4a3d7c38a7fb4505e3308b90c0223962601a64
| 8,701
|
py
|
Python
|
test/test.py
|
ahcorde/srdfdom
|
1fb929e4aab58f5fe889d4bf2b4e330423605632
|
[
"BSD-3-Clause"
] | null | null | null |
test/test.py
|
ahcorde/srdfdom
|
1fb929e4aab58f5fe889d4bf2b4e330423605632
|
[
"BSD-3-Clause"
] | 1
|
2020-01-13T15:58:39.000Z
|
2020-01-13T16:32:08.000Z
|
test/test.py
|
ahcorde/srdfdom
|
1fb929e4aab58f5fe889d4bf2b4e330423605632
|
[
"BSD-3-Clause"
] | 1
|
2019-12-31T16:07:31.000Z
|
2019-12-31T16:07:31.000Z
|
#!/usr/bin/env python
PKG = 'srdfdom'
import sys
import rospkg
import unittest
from srdfdom.srdf import SRDF
from xml.dom.minidom import parseString
import xml.dom
# xml match code from test_xacro.py
# by Stuart Glaser and William Woodall
def first_child_element(elt):
    """Return the first child of *elt* that is an ELEMENT_NODE, or None."""
    for node in elt.childNodes:
        if node.nodeType == xml.dom.Node.ELEMENT_NODE:
            return node
    return None
def next_sibling_element(elt):
    """Return the next sibling of *elt* that is an ELEMENT_NODE, or None."""
    node = elt
    while True:
        node = node.nextSibling
        if node is None:
            return None
        if node.nodeType == xml.dom.Node.ELEMENT_NODE:
            return node
def all_attributes_match(a, b):
    """Return True if elements *a* and *b* carry the same attributes.

    Attribute order is ignored; numeric-looking values are compared with
    a 1e-9 tolerance, everything else by exact string equality. Prints a
    diagnostic and returns False on the first difference.
    """
    if len(a.attributes) != len(b.attributes):
        print("Different number of attributes")
        return False

    def _sorted_attrs(elt):
        attrs = [elt.attributes.item(i) for i in range(len(elt.attributes))]
        return sorted((att.name, att.value) for att in attrs)

    for (a_name, a_val), (b_name, b_val) in zip(_sorted_attrs(a), _sorted_attrs(b)):
        if a_name != b_name:
            print("Different attribute names: %s and %s" % (a_name, b_name))
            return False
        try:
            if abs(float(a_val) - float(b_val)) > 1.0e-9:
                print("Different attribute values: %s and %s" % (a_val, b_val))
                return False
        except ValueError:  # Attribute values aren't numeric
            if a_val != b_val:
                print("Different attribute values: %s and %s" % (a_val, b_val))
                return False
    return True
def elements_match(a, b):
    """Recursively compare two DOM elements (and their siblings) for equality.

    Node type, tag name, attributes, children and following siblings must
    all match. Prints a diagnostic and returns False on the first mismatch.
    """
    if not a and not b:
        return True
    if not a or not b:
        return False
    if a.nodeType != b.nodeType:
        print("Different node types: %d and %d" % (a.nodeType, b.nodeType))
        return False
    if a.nodeName != b.nodeName:
        print("Different element names: %s and %s" % (a.nodeName, b.nodeName))
        return False
    # Short-circuits in the same order as the original explicit if-chain.
    return (
        all_attributes_match(a, b)
        and elements_match(first_child_element(a), first_child_element(b))
        and elements_match(next_sibling_element(a), next_sibling_element(b))
    )
def xml_matches(a, b):
    """Return True if *a* and *b* represent the same XML tree.

    Accepts strings (parsed on the fly), Documents (unwrapped to their
    root element) or element nodes. On mismatch, both trees are written
    to stdout and False is returned.
    """
    # FIX: `unicode` only exists on Python 2. Referencing it unguarded
    # raised NameError on Python 3 whenever the argument was not a str
    # (e.g. a DOM node). Build the string-type tuple defensively instead.
    try:
        string_types = (str, unicode)  # noqa: F821 — Python 2
    except NameError:
        string_types = (str,)  # Python 3: str is already unicode
    if isinstance(a, string_types):
        return xml_matches(parseString(a).documentElement, b)
    if isinstance(b, string_types):
        return xml_matches(a, parseString(b).documentElement)
    if a.nodeType == xml.dom.Node.DOCUMENT_NODE:
        return xml_matches(a.documentElement, b)
    if b.nodeType == xml.dom.Node.DOCUMENT_NODE:
        return xml_matches(a, b.documentElement)
    if not elements_match(a, b):
        print("Match failed:")
        a.writexml(sys.stdout)
        # FIX: bare `print` was a Python 2 statement; on Python 3 it was a
        # no-op expression. Emit the separator newline portably instead.
        sys.stdout.write("\n")
        print('=' * 78)
        b.writexml(sys.stdout)
        return False
    return True
## A python unit test for srdf
class TestSRDFParser(unittest.TestCase):
## test valid srdf
## test valid srdf
def test_full_srdf(self):
    """Round-trip test: parse a full-featured SRDF and re-serialize it.

    The expected XML is the same document with attributes in the order
    the serializer emits; comparison is via the order-insensitive
    xml_matches helper.
    """
    # Input exercising every element: groups, chains, group states,
    # end effectors, virtual joints, collision pairs, sphere approximations.
    srdf_data = '''
    <robot name="myrobot">
    <group name="body">
    <joint name="J1" />
    <joint name="J2" />
    <joint name="J3" />
    <chain base_link="robot_base" tip_link="robot_tip" />
    <group name="arm" />
    </group>
    <group_state name="zero" group="body">
    <joint name="J1" value="0" />
    <joint name="J2" value="0" />
    <joint name="J3" value="0" />
    </group_state>
    <end_effector name="tip_ee" parent_link="tip" group="arm" parent_group="body" />
    <end_effector name="othertip_ee" parent_link="othertip" group="arm" />
    <virtual_joint name="virtual_joint" type="floating" parent_frame="body_frame" child_link="arm" />
    <disable_collisions link1="link1" link2="link3" />
    <disable_collisions reason="Adjacent" link1="link1" link2="link2" />
    <link_sphere_approximation link="link1" />
    <link_sphere_approximation link="link2" >
    <sphere center="1.0 2.0 3.0" radius="1.0" />
    <sphere center="1.0 2.0 4.0" radius="2.0" />
    </link_sphere_approximation>
    </robot>
    '''
    expected = '''
    <robot name="myrobot">
    <group name="body">
    <joint name="J1" />
    <joint name="J2" />
    <joint name="J3" />
    <chain base_link="robot_base" tip_link="robot_tip"/>
    <group name="arm" />
    </group>
    <group_state name="zero" group="body">
    <joint name="J1" value="0" />
    <joint name="J2" value="0" />
    <joint name="J3" value="0" />
    </group_state>
    <end_effector group="arm" name="tip_ee" parent_group="body" parent_link="tip"/>
    <end_effector name="othertip_ee" parent_link="othertip" group="arm" />
    <virtual_joint child_link="arm" name="virtual_joint" parent_frame="body_frame" type="floating" />
    <disable_collisions link1="link1" link2="link3" />
    <disable_collisions link1="link1" link2="link2" reason="Adjacent" />
    <link_sphere_approximation link="link1" />
    <link_sphere_approximation link="link2" >
    <sphere center="1.0 2.0 3.0" radius="1.0" />
    <sphere center="1.0 2.0 4.0" radius="2.0" />
    </link_sphere_approximation>
    </robot>
    '''
    robot = SRDF.from_xml_string(srdf_data)
    self.assertTrue(xml_matches(robot.to_xml_string(),expected))
def test_simple_srdf(self):
    """Parse two minimal SRDF fixtures and check element counts.

    pr2_desc.1.srdf is empty of content; pr2_desc.2.srdf adds exactly
    one virtual joint and one group.
    """
    datadir=rospkg.RosPack().get_path('srdfdom')+"/test/resources/"
    # Fixture 1: a bare robot element — every collection must be empty.
    stream = open(datadir+'pr2_desc.1.srdf', 'r')
    robot = SRDF.from_xml_string(stream.read())
    stream.close()
    self.assertTrue(len(robot.virtual_joints)==0)
    self.assertTrue(len(robot.groups)==0)
    self.assertTrue(len(robot.group_states)==0)
    self.assertTrue(len(robot.disable_collisionss)==0)
    self.assertTrue(len(robot.end_effectors)==0)
    # Fixture 2: one virtual joint and one group, nothing else.
    stream = open(datadir+'pr2_desc.2.srdf', 'r')
    robot = SRDF.from_xml_string(stream.read())
    stream.close()
    self.assertTrue(len(robot.virtual_joints)==1)
    self.assertTrue(len(robot.groups)==1)
    self.assertTrue(len(robot.group_states)==0)
    self.assertTrue(len(robot.disable_collisionss)==0)
    self.assertTrue(len(robot.end_effectors)==0)
def test_complex_srdf(self):
datadir=rospkg.RosPack().get_path('srdfdom')+"/test/resources/"
stream = open(datadir+'pr2_desc.3.srdf', 'r')
robot = SRDF.from_xml_string(stream.read())
stream.close()
self.assertTrue(len(robot.virtual_joints)==1)
self.assertTrue(len(robot.groups)==7)
self.assertTrue(len(robot.group_states)==2)
self.assertTrue(len(robot.disable_collisionss)==2)
self.assertTrue(robot.disable_collisionss[0].reason=="adjacent")
self.assertTrue(len(robot.end_effectors)==2)
self.assertTrue(robot.virtual_joints[0].name=="world_joint")
self.assertTrue(robot.virtual_joints[0].type=="planar")
for group in robot.groups:
if (group.name == "left_arm" or group.name == "right_arm" ):
self.assertTrue(len(group.chains)==1)
if group.name == "arms":
self.assertTrue(len(group.subgroups)==2)
if group.name == "base":
self.assertTrue(len(group.joints)==1)
if (group.name == "l_end_effector" or group.name == "r_end_effector" ):
self.assertTrue(len(group.links)==1)
self.assertTrue(len(group.joints)==9)
if group.name == "whole_body" :
self.assertTrue(len(group.joints)==1)
self.assertTrue(len(group.subgroups)==2)
index=0
if robot.group_states[0].group !="arms":
index=1
self.assertTrue(robot.group_states[index].group =="arms")
self.assertTrue(robot.group_states[index].name =="tuck_arms")
self.assertTrue(robot.group_states[1-index].group =="base")
self.assertTrue(robot.group_states[1-index].name =="home")
v=next((joint.value for joint in robot.group_states[index].joints if joint.name=="l_shoulder_pan_joint"),None)
self.assertTrue(len(v) == 1)
self.assertTrue(v[0] ==0.2)
w=next((joint.value for joint in robot.group_states[1-index].joints if joint.name=="world_joint"),None)
self.assertTrue(len(w) == 3)
self.assertTrue(w[0] ==0.4)
self.assertTrue(w[1] ==0)
self.assertTrue(w[2] ==-1)
index = 0 if (robot.end_effectors[0].name[0] == 'r') else 1
self.assertTrue(robot.end_effectors[index].name == 'r_end_effector')
self.assertTrue(robot.end_effectors[index].group == 'r_end_effector')
self.assertTrue(robot.end_effectors[index].parent_link == 'r_wrist_roll_link')
if __name__ == '__main__':
import rostest
rostest.rosrun(PKG, 'srdf_python_parser_test', TestSRDFParser)
| 38.5
| 120
| 0.635329
|
PKG = 'srdfdom'
import sys
import rospkg
import unittest
from srdfdom.srdf import SRDF
from xml.dom.minidom import parseString
import xml.dom
def first_child_element(elt):
c = elt.firstChild
while c:
if c.nodeType == xml.dom.Node.ELEMENT_NODE:
return c
c = c.nextSibling
return None
def next_sibling_element(elt):
c = elt.nextSibling
while c:
if c.nodeType == xml.dom.Node.ELEMENT_NODE:
return c
c = c.nextSibling
return None
def all_attributes_match(a, b):
if len(a.attributes) != len(b.attributes):
print("Different number of attributes")
return False
a_atts = [(a.attributes.item(i).name, a.attributes.item(i).value) for i in range(len(a.attributes))]
b_atts = [(b.attributes.item(i).name, b.attributes.item(i).value) for i in range(len(b.attributes))]
a_atts.sort()
b_atts.sort()
for i in range(len(a_atts)):
if a_atts[i][0] != b_atts[i][0]:
print("Different attribute names: %s and %s" % (a_atts[i][0], b_atts[i][0]))
return False
try:
if abs(float(a_atts[i][1]) - float(b_atts[i][1])) > 1.0e-9:
print("Different attribute values: %s and %s" % (a_atts[i][1], b_atts[i][1]))
return False
except ValueError:
if a_atts[i][1] != b_atts[i][1]:
print("Different attribute values: %s and %s" % (a_atts[i][1], b_atts[i][1]))
return False
return True
def elements_match(a, b):
if not a and not b:
return True
if not a or not b:
return False
if a.nodeType != b.nodeType:
print("Different node types: %d and %d" % (a.nodeType, b.nodeType))
return False
if a.nodeName != b.nodeName:
print("Different element names: %s and %s" % (a.nodeName, b.nodeName))
return False
if not all_attributes_match(a, b):
return False
if not elements_match(first_child_element(a), first_child_element(b)):
return False
if not elements_match(next_sibling_element(a), next_sibling_element(b)):
return False
return True
def xml_matches(a, b):
if isinstance(a, str) or isinstance(a, unicode):
return xml_matches(parseString(a).documentElement, b)
if isinstance(b, str) or isinstance(b, unicode):
return xml_matches(a, parseString(b).documentElement)
if a.nodeType == xml.dom.Node.DOCUMENT_NODE:
return xml_matches(a.documentElement, b)
if b.nodeType == xml.dom.Node.DOCUMENT_NODE:
return xml_matches(a, b.documentElement)
if not elements_match(a, b):
print("Match failed:")
a.writexml(sys.stdout)
print
print('=' * 78)
b.writexml(sys.stdout)
return False
return True
## A python unit test for srdf
class TestSRDFParser(unittest.TestCase):
## test valid srdf
def test_full_srdf(self):
srdf_data = '''
<robot name="myrobot">
<group name="body">
<joint name="J1" />
<joint name="J2" />
<joint name="J3" />
<chain base_link="robot_base" tip_link="robot_tip" />
<group name="arm" />
</group>
<group_state name="zero" group="body">
<joint name="J1" value="0" />
<joint name="J2" value="0" />
<joint name="J3" value="0" />
</group_state>
<end_effector name="tip_ee" parent_link="tip" group="arm" parent_group="body" />
<end_effector name="othertip_ee" parent_link="othertip" group="arm" />
<virtual_joint name="virtual_joint" type="floating" parent_frame="body_frame" child_link="arm" />
<disable_collisions link1="link1" link2="link3" />
<disable_collisions reason="Adjacent" link1="link1" link2="link2" />
<link_sphere_approximation link="link1" />
<link_sphere_approximation link="link2" >
<sphere center="1.0 2.0 3.0" radius="1.0" />
<sphere center="1.0 2.0 4.0" radius="2.0" />
</link_sphere_approximation>
</robot>
'''
expected = '''
<robot name="myrobot">
<group name="body">
<joint name="J1" />
<joint name="J2" />
<joint name="J3" />
<chain base_link="robot_base" tip_link="robot_tip"/>
<group name="arm" />
</group>
<group_state name="zero" group="body">
<joint name="J1" value="0" />
<joint name="J2" value="0" />
<joint name="J3" value="0" />
</group_state>
<end_effector group="arm" name="tip_ee" parent_group="body" parent_link="tip"/>
<end_effector name="othertip_ee" parent_link="othertip" group="arm" />
<virtual_joint child_link="arm" name="virtual_joint" parent_frame="body_frame" type="floating" />
<disable_collisions link1="link1" link2="link3" />
<disable_collisions link1="link1" link2="link2" reason="Adjacent" />
<link_sphere_approximation link="link1" />
<link_sphere_approximation link="link2" >
<sphere center="1.0 2.0 3.0" radius="1.0" />
<sphere center="1.0 2.0 4.0" radius="2.0" />
</link_sphere_approximation>
</robot>
'''
robot = SRDF.from_xml_string(srdf_data)
self.assertTrue(xml_matches(robot.to_xml_string(),expected))
def test_simple_srdf(self):
datadir=rospkg.RosPack().get_path('srdfdom')+"/test/resources/"
stream = open(datadir+'pr2_desc.1.srdf', 'r')
robot = SRDF.from_xml_string(stream.read())
stream.close()
self.assertTrue(len(robot.virtual_joints)==0)
self.assertTrue(len(robot.groups)==0)
self.assertTrue(len(robot.group_states)==0)
self.assertTrue(len(robot.disable_collisionss)==0)
self.assertTrue(len(robot.end_effectors)==0)
stream = open(datadir+'pr2_desc.2.srdf', 'r')
robot = SRDF.from_xml_string(stream.read())
stream.close()
self.assertTrue(len(robot.virtual_joints)==1)
self.assertTrue(len(robot.groups)==1)
self.assertTrue(len(robot.group_states)==0)
self.assertTrue(len(robot.disable_collisionss)==0)
self.assertTrue(len(robot.end_effectors)==0)
def test_complex_srdf(self):
datadir=rospkg.RosPack().get_path('srdfdom')+"/test/resources/"
stream = open(datadir+'pr2_desc.3.srdf', 'r')
robot = SRDF.from_xml_string(stream.read())
stream.close()
self.assertTrue(len(robot.virtual_joints)==1)
self.assertTrue(len(robot.groups)==7)
self.assertTrue(len(robot.group_states)==2)
self.assertTrue(len(robot.disable_collisionss)==2)
self.assertTrue(robot.disable_collisionss[0].reason=="adjacent")
self.assertTrue(len(robot.end_effectors)==2)
self.assertTrue(robot.virtual_joints[0].name=="world_joint")
self.assertTrue(robot.virtual_joints[0].type=="planar")
for group in robot.groups:
if (group.name == "left_arm" or group.name == "right_arm" ):
self.assertTrue(len(group.chains)==1)
if group.name == "arms":
self.assertTrue(len(group.subgroups)==2)
if group.name == "base":
self.assertTrue(len(group.joints)==1)
if (group.name == "l_end_effector" or group.name == "r_end_effector" ):
self.assertTrue(len(group.links)==1)
self.assertTrue(len(group.joints)==9)
if group.name == "whole_body" :
self.assertTrue(len(group.joints)==1)
self.assertTrue(len(group.subgroups)==2)
index=0
if robot.group_states[0].group !="arms":
index=1
self.assertTrue(robot.group_states[index].group =="arms")
self.assertTrue(robot.group_states[index].name =="tuck_arms")
self.assertTrue(robot.group_states[1-index].group =="base")
self.assertTrue(robot.group_states[1-index].name =="home")
v=next((joint.value for joint in robot.group_states[index].joints if joint.name=="l_shoulder_pan_joint"),None)
self.assertTrue(len(v) == 1)
self.assertTrue(v[0] ==0.2)
w=next((joint.value for joint in robot.group_states[1-index].joints if joint.name=="world_joint"),None)
self.assertTrue(len(w) == 3)
self.assertTrue(w[0] ==0.4)
self.assertTrue(w[1] ==0)
self.assertTrue(w[2] ==-1)
index = 0 if (robot.end_effectors[0].name[0] == 'r') else 1
self.assertTrue(robot.end_effectors[index].name == 'r_end_effector')
self.assertTrue(robot.end_effectors[index].group == 'r_end_effector')
self.assertTrue(robot.end_effectors[index].parent_link == 'r_wrist_roll_link')
if __name__ == '__main__':
import rostest
rostest.rosrun(PKG, 'srdf_python_parser_test', TestSRDFParser)
| true
| true
|
1c4a3e0601d9a72176eb416461930fed869c830b
| 1,543
|
py
|
Python
|
zvt/domain/fundamental/valuation.py
|
aaron8tang/zvt
|
568cf0d42577eb05b96e1a07ec512aed34245b2d
|
[
"MIT"
] | null | null | null |
zvt/domain/fundamental/valuation.py
|
aaron8tang/zvt
|
568cf0d42577eb05b96e1a07ec512aed34245b2d
|
[
"MIT"
] | null | null | null |
zvt/domain/fundamental/valuation.py
|
aaron8tang/zvt
|
568cf0d42577eb05b96e1a07ec512aed34245b2d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from sqlalchemy import Column, String, Float
from sqlalchemy.orm import declarative_base
from zvt.contract import Mixin
from zvt.contract.register import register_schema
ValuationBase = declarative_base()
class StockValuation(ValuationBase, Mixin):
"""
股票估值相关的数据,比如PE、市净率等。
"""
__tablename__ = 'stock_valuation'
code = Column(String(length=32))
name = Column(String(length=32))
# 总股本(股)
capitalization = Column(Float)
# 公司已发行的普通股股份总数(包含A股,B股和H股的总股本)
circulating_cap = Column(Float)
# 市值
market_cap = Column(Float)
# 流通市值
circulating_market_cap = Column(Float)
# 换手率
turnover_ratio = Column(Float)
# 静态pe
pe = Column(Float)
# 动态pe
pe_ttm = Column(Float)
# 市净率
pb = Column(Float)
# 市销率
ps = Column(Float)
# 市现率
pcf = Column(Float)
class EtfValuation(ValuationBase, Mixin):
__tablename__ = 'etf_valuation'
code = Column(String(length=32))
name = Column(String(length=32))
# 静态pe
pe = Column(Float)
# 加权
pe1 = Column(Float)
# 动态pe
pe_ttm = Column(Float)
# 加权
pe_ttm1 = Column(Float)
# 市净率
pb = Column(Float)
# 加权
pb1 = Column(Float)
# 市销率
ps = Column(Float)
# 加权
ps1 = Column(Float)
# 市现率
pcf = Column(Float)
# 加权
pcf1 = Column(Float)
register_schema(providers=['joinquant'], db_name='valuation', schema_base=ValuationBase, entity_type='stock')
# the __all__ is generated
__all__ = ['StockValuation', 'EtfValuation']
| 21.732394
| 109
| 0.644848
|
from sqlalchemy import Column, String, Float
from sqlalchemy.orm import declarative_base
from zvt.contract import Mixin
from zvt.contract.register import register_schema
ValuationBase = declarative_base()
class StockValuation(ValuationBase, Mixin):
__tablename__ = 'stock_valuation'
code = Column(String(length=32))
name = Column(String(length=32))
capitalization = Column(Float)
circulating_cap = Column(Float)
market_cap = Column(Float)
circulating_market_cap = Column(Float)
turnover_ratio = Column(Float)
pe = Column(Float)
pe_ttm = Column(Float)
pb = Column(Float)
ps = Column(Float)
pcf = Column(Float)
class EtfValuation(ValuationBase, Mixin):
__tablename__ = 'etf_valuation'
code = Column(String(length=32))
name = Column(String(length=32))
pe = Column(Float)
pe1 = Column(Float)
pe_ttm = Column(Float)
pe_ttm1 = Column(Float)
pb = Column(Float)
pb1 = Column(Float)
ps = Column(Float)
ps1 = Column(Float)
pcf = Column(Float)
pcf1 = Column(Float)
register_schema(providers=['joinquant'], db_name='valuation', schema_base=ValuationBase, entity_type='stock')
__all__ = ['StockValuation', 'EtfValuation']
| true
| true
|
1c4a3e27ced94a73850bd98d2b3abc4188cff8f8
| 42
|
py
|
Python
|
frappe/public/hello.py
|
process-success/frappeModifyed
|
17031bdf5bb81d335d47aa3028a36be288bd9a1a
|
[
"MIT"
] | null | null | null |
frappe/public/hello.py
|
process-success/frappeModifyed
|
17031bdf5bb81d335d47aa3028a36be288bd9a1a
|
[
"MIT"
] | null | null | null |
frappe/public/hello.py
|
process-success/frappeModifyed
|
17031bdf5bb81d335d47aa3028a36be288bd9a1a
|
[
"MIT"
] | 1
|
2018-03-21T18:34:08.000Z
|
2018-03-21T18:34:08.000Z
|
#!/usr/bin/python2.7
print("Hello world")
| 21
| 21
| 0.690476
|
print("Hello world")
| true
| true
|
1c4a3fee5a6e45dc07ce48fd0fa054883c373055
| 144
|
py
|
Python
|
main.py
|
alwye/snake-game
|
dd35388576a4e44b0426cac67b08fed4cada7aa2
|
[
"MIT"
] | null | null | null |
main.py
|
alwye/snake-game
|
dd35388576a4e44b0426cac67b08fed4cada7aa2
|
[
"MIT"
] | null | null | null |
main.py
|
alwye/snake-game
|
dd35388576a4e44b0426cac67b08fed4cada7aa2
|
[
"MIT"
] | null | null | null |
"""
Famous snake game
"""
from settings import *
from game import SnakeGame
if __name__ == "__main__":
game = SnakeGame()
game.run()
| 12
| 26
| 0.652778
|
from settings import *
from game import SnakeGame
if __name__ == "__main__":
game = SnakeGame()
game.run()
| true
| true
|
1c4a41169b1b0f2b1b6e98f644def8bec59c8f6f
| 616
|
py
|
Python
|
tests/test_replace_intensity.py
|
elsandal/pyclesperanto_prototype
|
7bda828813b86b44b63d73d5e8f466d9769cded1
|
[
"BSD-3-Clause"
] | 64
|
2020-03-18T12:11:22.000Z
|
2022-03-31T08:19:18.000Z
|
tests/test_replace_intensity.py
|
elsandal/pyclesperanto_prototype
|
7bda828813b86b44b63d73d5e8f466d9769cded1
|
[
"BSD-3-Clause"
] | 148
|
2020-05-14T06:14:11.000Z
|
2022-03-26T15:02:31.000Z
|
tests/test_replace_intensity.py
|
elsandal/pyclesperanto_prototype
|
7bda828813b86b44b63d73d5e8f466d9769cded1
|
[
"BSD-3-Clause"
] | 16
|
2020-05-31T00:53:44.000Z
|
2022-03-23T13:20:57.000Z
|
import pyclesperanto_prototype as cle
import numpy as np
def test_replace_intensity():
test1 = cle.push(np.asarray([
[0, 0, 0, 0, 0],
[0, 1, 2, 3, 0],
[0, 2, 3, 4, 0],
[0, 4, 4, 5, 0],
[0, 0, 0, 0, 0]
]))
reference = cle.push(np.asarray([
[0, 0, 0, 0, 0],
[0, 1, 8, 3, 0],
[0, 8, 3, 4, 0],
[0, 4, 4, 5, 0],
[0, 0, 0, 0, 0]
]))
result = cle.create(test1)
cle.replace_intensity(test1, result, 2, 8)
print(result)
a = cle.pull(result)
b = cle.pull(reference)
assert (np.allclose(a, b, 0.001))
| 20.533333
| 46
| 0.465909
|
import pyclesperanto_prototype as cle
import numpy as np
def test_replace_intensity():
test1 = cle.push(np.asarray([
[0, 0, 0, 0, 0],
[0, 1, 2, 3, 0],
[0, 2, 3, 4, 0],
[0, 4, 4, 5, 0],
[0, 0, 0, 0, 0]
]))
reference = cle.push(np.asarray([
[0, 0, 0, 0, 0],
[0, 1, 8, 3, 0],
[0, 8, 3, 4, 0],
[0, 4, 4, 5, 0],
[0, 0, 0, 0, 0]
]))
result = cle.create(test1)
cle.replace_intensity(test1, result, 2, 8)
print(result)
a = cle.pull(result)
b = cle.pull(reference)
assert (np.allclose(a, b, 0.001))
| true
| true
|
1c4a42653316a4744277d052b7656cb48a32dd27
| 6,951
|
py
|
Python
|
api/batch_processing/postprocessing/combine_api_outputs.py
|
alsnothome/CameraTraps
|
fddd64d5e1a941bb46557bec09af7091da334cf4
|
[
"MIT"
] | null | null | null |
api/batch_processing/postprocessing/combine_api_outputs.py
|
alsnothome/CameraTraps
|
fddd64d5e1a941bb46557bec09af7091da334cf4
|
[
"MIT"
] | 1
|
2021-02-24T00:17:21.000Z
|
2021-02-24T00:17:21.000Z
|
api/batch_processing/postprocessing/combine_api_outputs.py
|
isabella232/CameraTraps
|
8a01a191cd061deac3aa8ab9edf89b210a89a0be
|
[
"MIT"
] | null | null | null |
"""
Merges two or more .json files in batch API output format, optionally
writing the results to another .json file.
- Concatenates image lists, erroring if images are not unique.
- Errors if class lists are conflicting; errors on unrecognized fields.
- Checks compatibility in info structs, within reason.
File format:
https://github.com/microsoft/CameraTraps/tree/master/api/batch_processing#batch-processing-api-output-format
Command-line use:
combine_api_outputs input1.json input2.json ... inputN.json output.json
Also see combine_api_shard_files() (not exposed via the command line yet) to
combine the intermediate files created by the API.
"""
#%% Constants and imports
import argparse
import json
from typing import Any, Dict, Iterable, Mapping, List, Optional
#%% Merge functions
def combine_api_output_files(input_files: List[str],
output_file: Optional[str] = None,
require_uniqueness: bool = True
) -> Dict[str, Any]:
"""Merges list of JSON API detection files *input_files* into a single
dictionary, optionally writing the result to *output_file*.
Args:
input_files: list of str, paths to JSON detection files
output_file: optional str, path to write merged JSON
require_uniqueness: bool, TODO
"""
input_dicts = []
print('Loading input files')
for fn in input_files:
with open(fn, 'r') as f:
input_dicts.append(json.load(f))
print('Merging results')
merged_dict = combine_api_output_dictionaries(
input_dicts, require_uniqueness=require_uniqueness)
print('Writing output')
if output_file is not None:
with open(output_file, 'w') as f:
json.dump(merged_dict, f, indent=1)
return merged_dict
def combine_api_output_dictionaries(input_dicts: Iterable[Mapping[str, Any]],
require_uniqueness: bool = True
) -> Dict[str, Any]:
"""Merges the list of API detection dictionaries *input_dicts*. See header
comment for details on merge rules.
Args:
input_dicts: list of dicts, each dict is the JSON of the detections
output file from the Batch Processing API
require_uniqueness: bool, whether to require that the images in
each input_dict be unique
Returns: dict, represents the merged JSON
"""
# Map image filenames to detections, we'll convert to a list later
images = {}
info: Dict[str, str] = {}
detection_categories: Dict[str, str] = {}
classification_categories: Dict[str, str] = {}
n_redundant_images = 0
n_images = 0
known_fields = ['info', 'detection_categories', 'classification_categories',
'images']
for input_dict in input_dicts:
for k in input_dict:
if k not in known_fields:
raise ValueError(f'Unrecognized API output field: {k}')
# Check compatibility of detection categories
for cat_id in input_dict['detection_categories']:
cat_name = input_dict['detection_categories'][cat_id]
if cat_id in detection_categories:
assert detection_categories[cat_id] == cat_name, (
'Detection category mismatch')
else:
detection_categories[cat_id] = cat_name
# Check compatibility of classification categories
if 'classification_categories' in input_dict:
for cat_id in input_dict['classification_categories']:
cat_name = input_dict['classification_categories'][cat_id]
if cat_id in classification_categories:
assert classification_categories[cat_id] == cat_name, (
'Classification category mismatch')
else:
classification_categories[cat_id] = cat_name
# Merge image lists, checking uniqueness
for im in input_dict['images']:
im_file = im['file']
if require_uniqueness:
assert im_file not in images, f'Duplicate image: {im_file}'
elif im_file in images:
n_redundant_images += 1
# print(f'Warning, duplicate results for image: {im_file}')
images[im_file] = im
n_images += 1
# Merge info dicts, don't check completion time fields
if len(info) == 0:
info = input_dict['info']
else:
info_compare = input_dict['info']
assert info_compare['detector'] == info['detector'], (
'Incompatible detection versions in merging')
assert info_compare['format_version'] == info['format_version'], (
'Incompatible API output versions in merging')
if 'classifier' in info_compare:
if 'classifier' in info:
assert info['classifier'] == info_compare['classifier']
else:
info['classifier'] = info_compare['classifier']
# ...for each dictionary
if n_redundant_images > 0:
print(f'Warning: found {n_redundant_images} redundant images '
f'(out of {n_images} total) during merge')
# Convert merged image dictionaries to a sorted list
sorted_images = sorted(images.values(), key=lambda im: im['file'])
merged_dict = {'info': info,
'detection_categories': detection_categories,
'classification_categories': classification_categories,
'images': sorted_images}
return merged_dict
def combine_api_shard_files(input_files, output_file=None):
"""
Merges the list of .json-formatted API shard files *input_files* into a single
list of dictionaries, optionally writing the result to *output_file*.
"""
input_lists = []
print('Loading input files')
for fn in input_files:
input_lists.append(json.load(open(fn)))
detections = []
# detection_list = input_lists[0]
for detection_list in input_lists:
assert isinstance(detection_list, list)
# d = detection_list[0]
for d in detection_list:
assert 'file' in d
assert 'max_detection_conf' in d
assert 'detections' in d
detections.extend([d])
print('Writing output')
if output_file is not None:
with open(output_file, 'w') as f:
json.dump(detections, f, indent=1)
return detections
#%% Driver
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'input_paths', nargs='+',
help='List of input .json files')
parser.add_argument(
'output_path',
help='Output .json file')
args = parser.parse_args()
combine_api_output_files(args.input_paths, args.output_path)
if __name__ == '__main__':
main()
| 35.464286
| 108
| 0.630269
|
import argparse
import json
from typing import Any, Dict, Iterable, Mapping, List, Optional
def combine_api_output_files(input_files: List[str],
output_file: Optional[str] = None,
require_uniqueness: bool = True
) -> Dict[str, Any]:
input_dicts = []
print('Loading input files')
for fn in input_files:
with open(fn, 'r') as f:
input_dicts.append(json.load(f))
print('Merging results')
merged_dict = combine_api_output_dictionaries(
input_dicts, require_uniqueness=require_uniqueness)
print('Writing output')
if output_file is not None:
with open(output_file, 'w') as f:
json.dump(merged_dict, f, indent=1)
return merged_dict
def combine_api_output_dictionaries(input_dicts: Iterable[Mapping[str, Any]],
require_uniqueness: bool = True
) -> Dict[str, Any]:
images = {}
info: Dict[str, str] = {}
detection_categories: Dict[str, str] = {}
classification_categories: Dict[str, str] = {}
n_redundant_images = 0
n_images = 0
known_fields = ['info', 'detection_categories', 'classification_categories',
'images']
for input_dict in input_dicts:
for k in input_dict:
if k not in known_fields:
raise ValueError(f'Unrecognized API output field: {k}')
# Check compatibility of detection categories
for cat_id in input_dict['detection_categories']:
cat_name = input_dict['detection_categories'][cat_id]
if cat_id in detection_categories:
assert detection_categories[cat_id] == cat_name, (
'Detection category mismatch')
else:
detection_categories[cat_id] = cat_name
# Check compatibility of classification categories
if 'classification_categories' in input_dict:
for cat_id in input_dict['classification_categories']:
cat_name = input_dict['classification_categories'][cat_id]
if cat_id in classification_categories:
assert classification_categories[cat_id] == cat_name, (
'Classification category mismatch')
else:
classification_categories[cat_id] = cat_name
# Merge image lists, checking uniqueness
for im in input_dict['images']:
im_file = im['file']
if require_uniqueness:
assert im_file not in images, f'Duplicate image: {im_file}'
elif im_file in images:
n_redundant_images += 1
# print(f'Warning, duplicate results for image: {im_file}')
images[im_file] = im
n_images += 1
# Merge info dicts, don't check completion time fields
if len(info) == 0:
info = input_dict['info']
else:
info_compare = input_dict['info']
assert info_compare['detector'] == info['detector'], (
'Incompatible detection versions in merging')
assert info_compare['format_version'] == info['format_version'], (
'Incompatible API output versions in merging')
if 'classifier' in info_compare:
if 'classifier' in info:
assert info['classifier'] == info_compare['classifier']
else:
info['classifier'] = info_compare['classifier']
if n_redundant_images > 0:
print(f'Warning: found {n_redundant_images} redundant images '
f'(out of {n_images} total) during merge')
sorted_images = sorted(images.values(), key=lambda im: im['file'])
merged_dict = {'info': info,
'detection_categories': detection_categories,
'classification_categories': classification_categories,
'images': sorted_images}
return merged_dict
def combine_api_shard_files(input_files, output_file=None):
input_lists = []
print('Loading input files')
for fn in input_files:
input_lists.append(json.load(open(fn)))
detections = []
for detection_list in input_lists:
assert isinstance(detection_list, list)
for d in detection_list:
assert 'file' in d
assert 'max_detection_conf' in d
assert 'detections' in d
detections.extend([d])
print('Writing output')
if output_file is not None:
with open(output_file, 'w') as f:
json.dump(detections, f, indent=1)
return detections
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'input_paths', nargs='+',
help='List of input .json files')
parser.add_argument(
'output_path',
help='Output .json file')
args = parser.parse_args()
combine_api_output_files(args.input_paths, args.output_path)
if __name__ == '__main__':
main()
| true
| true
|
1c4a43254129f6dce18adb2aab2ca72a6cf10471
| 3,836
|
py
|
Python
|
tests/core/contracts/test_contract_method_to_argument_matching.py
|
y19818/web3.py
|
32a85a287ab63220d1e0c06d77be74de595ff02f
|
[
"MIT"
] | null | null | null |
tests/core/contracts/test_contract_method_to_argument_matching.py
|
y19818/web3.py
|
32a85a287ab63220d1e0c06d77be74de595ff02f
|
[
"MIT"
] | null | null | null |
tests/core/contracts/test_contract_method_to_argument_matching.py
|
y19818/web3.py
|
32a85a287ab63220d1e0c06d77be74de595ff02f
|
[
"MIT"
] | null | null | null |
import json
import pytest
from web3._utils.abi import (
get_abi_input_types,
)
from web3._utils.function_identifiers import (
FallbackFn,
)
from web3.exceptions import (
ValidationError,
)
SINGLE_FN_NO_ARGS = json.loads('[{"constant":false,"inputs":[],"name":"a","outputs":[],"type":"function"}]') # noqa: E501
SINGLE_FN_ONE_ARG = json.loads('[{"constant":false,"inputs":[{"name":"","type":"uint256"}],"name":"a","outputs":[],"type":"function"}]') # noqa: E501
FALLBACK_FUNCTION = json.loads('[{"constant": false, "inputs": [], "name": "getData", "outputs": [{"name": "r", "type": "uint256"}], "payable": false, "stateMutability": "nonpayable", "type": "function"}, {"payable": false, "stateMutability": "nonpayable", "type": "fallback"}]') # noqa: E501
MULTIPLE_FUNCTIONS = json.loads('''
[
{
"constant": false,
"inputs": [],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "bytes32"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "uint256"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "uint8"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "int8"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "tuple[]",
"components": [
{"name": "", "type": "int256"},
{"name": "", "type": "bool"}
]
}
],
"name": "a",
"outputs": [],
"type": "function"
}
]
''')
def test_finds_single_function_without_args(web3):
Contract = web3.vns.contract(abi=SINGLE_FN_NO_ARGS)
abi = Contract._find_matching_fn_abi('a', [])
assert abi['name'] == 'a'
assert abi['inputs'] == []
def test_finds_single_function_with_args(web3):
Contract = web3.vns.contract(abi=SINGLE_FN_ONE_ARG)
abi = Contract._find_matching_fn_abi('a', [1234])
assert abi['name'] == 'a'
assert len(abi['inputs']) == 1
assert abi['inputs'][0]['type'] == 'uint256'
def test_finds_fallback_function(web3):
Contract = web3.vns.contract(abi=FALLBACK_FUNCTION)
abi = Contract._find_matching_fn_abi(FallbackFn, [])
assert abi['type'] == 'fallback'
def test_error_when_no_function_name_match(web3):
Contract = web3.vns.contract(abi=SINGLE_FN_NO_ARGS)
with pytest.raises(ValidationError):
Contract._find_matching_fn_abi('no_function_name', [1234])
@pytest.mark.parametrize(
'arguments,expected_types',
(
([], []),
([b'arst'], ['bytes32']),
(['0xf00b47'], ['bytes32']),
([1234567890], ['uint256']),
# ([255], ['uint8']), # TODO: enable
([-1], ['int8']),
([[(-1, True), (2, False)]], ['(int256,bool)[]']),
)
)
def test_finds_function_with_matching_args(web3, arguments, expected_types):
Contract = web3.vns.contract(abi=MULTIPLE_FUNCTIONS)
abi = Contract._find_matching_fn_abi('a', arguments)
assert abi['name'] == 'a'
assert len(abi['inputs']) == len(expected_types)
assert set(get_abi_input_types(abi)) == set(expected_types)
def test_error_when_duplicate_match(web3):
Contract = web3.vns.contract(abi=MULTIPLE_FUNCTIONS)
with pytest.raises(ValidationError):
Contract._find_matching_fn_abi('a', [100])
| 25.403974
| 294
| 0.539103
|
import json
import pytest
from web3._utils.abi import (
get_abi_input_types,
)
from web3._utils.function_identifiers import (
FallbackFn,
)
from web3.exceptions import (
ValidationError,
)
SINGLE_FN_NO_ARGS = json.loads('[{"constant":false,"inputs":[],"name":"a","outputs":[],"type":"function"}]')
SINGLE_FN_ONE_ARG = json.loads('[{"constant":false,"inputs":[{"name":"","type":"uint256"}],"name":"a","outputs":[],"type":"function"}]')
FALLBACK_FUNCTION = json.loads('[{"constant": false, "inputs": [], "name": "getData", "outputs": [{"name": "r", "type": "uint256"}], "payable": false, "stateMutability": "nonpayable", "type": "function"}, {"payable": false, "stateMutability": "nonpayable", "type": "fallback"}]')
MULTIPLE_FUNCTIONS = json.loads('''
[
{
"constant": false,
"inputs": [],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "bytes32"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "uint256"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "uint8"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "int8"
}
],
"name": "a",
"outputs": [],
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "",
"type": "tuple[]",
"components": [
{"name": "", "type": "int256"},
{"name": "", "type": "bool"}
]
}
],
"name": "a",
"outputs": [],
"type": "function"
}
]
''')
def test_finds_single_function_without_args(web3):
Contract = web3.vns.contract(abi=SINGLE_FN_NO_ARGS)
abi = Contract._find_matching_fn_abi('a', [])
assert abi['name'] == 'a'
assert abi['inputs'] == []
def test_finds_single_function_with_args(web3):
Contract = web3.vns.contract(abi=SINGLE_FN_ONE_ARG)
abi = Contract._find_matching_fn_abi('a', [1234])
assert abi['name'] == 'a'
assert len(abi['inputs']) == 1
assert abi['inputs'][0]['type'] == 'uint256'
def test_finds_fallback_function(web3):
Contract = web3.vns.contract(abi=FALLBACK_FUNCTION)
abi = Contract._find_matching_fn_abi(FallbackFn, [])
assert abi['type'] == 'fallback'
def test_error_when_no_function_name_match(web3):
Contract = web3.vns.contract(abi=SINGLE_FN_NO_ARGS)
with pytest.raises(ValidationError):
Contract._find_matching_fn_abi('no_function_name', [1234])
@pytest.mark.parametrize(
    'arguments,expected_types',
    (
        # No arguments -> the zero-input overload.
        ([], []),
        # Raw bytes and hex strings are both encodable only as bytes32 here.
        ([b'arst'], ['bytes32']),
        (['0xf00b47'], ['bytes32']),
        # Too large for uint8/int8, so only uint256 matches.
        ([1234567890], ['uint256']),
        # Negative values are encodable only by the signed overload.
        # (Restored: this case was garbled to a bare "['int8'])," which was a
        # syntax error in the parametrize tuple.)
        ([-1], ['int8']),
        # A list of (int, bool) pairs matches the (int256,bool)[] overload.
        ([[(-1, True), (2, False)]], ['(int256,bool)[]']),
    )
)
def test_finds_function_with_matching_args(web3, arguments, expected_types):
    """Each argument list selects exactly one 'a' overload from MULTIPLE_FUNCTIONS.

    The resolved ABI must carry the expected input types.
    """
    Contract = web3.vns.contract(abi=MULTIPLE_FUNCTIONS)
    abi = Contract._find_matching_fn_abi('a', arguments)
    assert abi['name'] == 'a'
    assert len(abi['inputs']) == len(expected_types)
    assert set(get_abi_input_types(abi)) == set(expected_types)
def test_error_when_duplicate_match(web3):
    """An argument encodable by several overloads raises ValidationError."""
    contract_factory = web3.vns.contract(abi=MULTIPLE_FUNCTIONS)
    with pytest.raises(ValidationError):
        # 100 fits uint256, uint8 and int8 alike -> ambiguous.
        contract_factory._find_matching_fn_abi('a', [100])
| true
| true
|
1c4a437b86d7bfb9b1e7b2ed2f1ba8e9cb370973
| 754
|
py
|
Python
|
setup.py
|
aaalgo/calign
|
7e96f4c0df85200dec260e603711172486107f23
|
[
"MIT"
] | null | null | null |
setup.py
|
aaalgo/calign
|
7e96f4c0df85200dec260e603711172486107f23
|
[
"MIT"
] | null | null | null |
setup.py
|
aaalgo/calign
|
7e96f4c0df85200dec260e603711172486107f23
|
[
"MIT"
] | null | null | null |
from distutils.core import setup, Extension

# Native extension module. The original bound this Extension to the name
# `picpac`, leaving `calign` undefined and crashing setup() with a NameError;
# bind it to `calign` to match the ext_modules entry below.
calign = Extension('_calign',
        language = 'c++',
        extra_compile_args = ['-O3', '-std=c++1y'],
        include_dirs = ['/usr/local/include'],
        libraries = ['opencv_highgui', 'opencv_core', 'boost_filesystem', 'boost_system', 'boost_python'],
        library_dirs = ['/usr/local/lib'],
        sources = ['python-api.cpp'],
        depends = ['calign.h'])

setup (name = 'calign',
       version = '0.0.1',
       url = 'https://github.com/aaalgo/calign',
       author = 'Wei Dong',
       author_email = 'wdong@wdong.org',
       license = 'BSD',
       description = 'This is a demo package',
       ext_modules = [calign],
       # NOTE(review): these modules belong to the unrelated `picpac` package —
       # likely a leftover from a copied setup.py; confirm before publishing.
       py_modules = ['picpac.mxnet', 'picpac.neon'],
       )
| 34.272727
| 106
| 0.570292
|
from distutils.core import setup, Extension
picpac = Extension('_calign',
language = 'c++',
extra_compile_args = ['-O3', '-std=c++1y'],
include_dirs = ['/usr/local/include'],
libraries = ['opencv_highgui', 'opencv_core', 'boost_filesystem', 'boost_system', 'boost_python'],
library_dirs = ['/usr/local/lib'],
sources = ['python-api.cpp'],
depends = ['calign.h'])
setup (name = 'calign',
version = '0.0.1',
url = 'https://github.com/aaalgo/calign',
author = 'Wei Dong',
author_email = 'wdong@wdong.org',
license = 'BSD',
description = 'This is a demo package',
ext_modules = [calign],
py_modules = ['picpac.mxnet', 'picpac.neon'],
)
| true
| true
|
1c4a43914b6834384b739b37fc8cbf4a1fededbb
| 25,191
|
py
|
Python
|
faker/providers/job/da_DK/__init__.py
|
tristanHdez18/faker
|
14cb25712e6efcb7bf8d9f30f404a7304722af6d
|
[
"MIT"
] | null | null | null |
faker/providers/job/da_DK/__init__.py
|
tristanHdez18/faker
|
14cb25712e6efcb7bf8d9f30f404a7304722af6d
|
[
"MIT"
] | 4
|
2022-02-04T17:24:59.000Z
|
2022-03-29T20:02:57.000Z
|
faker/providers/job/da_DK/__init__.py
|
tristanHdez18/faker
|
14cb25712e6efcb7bf8d9f30f404a7304722af6d
|
[
"MIT"
] | null | null | null |
from .. import Provider as BaseProvider
class Provider(BaseProvider):
    """Danish (da_DK) job-title provider.

    Source: https://star.dk/it/saadan-arbejder-vi-med-it-i-styrelsen/oversigt-over-stillingsbetegnelser-og-kvalifikationer/. # NOQA
    """
    # Flat list of Danish job titles; presumably sampled uniformly by the base
    # provider's job() method — confirm against faker.providers.job.
    jobs = [
        "Lastvognsmekanikerlærling",
        "Knallertmekaniker",
        "Møbelarkitekt",
        "Forsyningsoperatørelev",
        "Hospitalsfysiker",
        "Økonomicontroller",
        "Revisor",
        "Skatterevisor",
        "Kontrollør",
        "Musikpædagog",
        "Pantefoged",
        "Serveringsmedarbejder",
        "Maskinmesteraspirant",
        "Sygehusdirektør",
        "Laborant",
        "Overlæge",
        "Designassistent",
        "Teknisk chef",
        "Socialformidler",
        "Overassistent",
        "Pædagogisk assistent",
        "Pedel",
        "Kustode",
        "Pædagogmedhjælper",
        "Projektmedarbejder",
        "Pedelmedhjælper",
        "Museumsbetjent",
        "Molekylærbiolog",
        "Lærervikar",
        "Sognehjælper",
        "Lysdesigner",
        "Instruktørassistent",
        "Teatertekniker",
        "Researcher",
        "Redaktør",
        "Teknisk designer",
        "Ressourceleder",
        "Indkøbschef",
        "E-commerce manager",
        "Kontraktchef",
        "Produktchef",
        "Museumsinspektør",
        "Kurator",
        "Konservator",
        "Modelkonstruktør",
        "Kommunikationschef",
        "Forskningschef",
        "Skovrider",
        "Fiskeriteknolog",
        "Produktionschef",
        "Driftsleder",
        "Direktør",
        "Officer",
        "Sergent",
        "IT-ingeniør",
        "IT-arkitekt",
        "IT-revisor",
        "Programmør og systemudvikler",
        "UX designer",
        "Webredaktør",
        "Webudvikler",
        "Datakonsulent",
        "Idrætsinstruktør og -konsulent",
        "Efterretningsofficer",
        "Miljøkonsulent",
        "Campingpladsbestyrer",
        "Miljøkoordinator",
        "Grafisk tekniker",
        "Elektrotekniker",
        "Vindmølleoperatør",
        "Urmager",
        "Byplanlægger",
        "Trafikplanlægger",
        "GIS-medarbejder",
        "Illustrator",
        "Mediegrafiker",
        "Artdirector",
        "Multimediedesigner",
        "Praktiserende læge",
        "Speciallæge",
        "Struktør",
        "Pakkerimedarbejder",
        "Cykelbud",
        "Fabriksbager",
        "Møller",
        "Guld- og sølvsmed",
        "Ciselør",
        "Produktionsleder inden for film og teater",
        "Centerleder",
        "Lufthavnschef",
        "Kameramand",
        "Tonemester",
        "Studietekniker",
        "Eventtekniker",
        "Produktionstekniker",
        "Fødevareteknolog",
        "Brygmester",
        "Specialist i biomedicin",
        "Botaniker",
        "Biokemiker",
        "Havbiolog",
        "Fysiolog",
        "Planteforædler",
        "Skoleleder",
        "Døvekonsulent",
        "Import- og eksportmedarbejder",
        "Friskolelærer",
        "Au pair",
        "Børnepasser",
        "Landbrugsmaskinemekaniker",
        "Trafikinformationsmedarbejder",
        "Togfører",
        "Guide",
        "Kok",
        "Vært i restaurant",
        "Tjener",
        "Bartender",
        "Korrekturlæser",
        "Postfunktionær",
        "Biblioteksassistent",
        "Telefonist",
        "Kundeservicemedarbejder",
        "Natportier",
        "Interviewer",
        "Vekselbureaumedarbejder",
        "Skattefunktionær",
        "Forsikringsfunktionær",
        "Revisorassistent",
        "Lønbogholder",
        "Lagerforvalter",
        "Overstyrmand",
        "Flyklarerer",
        "Marketingmedarbejder",
        "Kreativ chef",
        "Miljøanalytiker",
        "Naturvejleder",
        "Procesingeniør",
        "Logistiker",
        "Bankdirektør",
        "Civilingeniør",
        "Miljøingeniør",
        "Maskiningeniør",
        "Værkstedsleder",
        "Programdirektør",
        "Lystekniker",
        "IT-supporter",
        "IT-tekniker",
        "IT-kvalitetsmedarbejder",
        "Korleder",
        "Marketingchef",
        "Destinationschef",
        "Ordblindelærer",
        "Kursusleder",
        "Produktspecialist",
        "Områdechef",
        "Rengøringsinspektør",
        "Smedelærling",
        "Stenhuggerlærling",
        "Shippingmedarbejder",
        "Lager- og logistikelev",
        "Stukkatørlærling",
        "Automekanikerlærling",
        "Beklædningshåndværkerelev",
        "Butikselev",
        "Datateknikerelev",
        "Industrislagterlærling",
        "Shippingassistent",
        "Konditorlærling",
        "Gulvlæggerlærling",
        "Køleteknikerlærling",
        "Bygningstruktørlærling",
        "Rustfast industrimontør",
        "Værktøjsmagerlærling",
        "Industriteknikerlærling",
        "Vagtcentralassistent",
        "Juridisk chef",
        "Kunstlærer",
        "Lærer på skuespillerskole",
        "Asfaltør",
        "Jordemoder",
        "Erhvervsskolelærer",
        "Personalekonsulent",
        "Job- og virksomhedskonsulent",
        "Tekstforfatter",
        "Virksomhedsudvikler",
        "Byggeleder",
        "Departementschef",
        "Politidirektør",
        "Diplomat",
        "Generalsekretær",
        "Leder af offentlig forvaltning",
        "Konstabel",
        "Speditør",
        "Flyttearbejder",
        "Lager- og logistikmedarbejder",
        "Havnearbejder",
        "Anlægsarbejder",
        "Slagteriarbejder",
        "Fiskeindustriarbejder",
        "Industrislagter",
        "Slagtermester",
        "Bager",
        "Konditor",
        "Mejeriarbejder",
        "Mejerist",
        "Familievejleder",
        "Socialfaglig leder",
        "HR-konsulent",
        "SSP-medarbejder",
        "Havnefoged",
        "Lufthavnsoperatør",
        "Assistent til salgssupport",
        "Frisør",
        "Model",
        "Demonstratør",
        "Call centermedarbejder",
        "Viceskoleleder",
        "Ortopædiskomager",
        "Fiskeribetjent",
        "Indkøber",
        "Massageterapeut",
        "Levnedsmiddelinspektør",
        "Ambulancefører",
        "Paramediciner",
        "Kunstformidler",
        "Arkivar",
        "Registrar",
        "Bibliotekar",
        "Økonom",
        "Antropolog",
        "Arkæolog",
        "Motorcykelmekanikerlærling",
        "Skibsmekanikerlærling",
        "Landbrugsmaskine-mekanikerlærling",
        "VVS-lærling",
        "Privatpraktiserende tandplejer",
        "Glarmesterlærling",
        "Ejendomsserviceteknikerelev",
        "Audiologiassistentelev",
        "Dyrepasserelev",
        "Tømrerlærling",
        "Autolakererlærling",
        "Bygningsmalerlærling",
        "Automatikteknikerelev",
        "Skorstensfejerlærling",
        "Bagerlærling",
        "Vagtcentralmedarbejder",
        "Murerlærling",
        "Elektrikerlærling",
        "Rørlægger",
        "Flymekanikerlærling",
        "Cykelmekanikerlærling",
        "Skibsmontørlærling",
        "Bygningssnedkerlærling",
        "Studentermedhjælp",
        "Redder",
        "Chaufførelev",
        "Slagterlærling",
        "Tagdækkerlærling",
        "Organist",
        "Sagsbehandler",
        "Databaseadministrator",
        "Bankrådgiver",
        "Realkreditmedarbejder",
        "Bogholder",
        "Bogholderi- og regnskabsassistent",
        "Assurandør",
        "Valuar",
        "Taksator",
        "Hardware-udvikler",
        "Medicoingeniør",
        "Sensortekniker",
        "Boghandler",
        "Ekspedient",
        "Fiskehandler",
        "Farvehandler",
        "Blomsterbinder",
        "Delikatesseassistent",
        "Farmakonom",
        "Serviceøkonom",
        "SOME-medarbejder",
        "Pressesekretær",
        "Fundraiser",
        "Kampagnemedarbejder",
        "Kommunikationskonsulent",
        "IT-konsulent",
        "IT-direktør",
        "IT-chef",
        "IT-dokumentationschef",
        "Chief data officer",
        "IT-projektleder",
        "Børne- og ungekoordinator",
        "Leder af børne- og ungdomsklub",
        "Børsmægler",
        "Lagerekspedient",
        "Sommelier",
        "Levnedsmiddelingeniør",
        "Vagt",
        "Dørmand",
        "Barista",
        "Tekster",
        "Flyinstruktør",
        "Helikopterfører",
        "Flymaskinist",
        "Klimaforsker",
        "Handelsskolelærer",
        "Møbelpolstrer",
        "Børneværnskonsulent",
        "Klargører",
        "Klubmedarbejder",
        "Kontorchef",
        "Koordinator",
        "Efterskoleforstander",
        "Vicerektor",
        "Politisk medarbejder",
        "Politisk konsulent",
        "Kommunal planlægger",
        "Fuldmægtig",
        "Rådgivende konsulent",
        "Business intelligence manager",
        "Økonomiassistent",
        "Finansanalytiker",
        "Gymnasielærer",
        "Folkeskolelærer",
        "Pædagog",
        "Studiesekretær",
        "Speciallærer",
        "Fotografmedhjælper",
        "Erhvervsdykker",
        "Danselærer",
        "Geograf",
        "Kriminolog",
        "Sociolog",
        "Historiker",
        "Filosof",
        "Socialrådgiver",
        "Politolog",
        "Psykolog",
        "Socialarbejder",
        "Socialpædagog",
        "Præst",
        "Geotekniker",
        "Svejseinspektør",
        "Designer",
        "Merchandiser",
        "Visual merchandiser",
        "Scenograf",
        "Tandklinikassistent",
        "Tandplejer",
        "Keramiker",
        "Gravør",
        "Kunstner",
        "Tegner",
        "Garver",
        "Landinspektør",
        "Byggemontagetekniker",
        "Brolægger",
        "Forskningsbibliotekar",
        "Anlægsgartnerarbejder",
        "Cafemedarbejder",
        "Kontorleder",
        "Farmakonomelev",
        "Rejsebureauelev",
        "Tandplejerelev",
        "Tandteknikerelev",
        "Frisørelev",
        "Receptionistelev",
        "Vejrvært",
        "Arrangementchef",
        "Udviklingschef",
        "Indretningsarkitekt",
        "Autoteknolog",
        "Butiksassistent",
        "Skolepædagog",
        "Social- og sundhedsassistent",
        "Social- og sundhedshjælper",
        "Kasseassistent",
        "Levnedsmiddeltekniker",
        "Maskinsnedker",
        "Møbelsnedker",
        "Automationsingeniør",
        "Produktionsmedarbejder",
        "Byggetekniker",
        "Reklamechef",
        "Sproglærer",
        "Tegnsprogslærer",
        "Energiingeniør",
        "Dagtilbudsleder",
        "Vuggestueleder",
        "Plejehjemsleder",
        "Kommunikationskoordinator",
        "Brandchef",
        "Flysikkerhedschef",
        "Miljø- og sikkerhedschef",
        "Bibliotekschef",
        "Museumsleder",
        "Kunstnerisk leder",
        "Kundeservicechef",
        "Rigsarkivar",
        "Flymekaniker",
        "Skibsmekaniker",
        "Entreprenørmaskinemekaniker",
        "Kranmekaniker",
        "Industrimekaniker",
        "Cykelmekaniker",
        "Skorstensfejer",
        "Industrilakerer",
        "Autolakerer",
        "Murer",
        "Stenhugger",
        "Betonmager",
        "Køkkenmontør",
        "Tømrer",
        "Skov- og naturteknikerelev",
        "Lægemiddelkonsulent",
        "Bevægelsespædagog",
        "Ernæringsassistent",
        "Ungdomsskolelærer",
        "PAU-elev",
        "IT-underviser",
        "VUC-lærer",
        "Uddannelses- og erhvervsvejleder",
        "Finansrådgiver",
        "Investeringsrådgiver",
        "Musiklærer",
        "Hotelchef",
        "Butikschef",
        "Regionschef",
        "Teaterteknikerelev",
        "Speditørelev",
        "IT-supporterelev",
        "Politielev",
        "Vindmølleoperatørelev",
        "Gartnerelev",
        "Ortopædielev",
        "Fotografelev",
        "Film- og tvproduktionselev",
        "Procesoperatørelev",
        "Optikerelev",
        "Radio- og TV-fagteknikerelev",
        "Handelselev",
        "Elektronikoperatørelev",
        "Toldelev",
        "Plastmagerelev",
        "Social- og sundhedshjælperelev",
        "Grafikerelev",
        "Forsikringselev",
        "Revisorelev",
        "Shippingelev",
        "Regnskabselev",
        "Tjenerelev",
        "Finmekanikerelev",
        "Oliefyrsteknikerelev",
        "Urmagerelev",
        "Redderelev",
        "Teleteknikerelev",
        "Industrioperatørelev",
        "Landbrugselev",
        "Kosmetologelev",
        "Asfaltørelev",
        "Kontorelev",
        "Fitnessinstruktørelev",
        "Møbelsnedkerelev",
        "Serviceassistentelev",
        "Mejerielev",
        "Ernæringsassistentelev",
        "Neurofysiologiassistentelev",
        "Kostumier",
        "Buntmager",
        "Parykmager",
        "Skrædder",
        "Skomager",
        "Bore- og udvindingsarbejder",
        "Offshorearbejder",
        "Ordrebehandler",
        "Reservedelsekspedient",
        "Oldfrue",
        "Vicevært",
        "Ledsager",
        "Bedemandsassistent",
        "Bedemand",
        "Graver",
        "Kosmetolog",
        "Stylist",
        "Negletekniker",
        "Massør",
        "Tekstildesigner",
        "Kostumedesigner",
        "Institutleder på universitet",
        "Rektor",
        "Information- og videnchef IT",
        "Dokumentationsmedarbejder",
        "Efterskolelærer",
        "Dagplejer",
        "Yogalærer",
        "Dommerfuldmægtig",
        "Hotelmedarbejder",
        "Dagplejeleder",
        "Kordegn",
        "FGU-lærer",
        "Værkstedsassistent",
        "Økonoma",
        "Artist",
        "Væksthusgartner",
        "Gartneriarbejder",
        "Chef for gartneriproduktion",
        "Anlægsgartner",
        "Staldmester",
        "Støberitekniker",
        "Skibsbygger",
        "Svejser",
        "Klejnsmed",
        "Laboratorieleder",
        "Skovfoged",
        "Fiskeassistent",
        "Fisker",
        "Skytte",
        "Landmand",
        "Pelsdyravler",
        "Fængselsfunktionær",
        "Livredder",
        "Nødhjælpsarbejder",
        "Parkeringsvagt",
        "Kældermester",
        "Fødevareinspektør",
        "Grossist",
        "Varemægler",
        "Skibsfører",
        "Lods",
        "Skibsmaskinist",
        "Maskinmester",
        "Maskinassistent",
        "Mejeritekniker",
        "Produktionsteknolog",
        "Produktionsleder",
        "Værkfører",
        "Fysiker",
        "Astronom",
        "Metrolog",
        "Meteorolog",
        "Kemiker",
        "Geofysiker",
        "Geolog",
        "Statistiker",
        "Aktuar",
        "Demograf",
        "Matematiker",
        "Farmakolog",
        "Biolog",
        "Skovbrugsrådgiver",
        "Landbrugskonsulent",
        "Agronom",
        "Sagsadministrator",
        "Detektiv",
        "Kontormedhjælper",
        "Sekretær",
        "Tasteoperatør",
        "Bankassistent",
        "Croupier",
        "Av tekniker",
        "Tekniker radio- og TV-udsendelser",
        "Webmaster",
        "Garderobeassistent",
        "Butiksdetektiv",
        "Beklædningsdesigner",
        "Psykoterapeut",
        "Klinisk psykolog",
        "Produktionsingeniør",
        "Regionsdirektør",
        "Havearkitekt",
        "Salgs- og kundeansvarlig",
        "Systemadministrator",
        "IT-sikkerhedskonsulent",
        "Eventmanager",
        "Eventassistent",
        "Ejendomsadministrator",
        "Ejendomsmægler",
        "Reklamekonsulent",
        "Auktionsleder",
        "Musiker",
        "Danser",
        "Koreograf",
        "Kirketjener",
        "Driftschef",
        "Chefkonsulent",
        "Turismechef",
        "Brandinspektør",
        "Testingeniør",
        "Materialetekniker",
        "Kemiingeniør",
        "Økonomichef",
        "Cykelhandler",
        "Bagermester",
        "Politifuldmægtig",
        "Musikterapeut",
        "Kvalitetsingeniør",
        "Hundetræner",
        "Beslagsmed",
        "Teatermedarbejder",
        "Scenefunktionær",
        "Sikkerhedschef",
        "Plade- og konstruktionssmed",
        "Smed",
        "Finmekaniker",
        "Værktøjsmager",
        "Modelsnedker",
        "Låsesmed",
        "Hundefører",
        "Medarbejder på et dyreinternat",
        "Kørelærer",
        "Instrumentbygger",
        "Lydtekniker",
        "Tandklinikassistentelev",
        "Museumsmedhjælper",
        "Bådebyggerlærling",
        "Teknisk isolatørelev",
        "VVS-montør",
        "Blikkenslager",
        "Galvanisør",
        "Bådebygger",
        "Lastvognsmekaniker",
        "Knallertmekanikerlærling",
        "Laboratorietekniker",
        "Skibsmontør",
        "Manuskriptforfatter",
        "Teknisk kommunikator",
        "Vulkanisør",
        "Veterinærsygeplejerske",
        "Inseminør",
        "Drejer",
        "CNC-operatør",
        "Jern- og metalsliber",
        "Karosserismed",
        "Automekaniker",
        "Dækmontør",
        "Mekaniker",
        "Filmklipper",
        "Producer",
        "Skuespiller",
        "Jordbrugsteknolog",
        "Miljøtekniker",
        "Kort- og landmålingstekniker",
        "Fræser",
        "Transportchef",
        "Porcelænsmaler",
        "Robottekniker",
        "Personalechef",
        "Programchef",
        "Chefstrateg",
        "Facility manager",
        "Administrationschef",
        "Kvalitetschef",
        "Kontorfuldmægtig",
        "Advokatsekretær",
        "Direktionssekretær",
        "Redaktionssekretær",
        "Lægesekretær",
        "Administrativ lægesekretær",
        "Tolder",
        "Teletekniker",
        "Elektrofagtekniker",
        "Pottemager",
        "Glarmester",
        "Glasmager",
        "Skiltemaler",
        "Klaverstemmer",
        "Kranfører",
        "Truckfører",
        "Sadelmager",
        "Trykkeriarbejder",
        "Tekstiltrykker",
        "Elektriker",
        "Dirigent",
        "Korsanger",
        "Ligestillings- og inklusionschef",
        "Akupunktør",
        "Orgelbygger",
        "Personlig træner",
        "Forlystelsesmedarbejder",
        "Renseriassistent",
        "Redaktionschef",
        "Distributionschef",
        "Lagerchef",
        "Import- og eksportchef",
        "Supply chain manager",
        "Elektronikarbejder",
        "Lokomotivfører",
        "Togklargører",
        "Taxichauffør",
        "Risikoanalytiker",
        "Værdipapiranalytiker",
        "Forsikringsanalytiker",
        "Investeringsanalytiker",
        "Sceneinstruktør",
        "Caster",
        "Filminstruktør",
        "Stilladsmontør",
        "Nedriver",
        "Brandmand",
        "Tagdækker",
        "Stukkatør",
        "Isolatør",
        "Kloakrørlægger",
        "Kloakmester",
        "VVS-installatør",
        "Gastekniker",
        "HVAC-tekniker",
        "Arbejdsmiljøkonsulent",
        "Fysioterapeut",
        "Ernærings- og sundhedskonsulent",
        "Audiolog",
        "Logopæd",
        "Øjenlæge",
        "Radiograf",
        "Kiropraktor",
        "Ergoterapeut",
        "Elektroingeniør",
        "Fodterapeut",
        "Alternativ behandler",
        "Lektor",
        "Oversygeplejerske",
        "Specialsygeplejerske",
        "Sygeplejerske",
        "Elektronikingeniør",
        "Telekommunikationsingeniør",
        "Arkitekt",
        "Landskabsarkitekt",
        "Industriel designer",
        "Vaskeriassistent",
        "Bryggeriarbejder",
        "Datalog",
        "Psykiatrisk sygeplejerske",
        "Ortopædist",
        "Designteknolog",
        "Skibsmægler",
        "Medicotekniker",
        "Finanschef",
        "Regnskabschef",
        "Filmfotograf",
        "Guitarbygger",
        "Frisør inden for teater- og TV-branchen",
        "Hundefrisør",
        "Skilærer",
        "Misbrugsbehandler",
        "VVS-tekniker",
        "Maskinkonstruktør",
        "Skibsingeniør",
        "Lægesekretærelev",
        "Social- og sundhedsassistentelev",
        "Filmklipperelev",
        "Laborantelev",
        "Elektronikfagtekniker-elev",
        "Finanselev",
        "Kokkeelev",
        "Guld- og sølvsmedelev",
        "Maskinsnedkerelev",
        "Teknisk designerelev",
        "Uddannelsesleder",
        "Kørselsleder",
        "Greenkeeper",
        "Kunsthåndværker",
        "Neurofysiologiassistent",
        "Ministerialbetjent",
        "Sekretariatschef",
        "Plejehjemsmedhjælper",
        "Psykomotorisk terapeut",
        "Sundhedsplejerske",
        "TV-producer",
        "Sejlmager",
        "Smørrebrødsjomfru",
        "Sanglærer",
        "Rengøringsassistent",
        "Vinduespudser",
        "Bademester",
        "Maskinfører",
        "Buschauffør",
        "Procesoperatør",
        "Bygningsmaler",
        "Flisemontør",
        "Gulvlægger",
        "Tæppemontør",
        "Hospitalsserviceassistent",
        "Arkivmedarbejder",
        "HR-assistent",
        "Korrespondent",
        "Purser",
        "Rideskoleassistent",
        "Dyrepasser",
        "Køkkenmedhjælper",
        "Opvasker",
        "Omdeler",
        "Renovationsarbejder",
        "Gadefejer",
        "Måleraflæser",
        "Pizzabager",
        "Fastfood ekspedient",
        "Butiksmedhjælper",
        "Landbrugsmedhjælper",
        "Gartner",
        "Skovarbejder",
        "Dambrugsarbejder",
        "Politiinspektør",
        "Speditionsleder",
        "Bygningsingeniør",
        "Energikonsulent",
        "Elektronikfagtekniker",
        "Lingvist",
        "Tegnsprogstolk",
        "Oversætter",
        "Tolk",
        "Journalist",
        "Dramaturg",
        "Forlagsredaktør",
        "Advokat",
        "Dommer",
        "Notar",
        "Jurist",
        "Produktudvikler",
        "Industritekniker",
        "Laboratorieassistent",
        "Biomediciner",
        "Apotekerassistent",
        "Apotekstekniker",
        "Ortopædiingeniør",
        "Klinisk tandtekniker",
        "Dyreklinikassistent",
        "Flyveleder",
        "Flyveklarerer",
        "Pilot",
        "Rejsekonsulent",
        "Trafikassistent",
        "Billetsælger",
        "Rejsebureaumedarbejder",
        "Medarbejder på turistkontor",
        "Inkassomedarbejder",
        "Specialtandlæge",
        "Dyrlæge",
        "Tandlæge",
        "Apoteker",
        "CSR-ansvarlig",
        "Projektleder",
        "Afdelingsleder",
        "Salgsdirektør",
        "Bygningskonstruktør",
        "Afdelingschef",
        "Fodermester",
        "Havneassistent",
        "Farmaceut",
        "Artdirector assistent",
        "Professor",
        "Faglærer",
        "Automontør",
        "Familieplejer",
        "Blomsterdekoratør",
        "Sundhedsøkonom",
        "Bilsynsassistent",
        "Badeassistent",
        "Businesscontroller",
        "Specialkonsulent",
        "FVU-lærer",
        "Bageriarbejder",
        "Ridelærer",
        "Fitness-instruktør",
        "Optiker",
        "Fotograf",
        "Fotojournalist",
        "Køkkenchef",
        "Regissør",
        "Salgschef",
        "Elinstallatør",
        "Skolekonsulent",
        "Læge",
        "Byggesagsbehandler",
        "Økologikonsulent",
        "Restaurantchef",
        "Cater",
        "Adjunkt",
        "Faglig konsulent",
        "Forsorgsmedarbejder",
        "Pædagogisk konsulent",
        "Sygehuslæge",
        "Hospitalsmedhjælper",
        "Kirkesanger",
        "Kantineleder",
        "Fagkonsulent",
        "Handicaphjælper",
        "Aftenskolelærer",
        "Projektkoordinator",
        "Ligestillingskonsulent",
        "Brolæggerarbejde",
        "Bygningsstruktør",
        "Oliefyrstekniker",
        "Motorcykelmekaniker",
        "Buschaufførelev",
        "Minkfarmmedhjælper",
        "Procesteknolog",
        "Rengøringsassistent i transportmidler",
        "Butiks- og detailslagter",
        "Audiologiassistent",
        "Skiltemalerlærling",
        "Rengøringsassistent i kontor",
        "Ventilationstekniker",
        "Skibsassistent",
        "AV teknikerelev",
        "Højskolelærer",
        "Detailhandelselev",
        "Forretningsudvikler",
        "Vinkyper",
        "Kulturmedarbejder",
        "Zoneterapeut",
        "Styrmand",
        "Turistchef",
        "Anæstesisygeplejerske",
        "Støttepædagog",
        "Salgskonsulent",
        "Eventkoordinatorelev",
        "Bygningssnedker",
        "Finansmedarbejder",
        "Kursuskoordinator",
        "Automatiktekniker",
        "Bioanalytiker",
        "Klubpædagog",
        "Vagtcentralleder",
        "Flyteknikner",
        "Forsyningsoperatør",
        "Account manager",
        "Datatekniker",
        "Logistikchef",
        "Tale-hørelærer",
        "Plastmager",
        "IT-produktchef",
        "Erhvervsanalytiker",
        "Halinspektør",
        "Maskinoperatør",
        "Kommunikationsmedarbejder",
        "Anlægsstruktør",
        "Filmtekniker",
        "Elektronikfagteknikerelev",
        "Servicetekniker",
        "Mejeriingeniør",
        "Poder",
        "Advokatfuldmægtig",
        "Omsorgshjælper",
        "Kvalitetsmedarbejder",
        "Forlagskonsulent",
        "Flyteknikerlærling",
        "Skov- og naturtekniker",
        "Skolesekretær",
        "IT-produktejer",
        "Kontorassistent",
        "Udviklingskonsulent",
        "Pædagogisk faglig koordinator",
    ]
| 26.158879
| 132
| 0.534596
|
from .. import Provider as BaseProvider
class Provider(BaseProvider):
jobs = [
"Lastvognsmekanikerlærling",
"Knallertmekaniker",
"Møbelarkitekt",
"Forsyningsoperatørelev",
"Hospitalsfysiker",
"Økonomicontroller",
"Revisor",
"Skatterevisor",
"Kontrollør",
"Musikpædagog",
"Pantefoged",
"Serveringsmedarbejder",
"Maskinmesteraspirant",
"Sygehusdirektør",
"Laborant",
"Overlæge",
"Designassistent",
"Teknisk chef",
"Socialformidler",
"Overassistent",
"Pædagogisk assistent",
"Pedel",
"Kustode",
"Pædagogmedhjælper",
"Projektmedarbejder",
"Pedelmedhjælper",
"Museumsbetjent",
"Molekylærbiolog",
"Lærervikar",
"Sognehjælper",
"Lysdesigner",
"Instruktørassistent",
"Teatertekniker",
"Researcher",
"Redaktør",
"Teknisk designer",
"Ressourceleder",
"Indkøbschef",
"E-commerce manager",
"Kontraktchef",
"Produktchef",
"Museumsinspektør",
"Kurator",
"Konservator",
"Modelkonstruktør",
"Kommunikationschef",
"Forskningschef",
"Skovrider",
"Fiskeriteknolog",
"Produktionschef",
"Driftsleder",
"Direktør",
"Officer",
"Sergent",
"IT-ingeniør",
"IT-arkitekt",
"IT-revisor",
"Programmør og systemudvikler",
"UX designer",
"Webredaktør",
"Webudvikler",
"Datakonsulent",
"Idrætsinstruktør og -konsulent",
"Efterretningsofficer",
"Miljøkonsulent",
"Campingpladsbestyrer",
"Miljøkoordinator",
"Grafisk tekniker",
"Elektrotekniker",
"Vindmølleoperatør",
"Urmager",
"Byplanlægger",
"Trafikplanlægger",
"GIS-medarbejder",
"Illustrator",
"Mediegrafiker",
"Artdirector",
"Multimediedesigner",
"Praktiserende læge",
"Speciallæge",
"Struktør",
"Pakkerimedarbejder",
"Cykelbud",
"Fabriksbager",
"Møller",
"Guld- og sølvsmed",
"Ciselør",
"Produktionsleder inden for film og teater",
"Centerleder",
"Lufthavnschef",
"Kameramand",
"Tonemester",
"Studietekniker",
"Eventtekniker",
"Produktionstekniker",
"Fødevareteknolog",
"Brygmester",
"Specialist i biomedicin",
"Botaniker",
"Biokemiker",
"Havbiolog",
"Fysiolog",
"Planteforædler",
"Skoleleder",
"Døvekonsulent",
"Import- og eksportmedarbejder",
"Friskolelærer",
"Au pair",
"Børnepasser",
"Landbrugsmaskinemekaniker",
"Trafikinformationsmedarbejder",
"Togfører",
"Guide",
"Kok",
"Vært i restaurant",
"Tjener",
"Bartender",
"Korrekturlæser",
"Postfunktionær",
"Biblioteksassistent",
"Telefonist",
"Kundeservicemedarbejder",
"Natportier",
"Interviewer",
"Vekselbureaumedarbejder",
"Skattefunktionær",
"Forsikringsfunktionær",
"Revisorassistent",
"Lønbogholder",
"Lagerforvalter",
"Overstyrmand",
"Flyklarerer",
"Marketingmedarbejder",
"Kreativ chef",
"Miljøanalytiker",
"Naturvejleder",
"Procesingeniør",
"Logistiker",
"Bankdirektør",
"Civilingeniør",
"Miljøingeniør",
"Maskiningeniør",
"Værkstedsleder",
"Programdirektør",
"Lystekniker",
"IT-supporter",
"IT-tekniker",
"IT-kvalitetsmedarbejder",
"Korleder",
"Marketingchef",
"Destinationschef",
"Ordblindelærer",
"Kursusleder",
"Produktspecialist",
"Områdechef",
"Rengøringsinspektør",
"Smedelærling",
"Stenhuggerlærling",
"Shippingmedarbejder",
"Lager- og logistikelev",
"Stukkatørlærling",
"Automekanikerlærling",
"Beklædningshåndværkerelev",
"Butikselev",
"Datateknikerelev",
"Industrislagterlærling",
"Shippingassistent",
"Konditorlærling",
"Gulvlæggerlærling",
"Køleteknikerlærling",
"Bygningstruktørlærling",
"Rustfast industrimontør",
"Værktøjsmagerlærling",
"Industriteknikerlærling",
"Vagtcentralassistent",
"Juridisk chef",
"Kunstlærer",
"Lærer på skuespillerskole",
"Asfaltør",
"Jordemoder",
"Erhvervsskolelærer",
"Personalekonsulent",
"Job- og virksomhedskonsulent",
"Tekstforfatter",
"Virksomhedsudvikler",
"Byggeleder",
"Departementschef",
"Politidirektør",
"Diplomat",
"Generalsekretær",
"Leder af offentlig forvaltning",
"Konstabel",
"Speditør",
"Flyttearbejder",
"Lager- og logistikmedarbejder",
"Havnearbejder",
"Anlægsarbejder",
"Slagteriarbejder",
"Fiskeindustriarbejder",
"Industrislagter",
"Slagtermester",
"Bager",
"Konditor",
"Mejeriarbejder",
"Mejerist",
"Familievejleder",
"Socialfaglig leder",
"HR-konsulent",
"SSP-medarbejder",
"Havnefoged",
"Lufthavnsoperatør",
"Assistent til salgssupport",
"Frisør",
"Model",
"Demonstratør",
"Call centermedarbejder",
"Viceskoleleder",
"Ortopædiskomager",
"Fiskeribetjent",
"Indkøber",
"Massageterapeut",
"Levnedsmiddelinspektør",
"Ambulancefører",
"Paramediciner",
"Kunstformidler",
"Arkivar",
"Registrar",
"Bibliotekar",
"Økonom",
"Antropolog",
"Arkæolog",
"Motorcykelmekanikerlærling",
"Skibsmekanikerlærling",
"Landbrugsmaskine-mekanikerlærling",
"VVS-lærling",
"Privatpraktiserende tandplejer",
"Glarmesterlærling",
"Ejendomsserviceteknikerelev",
"Audiologiassistentelev",
"Dyrepasserelev",
"Tømrerlærling",
"Autolakererlærling",
"Bygningsmalerlærling",
"Automatikteknikerelev",
"Skorstensfejerlærling",
"Bagerlærling",
"Vagtcentralmedarbejder",
"Murerlærling",
"Elektrikerlærling",
"Rørlægger",
"Flymekanikerlærling",
"Cykelmekanikerlærling",
"Skibsmontørlærling",
"Bygningssnedkerlærling",
"Studentermedhjælp",
"Redder",
"Chaufførelev",
"Slagterlærling",
"Tagdækkerlærling",
"Organist",
"Sagsbehandler",
"Databaseadministrator",
"Bankrådgiver",
"Realkreditmedarbejder",
"Bogholder",
"Bogholderi- og regnskabsassistent",
"Assurandør",
"Valuar",
"Taksator",
"Hardware-udvikler",
"Medicoingeniør",
"Sensortekniker",
"Boghandler",
"Ekspedient",
"Fiskehandler",
"Farvehandler",
"Blomsterbinder",
"Delikatesseassistent",
"Farmakonom",
"Serviceøkonom",
"SOME-medarbejder",
"Pressesekretær",
"Fundraiser",
"Kampagnemedarbejder",
"Kommunikationskonsulent",
"IT-konsulent",
"IT-direktør",
"IT-chef",
"IT-dokumentationschef",
"Chief data officer",
"IT-projektleder",
"Børne- og ungekoordinator",
"Leder af børne- og ungdomsklub",
"Børsmægler",
"Lagerekspedient",
"Sommelier",
"Levnedsmiddelingeniør",
"Vagt",
"Dørmand",
"Barista",
"Tekster",
"Flyinstruktør",
"Helikopterfører",
"Flymaskinist",
"Klimaforsker",
"Handelsskolelærer",
"Møbelpolstrer",
"Børneværnskonsulent",
"Klargører",
"Klubmedarbejder",
"Kontorchef",
"Koordinator",
"Efterskoleforstander",
"Vicerektor",
"Politisk medarbejder",
"Politisk konsulent",
"Kommunal planlægger",
"Fuldmægtig",
"Rådgivende konsulent",
"Business intelligence manager",
"Økonomiassistent",
"Finansanalytiker",
"Gymnasielærer",
"Folkeskolelærer",
"Pædagog",
"Studiesekretær",
"Speciallærer",
"Fotografmedhjælper",
"Erhvervsdykker",
"Danselærer",
"Geograf",
"Kriminolog",
"Sociolog",
"Historiker",
"Filosof",
"Socialrådgiver",
"Politolog",
"Psykolog",
"Socialarbejder",
"Socialpædagog",
"Præst",
"Geotekniker",
"Svejseinspektør",
"Designer",
"Merchandiser",
"Visual merchandiser",
"Scenograf",
"Tandklinikassistent",
"Tandplejer",
"Keramiker",
"Gravør",
"Kunstner",
"Tegner",
"Garver",
"Landinspektør",
"Byggemontagetekniker",
"Brolægger",
"Forskningsbibliotekar",
"Anlægsgartnerarbejder",
"Cafemedarbejder",
"Kontorleder",
"Farmakonomelev",
"Rejsebureauelev",
"Tandplejerelev",
"Tandteknikerelev",
"Frisørelev",
"Receptionistelev",
"Vejrvært",
"Arrangementchef",
"Udviklingschef",
"Indretningsarkitekt",
"Autoteknolog",
"Butiksassistent",
"Skolepædagog",
"Social- og sundhedsassistent",
"Social- og sundhedshjælper",
"Kasseassistent",
"Levnedsmiddeltekniker",
"Maskinsnedker",
"Møbelsnedker",
"Automationsingeniør",
"Produktionsmedarbejder",
"Byggetekniker",
"Reklamechef",
"Sproglærer",
"Tegnsprogslærer",
"Energiingeniør",
"Dagtilbudsleder",
"Vuggestueleder",
"Plejehjemsleder",
"Kommunikationskoordinator",
"Brandchef",
"Flysikkerhedschef",
"Miljø- og sikkerhedschef",
"Bibliotekschef",
"Museumsleder",
"Kunstnerisk leder",
"Kundeservicechef",
"Rigsarkivar",
"Flymekaniker",
"Skibsmekaniker",
"Entreprenørmaskinemekaniker",
"Kranmekaniker",
"Industrimekaniker",
"Cykelmekaniker",
"Skorstensfejer",
"Industrilakerer",
"Autolakerer",
"Murer",
"Stenhugger",
"Betonmager",
"Køkkenmontør",
"Tømrer",
"Skov- og naturteknikerelev",
"Lægemiddelkonsulent",
"Bevægelsespædagog",
"Ernæringsassistent",
"Ungdomsskolelærer",
"PAU-elev",
"IT-underviser",
"VUC-lærer",
"Uddannelses- og erhvervsvejleder",
"Finansrådgiver",
"Investeringsrådgiver",
"Musiklærer",
"Hotelchef",
"Butikschef",
"Regionschef",
"Teaterteknikerelev",
"Speditørelev",
"IT-supporterelev",
"Politielev",
"Vindmølleoperatørelev",
"Gartnerelev",
"Ortopædielev",
"Fotografelev",
"Film- og tvproduktionselev",
"Procesoperatørelev",
"Optikerelev",
"Radio- og TV-fagteknikerelev",
"Handelselev",
"Elektronikoperatørelev",
"Toldelev",
"Plastmagerelev",
"Social- og sundhedshjælperelev",
"Grafikerelev",
"Forsikringselev",
"Revisorelev",
"Shippingelev",
"Regnskabselev",
"Tjenerelev",
"Finmekanikerelev",
"Oliefyrsteknikerelev",
"Urmagerelev",
"Redderelev",
"Teleteknikerelev",
"Industrioperatørelev",
"Landbrugselev",
"Kosmetologelev",
"Asfaltørelev",
"Kontorelev",
"Fitnessinstruktørelev",
"Møbelsnedkerelev",
"Serviceassistentelev",
"Mejerielev",
"Ernæringsassistentelev",
"Neurofysiologiassistentelev",
"Kostumier",
"Buntmager",
"Parykmager",
"Skrædder",
"Skomager",
"Bore- og udvindingsarbejder",
"Offshorearbejder",
"Ordrebehandler",
"Reservedelsekspedient",
"Oldfrue",
"Vicevært",
"Ledsager",
"Bedemandsassistent",
"Bedemand",
"Graver",
"Kosmetolog",
"Stylist",
"Negletekniker",
"Massør",
"Tekstildesigner",
"Kostumedesigner",
"Institutleder på universitet",
"Rektor",
"Information- og videnchef IT",
"Dokumentationsmedarbejder",
"Efterskolelærer",
"Dagplejer",
"Yogalærer",
"Dommerfuldmægtig",
"Hotelmedarbejder",
"Dagplejeleder",
"Kordegn",
"FGU-lærer",
"Værkstedsassistent",
"Økonoma",
"Artist",
"Væksthusgartner",
"Gartneriarbejder",
"Chef for gartneriproduktion",
"Anlægsgartner",
"Staldmester",
"Støberitekniker",
"Skibsbygger",
"Svejser",
"Klejnsmed",
"Laboratorieleder",
"Skovfoged",
"Fiskeassistent",
"Fisker",
"Skytte",
"Landmand",
"Pelsdyravler",
"Fængselsfunktionær",
"Livredder",
"Nødhjælpsarbejder",
"Parkeringsvagt",
"Kældermester",
"Fødevareinspektør",
"Grossist",
"Varemægler",
"Skibsfører",
"Lods",
"Skibsmaskinist",
"Maskinmester",
"Maskinassistent",
"Mejeritekniker",
"Produktionsteknolog",
"Produktionsleder",
"Værkfører",
"Fysiker",
"Astronom",
"Metrolog",
"Meteorolog",
"Kemiker",
"Geofysiker",
"Geolog",
"Statistiker",
"Aktuar",
"Demograf",
"Matematiker",
"Farmakolog",
"Biolog",
"Skovbrugsrådgiver",
"Landbrugskonsulent",
"Agronom",
"Sagsadministrator",
"Detektiv",
"Kontormedhjælper",
"Sekretær",
"Tasteoperatør",
"Bankassistent",
"Croupier",
"Av tekniker",
"Tekniker radio- og TV-udsendelser",
"Webmaster",
"Garderobeassistent",
"Butiksdetektiv",
"Beklædningsdesigner",
"Psykoterapeut",
"Klinisk psykolog",
"Produktionsingeniør",
"Regionsdirektør",
"Havearkitekt",
"Salgs- og kundeansvarlig",
"Systemadministrator",
"IT-sikkerhedskonsulent",
"Eventmanager",
"Eventassistent",
"Ejendomsadministrator",
"Ejendomsmægler",
"Reklamekonsulent",
"Auktionsleder",
"Musiker",
"Danser",
"Koreograf",
"Kirketjener",
"Driftschef",
"Chefkonsulent",
"Turismechef",
"Brandinspektør",
"Testingeniør",
"Materialetekniker",
"Kemiingeniør",
"Økonomichef",
"Cykelhandler",
"Bagermester",
"Politifuldmægtig",
"Musikterapeut",
"Kvalitetsingeniør",
"Hundetræner",
"Beslagsmed",
"Teatermedarbejder",
"Scenefunktionær",
"Sikkerhedschef",
"Plade- og konstruktionssmed",
"Smed",
"Finmekaniker",
"Værktøjsmager",
"Modelsnedker",
"Låsesmed",
"Hundefører",
"Medarbejder på et dyreinternat",
"Kørelærer",
"Instrumentbygger",
"Lydtekniker",
"Tandklinikassistentelev",
"Museumsmedhjælper",
"Bådebyggerlærling",
"Teknisk isolatørelev",
"VVS-montør",
"Blikkenslager",
"Galvanisør",
"Bådebygger",
"Lastvognsmekaniker",
"Knallertmekanikerlærling",
"Laboratorietekniker",
"Skibsmontør",
"Manuskriptforfatter",
"Teknisk kommunikator",
"Vulkanisør",
"Veterinærsygeplejerske",
"Inseminør",
"Drejer",
"CNC-operatør",
"Jern- og metalsliber",
"Karosserismed",
"Automekaniker",
"Dækmontør",
"Mekaniker",
"Filmklipper",
"Producer",
"Skuespiller",
"Jordbrugsteknolog",
"Miljøtekniker",
"Kort- og landmålingstekniker",
"Fræser",
"Transportchef",
"Porcelænsmaler",
"Robottekniker",
"Personalechef",
"Programchef",
"Chefstrateg",
"Facility manager",
"Administrationschef",
"Kvalitetschef",
"Kontorfuldmægtig",
"Advokatsekretær",
"Direktionssekretær",
"Redaktionssekretær",
"Lægesekretær",
"Administrativ lægesekretær",
"Tolder",
"Teletekniker",
"Elektrofagtekniker",
"Pottemager",
"Glarmester",
"Glasmager",
"Skiltemaler",
"Klaverstemmer",
"Kranfører",
"Truckfører",
"Sadelmager",
"Trykkeriarbejder",
"Tekstiltrykker",
"Elektriker",
"Dirigent",
"Korsanger",
"Ligestillings- og inklusionschef",
"Akupunktør",
"Orgelbygger",
"Personlig træner",
"Forlystelsesmedarbejder",
"Renseriassistent",
"Redaktionschef",
"Distributionschef",
"Lagerchef",
"Import- og eksportchef",
"Supply chain manager",
"Elektronikarbejder",
"Lokomotivfører",
"Togklargører",
"Taxichauffør",
"Risikoanalytiker",
"Værdipapiranalytiker",
"Forsikringsanalytiker",
"Investeringsanalytiker",
"Sceneinstruktør",
"Caster",
"Filminstruktør",
"Stilladsmontør",
"Nedriver",
"Brandmand",
"Tagdækker",
"Stukkatør",
"Isolatør",
"Kloakrørlægger",
"Kloakmester",
"VVS-installatør",
"Gastekniker",
"HVAC-tekniker",
"Arbejdsmiljøkonsulent",
"Fysioterapeut",
"Ernærings- og sundhedskonsulent",
"Audiolog",
"Logopæd",
"Øjenlæge",
"Radiograf",
"Kiropraktor",
"Ergoterapeut",
"Elektroingeniør",
"Fodterapeut",
"Alternativ behandler",
"Lektor",
"Oversygeplejerske",
"Specialsygeplejerske",
"Sygeplejerske",
"Elektronikingeniør",
"Telekommunikationsingeniør",
"Arkitekt",
"Landskabsarkitekt",
"Industriel designer",
"Vaskeriassistent",
"Bryggeriarbejder",
"Datalog",
"Psykiatrisk sygeplejerske",
"Ortopædist",
"Designteknolog",
"Skibsmægler",
"Medicotekniker",
"Finanschef",
"Regnskabschef",
"Filmfotograf",
"Guitarbygger",
"Frisør inden for teater- og TV-branchen",
"Hundefrisør",
"Skilærer",
"Misbrugsbehandler",
"VVS-tekniker",
"Maskinkonstruktør",
"Skibsingeniør",
"Lægesekretærelev",
"Social- og sundhedsassistentelev",
"Filmklipperelev",
"Laborantelev",
"Elektronikfagtekniker-elev",
"Finanselev",
"Kokkeelev",
"Guld- og sølvsmedelev",
"Maskinsnedkerelev",
"Teknisk designerelev",
"Uddannelsesleder",
"Kørselsleder",
"Greenkeeper",
"Kunsthåndværker",
"Neurofysiologiassistent",
"Ministerialbetjent",
"Sekretariatschef",
"Plejehjemsmedhjælper",
"Psykomotorisk terapeut",
"Sundhedsplejerske",
"TV-producer",
"Sejlmager",
"Smørrebrødsjomfru",
"Sanglærer",
"Rengøringsassistent",
"Vinduespudser",
"Bademester",
"Maskinfører",
"Buschauffør",
"Procesoperatør",
"Bygningsmaler",
"Flisemontør",
"Gulvlægger",
"Tæppemontør",
"Hospitalsserviceassistent",
"Arkivmedarbejder",
"HR-assistent",
"Korrespondent",
"Purser",
"Rideskoleassistent",
"Dyrepasser",
"Køkkenmedhjælper",
"Opvasker",
"Omdeler",
"Renovationsarbejder",
"Gadefejer",
"Måleraflæser",
"Pizzabager",
"Fastfood ekspedient",
"Butiksmedhjælper",
"Landbrugsmedhjælper",
"Gartner",
"Skovarbejder",
"Dambrugsarbejder",
"Politiinspektør",
"Speditionsleder",
"Bygningsingeniør",
"Energikonsulent",
"Elektronikfagtekniker",
"Lingvist",
"Tegnsprogstolk",
"Oversætter",
"Tolk",
"Journalist",
"Dramaturg",
"Forlagsredaktør",
"Advokat",
"Dommer",
"Notar",
"Jurist",
"Produktudvikler",
"Industritekniker",
"Laboratorieassistent",
"Biomediciner",
"Apotekerassistent",
"Apotekstekniker",
"Ortopædiingeniør",
"Klinisk tandtekniker",
"Dyreklinikassistent",
"Flyveleder",
"Flyveklarerer",
"Pilot",
"Rejsekonsulent",
"Trafikassistent",
"Billetsælger",
"Rejsebureaumedarbejder",
"Medarbejder på turistkontor",
"Inkassomedarbejder",
"Specialtandlæge",
"Dyrlæge",
"Tandlæge",
"Apoteker",
"CSR-ansvarlig",
"Projektleder",
"Afdelingsleder",
"Salgsdirektør",
"Bygningskonstruktør",
"Afdelingschef",
"Fodermester",
"Havneassistent",
"Farmaceut",
"Artdirector assistent",
"Professor",
"Faglærer",
"Automontør",
"Familieplejer",
"Blomsterdekoratør",
"Sundhedsøkonom",
"Bilsynsassistent",
"Badeassistent",
"Businesscontroller",
"Specialkonsulent",
"FVU-lærer",
"Bageriarbejder",
"Ridelærer",
"Fitness-instruktør",
"Optiker",
"Fotograf",
"Fotojournalist",
"Køkkenchef",
"Regissør",
"Salgschef",
"Elinstallatør",
"Skolekonsulent",
"Læge",
"Byggesagsbehandler",
"Økologikonsulent",
"Restaurantchef",
"Cater",
"Adjunkt",
"Faglig konsulent",
"Forsorgsmedarbejder",
"Pædagogisk konsulent",
"Sygehuslæge",
"Hospitalsmedhjælper",
"Kirkesanger",
"Kantineleder",
"Fagkonsulent",
"Handicaphjælper",
"Aftenskolelærer",
"Projektkoordinator",
"Ligestillingskonsulent",
"Brolæggerarbejde",
"Bygningsstruktør",
"Oliefyrstekniker",
"Motorcykelmekaniker",
"Buschaufførelev",
"Minkfarmmedhjælper",
"Procesteknolog",
"Rengøringsassistent i transportmidler",
"Butiks- og detailslagter",
"Audiologiassistent",
"Skiltemalerlærling",
"Rengøringsassistent i kontor",
"Ventilationstekniker",
"Skibsassistent",
"AV teknikerelev",
"Højskolelærer",
"Detailhandelselev",
"Forretningsudvikler",
"Vinkyper",
"Kulturmedarbejder",
"Zoneterapeut",
"Styrmand",
"Turistchef",
"Anæstesisygeplejerske",
"Støttepædagog",
"Salgskonsulent",
"Eventkoordinatorelev",
"Bygningssnedker",
"Finansmedarbejder",
"Kursuskoordinator",
"Automatiktekniker",
"Bioanalytiker",
"Klubpædagog",
"Vagtcentralleder",
"Flyteknikner",
"Forsyningsoperatør",
"Account manager",
"Datatekniker",
"Logistikchef",
"Tale-hørelærer",
"Plastmager",
"IT-produktchef",
"Erhvervsanalytiker",
"Halinspektør",
"Maskinoperatør",
"Kommunikationsmedarbejder",
"Anlægsstruktør",
"Filmtekniker",
"Elektronikfagteknikerelev",
"Servicetekniker",
"Mejeriingeniør",
"Poder",
"Advokatfuldmægtig",
"Omsorgshjælper",
"Kvalitetsmedarbejder",
"Forlagskonsulent",
"Flyteknikerlærling",
"Skov- og naturtekniker",
"Skolesekretær",
"IT-produktejer",
"Kontorassistent",
"Udviklingskonsulent",
"Pædagogisk faglig koordinator",
]
| true
| true
|
1c4a43a62d1b0c7b36182ef5fdb89f137fc4a846
| 11,479
|
py
|
Python
|
onmt/translate/greedy_search.py
|
comydream/OpenNMT-py
|
2f3c810069ca03b752d9886782648e576b39a06d
|
[
"MIT"
] | 1
|
2021-10-01T15:03:35.000Z
|
2021-10-01T15:03:35.000Z
|
onmt/translate/greedy_search.py
|
urialon/OpenNMT-py
|
bdca05a3fac8f864b21c86a8ad03c09895212e70
|
[
"MIT"
] | null | null | null |
onmt/translate/greedy_search.py
|
urialon/OpenNMT-py
|
bdca05a3fac8f864b21c86a8ad03c09895212e70
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
from onmt.translate.decode_strategy import DecodeStrategy
def sample_topp(logits, keep_topp):
    """Mask out logits outside the top-p ("nucleus") of the distribution.

    Tokens are kept in descending-probability order until their cumulative
    probability reaches ``keep_topp``; every other logit is pushed down to
    -10000 so its sampling probability becomes effectively zero.

    Args:
        logits (FloatTensor): ``(batch_size, vocab_size)`` scores.
        keep_topp (float): Cumulative-probability threshold.

    Returns:
        FloatTensor: ``logits`` with out-of-nucleus entries set to -10000.
    """
    desc_logits, desc_indices = torch.sort(logits, descending=True, dim=1)
    cum_probs = F.softmax(desc_logits, dim=-1).cumsum(dim=-1)
    # Strictly-below-threshold prefix; the token that crosses the
    # threshold is re-added just below, so the kept mass is >= keep_topp.
    keep_sorted = cum_probs.lt(keep_topp)
    boundary = keep_sorted.cumsum(dim=1)[:, -1:]
    boundary.clamp_(0, keep_sorted.size(1) - 1)
    keep_sorted = keep_sorted.scatter_(1, boundary, 1)
    # Map the keep-mask from sorted order back to vocabulary order.
    keep_mask = keep_sorted.scatter(
        1,
        desc_indices,
        keep_sorted,
    )
    # -10000 drives the post-softmax probability to ~0.
    return logits.masked_fill(~keep_mask, -10000)
def sample_topk(logits, keep_topk):
    """Mask out every logit that is not among the ``keep_topk`` largest.

    Args:
        logits (FloatTensor): ``(batch_size, vocab_size)`` scores.
        keep_topk (int): Number of top entries to keep per row.

    Returns:
        FloatTensor: ``logits`` with sub-top-k entries set to -10000,
        which drives their post-softmax probability to ~0.
    """
    # The k-th best value of each row is the cutoff for that row.
    kth_value = torch.topk(logits, keep_topk, dim=1)[0][:, -1:]
    threshold = kth_value.float().expand(-1, logits.shape[1])
    below_cutoff = logits < threshold
    return logits.masked_fill(below_cutoff, -10000)
def sample_with_temperature(logits, sampling_temp, keep_topk, keep_topp):
    """Select next tokens randomly from the top k possible next tokens.

    Samples from a categorical distribution over the ``keep_topk`` words
    using the category probabilities ``logits / sampling_temp``.

    Args:
        logits (FloatTensor): Shaped ``(batch_size, vocab_size)``.
            These can be logits (``(-inf, inf)``) or log-probs
            (``(-inf, 0]``).
        sampling_temp (float): Used to scale down logits. The higher the
            value, the more likely it is that a non-max word will be
            sampled.
        keep_topk (int): This many words could potentially be chosen. The
            other logits are set to have probability 0.
        keep_topp (float): Keep most likely words until the cumulated
            probability is greater than p. If used with keep_topk: both
            conditions will be applied.

    Returns:
        (LongTensor, FloatTensor):

        * topk_ids: Shaped ``(batch_size, 1)``, sampled word indices.
        * topk_scores: Shaped ``(batch_size, 1)``, essentially
          ``(logits / sampling_temp)[topk_ids]``.
    """
    if sampling_temp == 0.0 or keep_topk == 1:
        # Greedy path: argmax avoids dividing by a zero temperature,
        # and sampling from the top-1 is equivalent to argmax anyway.
        topk_scores, topk_ids = logits.topk(1, dim=-1)
        if sampling_temp > 0:
            topk_scores /= sampling_temp
        return topk_ids, topk_scores

    scaled = logits / sampling_temp
    if keep_topp > 0:
        scaled = sample_topp(scaled, keep_topp)
    if keep_topk > 0:
        scaled = sample_topk(scaled, keep_topk)
    sampler = torch.distributions.Categorical(logits=scaled)
    topk_ids = sampler.sample().view(-1, 1)
    topk_scores = scaled.gather(dim=1, index=topk_ids)
    return topk_ids, topk_scores
class GreedySearch(DecodeStrategy):
    """Select next tokens randomly from the top k possible next tokens.

    The ``scores`` attribute's lists are the score, after applying temperature,
    of the final prediction (either EOS or the final token in the event
    that ``max_length`` is reached)

    Args:
        pad (int): See base.
        bos (int): See base.
        eos (int): See base.
        unk (int): See base.
        batch_size (int): See base.
        global_scorer (onmt.translate.GNMTGlobalScorer): Scorer instance.
        min_length (int): See base.
        max_length (int): See base.
        ban_unk_token (Boolean): See base.
        block_ngram_repeat (int): See base.
        exclusion_tokens (set[int]): See base.
        return_attention (bool): See base.
        max_length (int): See base.
        sampling_temp (float): See
            :func:`~onmt.translate.greedy_search.sample_with_temperature()`.
        keep_topk (int): See
            :func:`~onmt.translate.greedy_search.sample_with_temperature()`.
        keep_topp (float): See
            :func:`~onmt.translate.greedy_search.sample_with_temperature()`.
        beam_size (int): Number of beams to use.
    """

    def __init__(self, pad, bos, eos, unk, batch_size, global_scorer,
                 min_length, block_ngram_repeat, exclusion_tokens,
                 return_attention, max_length, sampling_temp, keep_topk,
                 keep_topp, beam_size, ban_unk_token):
        super(GreedySearch, self).__init__(
            pad, bos, eos, unk, batch_size, beam_size, global_scorer,
            min_length, block_ngram_repeat, exclusion_tokens,
            return_attention, max_length, ban_unk_token)
        # Sampling hyper-parameters; consumed by _pick() via
        # sample_with_temperature().
        self.sampling_temp = sampling_temp
        self.keep_topk = keep_topk
        self.keep_topp = keep_topp
        # Scores of the latest step's picks; set in advance().
        self.topk_scores = None
        self.beam_size = beam_size

    def initialize(self, memory_bank, src_lengths, src_map=None, device=None,
                   target_prefix=None):
        """Initialize for decoding."""
        # Tile encoder outputs across the beam dimension.
        (fn_map_state, memory_bank,
            src_map, target_prefix) = self.initialize_tile(
                memory_bank, src_lengths, src_map, target_prefix)
        if device is None:
            device = self.get_device_from_memory_bank(memory_bank)

        super(GreedySearch, self).initialize(
            memory_bank, src_lengths, src_map, device, target_prefix)
        # Maps alive rows back into the decoder's state tensors.
        self.select_indices = torch.arange(
            self.batch_size*self.beam_size, dtype=torch.long, device=device)
        # Remembers which source batch entry each alive row came from.
        self.original_batch_idx = fn_map_state(torch.arange(
            self.batch_size, dtype=torch.long, device=device), dim=0)
        # Running cumulative score for every alive hypothesis.
        self.beams_scores = torch.zeros((self.batch_size*self.beam_size, 1),
                                        dtype=torch.float, device=device)
        return fn_map_state, memory_bank, self.memory_lengths, src_map

    @property
    def current_predictions(self):
        # Last token chosen for every alive sequence.
        return self.alive_seq[:, -1]

    @property
    def batch_offset(self):
        return self.select_indices

    def _pick(self, log_probs):
        """Function used to pick next tokens.

        Args:
            log_probs (FloatTensor): ``(batch_size, vocab_size)``.
        """
        # maybe fix some prediction at this step by modifying log_probs
        log_probs = self.target_prefixing(log_probs)
        topk_ids, topk_scores = sample_with_temperature(
            log_probs, self.sampling_temp, self.keep_topk, self.keep_topp)
        return topk_ids, topk_scores

    def align_select_indices(self):
        # If some beams finished since the last step, rebuild
        # select_indices so it stays in step with is_finished.
        nb_finished_beams = (self.is_finished.view(-1).size(0) -
                             self.select_indices.size(0))
        if nb_finished_beams:
            self.select_indices = torch.arange(
                self.select_indices.size(0), dtype=torch.long,
                device=self.select_indices.device)

    def advance(self, log_probs, attn):
        """Select next tokens randomly from the top k possible next tokens.

        Args:
            log_probs (FloatTensor): Shaped ``(batch_size, vocab_size)``.
                These can be logits (``(-inf, inf)``) or log-probs
                (``(-inf, 0]``). (The distribution actually uses the
                log-probabilities ``logits - logits.logsumexp(-1)``,
                which equals the logits if they are log-probabilities summing
                to 1.)
            attn (FloatTensor): Shaped ``(1, B, inp_seq_len)``.
        """
        self.align_select_indices()

        # Apply decoding constraints before picking.
        self.ensure_min_length(log_probs)
        self.ensure_unk_removed(log_probs)
        self.block_ngram_repeats(log_probs)

        topk_ids, self.topk_scores = self._pick(log_probs)
        self.beams_scores += self.topk_scores

        # A sequence is finished when it just emitted EOS.
        self.is_finished = topk_ids.eq(self.eos)

        self.alive_seq = torch.cat([self.alive_seq, topk_ids], -1)
        if self.return_attention:
            if self.alive_attn is None:
                self.alive_attn = attn
            else:
                self.alive_attn = torch.cat([self.alive_attn, attn], 0)
        self.ensure_max_length()

    def update_finished(self):
        """Finalize scores and predictions."""
        # shape: (sum(~ self.is_finished), 1)
        finished_batches = self.is_finished.view(-1).nonzero(as_tuple=False)
        step = len(self)
        length_penalty = self.global_scorer.length_penalty(
            step, alpha=self.global_scorer.alpha)

        # Record every sequence that produced EOS at this step.
        for b in finished_batches.view(-1):
            b_orig = self.original_batch_idx[b]
            score = self.beams_scores[b, 0]/length_penalty
            pred = self.alive_seq[b, 1:]
            attention = (
                self.alive_attn[:, b, :self.memory_lengths[b]]
                if self.alive_attn is not None else [])
            self.hypotheses[b_orig].append((score, pred, attention))
        self.done = self.is_finished.all()
        if self.done:
            # Everything finished: expose each entry's hypotheses, best
            # (highest score) first, via scores/predictions/attention.
            for b in range(self.batch_size):
                best_hyp = sorted(
                    self.hypotheses[b], key=lambda x: x[0], reverse=True)
                for score, pred, attn in best_hyp:
                    self.scores[b].append(score)
                    self.predictions[b].append(pred)
                    self.attention[b].append(attn)
            return
        # Otherwise drop finished rows from all live state tensors.
        is_alive = ~self.is_finished.view(-1)
        self.alive_seq = self.alive_seq[is_alive]
        self.beams_scores = self.beams_scores[is_alive]
        if self.alive_attn is not None:
            self.alive_attn = self.alive_attn[:, is_alive]
        self.select_indices = is_alive.nonzero(as_tuple=False).view(-1)
        self.original_batch_idx = self.original_batch_idx[is_alive]
        self.maybe_update_target_prefix(self.select_indices)
class GreedySearchLM(GreedySearch):
    """Greedy search specialized for decoder-only language models.

    In the LM setting the "source" grows together with the generated
    sequence, so ``memory_lengths`` is pruned and advanced in lockstep
    with decoding.
    """

    def update_finished(self):
        """Finalize finished hypotheses, then prune memory lengths."""
        super().update_finished()
        self.update_memory_lengths()

    def update_memory_lengths(self):
        """Drop memory lengths belonging to finished batch entries."""
        alive_mask = ~self.is_finished.view(-1)
        self.memory_lengths = self.memory_lengths[alive_mask]

    def advance(self, log_probs, attn):
        """Pick the next tokens and grow each sequence's source length."""
        super().advance(log_probs, attn)

        # In the LM task memory_lengths tracks the currently generated
        # src, so it must follow the generation step by step.
        self.memory_lengths += 1

    def initialize(self, src, src_lengths, src_map=None, device=None,
                   target_prefix=None):
        """Initialize for decoding and tile ``src`` across beams."""
        if device is None:
            device = src.device

        (fn_map_state, _, self.memory_lengths,
         src_map) = super().initialize(
            None, src_lengths, src_map, device, target_prefix)

        src = fn_map_state(src, dim=1)

        return fn_map_state, src, self.memory_lengths, src_map
| 40.996429
| 79
| 0.623748
|
import torch
import torch.nn.functional as F
from onmt.translate.decode_strategy import DecodeStrategy
def sample_topp(logits, keep_topp):
sorted_logits, sorted_indices = torch.sort(logits,
descending=True,
dim=1)
cumulative_probs = torch.cumsum(F.softmax(sorted_logits,
dim=-1), dim=-1)
sorted_indices_to_keep = cumulative_probs.lt(keep_topp)
cumsum_mask = sorted_indices_to_keep.cumsum(dim=1)
last_included = cumsum_mask[:, -1:]
last_included.clamp_(0, sorted_indices_to_keep.size()[1] - 1)
sorted_indices_to_keep = sorted_indices_to_keep.scatter_(
1, last_included, 1)
keep_indices = sorted_indices_to_keep.scatter(
1,
sorted_indices,
sorted_indices_to_keep,
)
return logits.masked_fill(~keep_indices, -10000)
def sample_topk(logits, keep_topk):
top_values, _ = torch.topk(logits, keep_topk, dim=1)
kth_best = top_values[:, -1].view([-1, 1])
kth_best = kth_best.repeat([1, logits.shape[1]]).float()
ignore = torch.lt(logits, kth_best)
return logits.masked_fill(ignore, -10000)
def sample_with_temperature(logits, sampling_temp, keep_topk, keep_topp):
if sampling_temp == 0.0 or keep_topk == 1:
topk_scores, topk_ids = logits.topk(1, dim=-1)
if sampling_temp > 0:
topk_scores /= sampling_temp
else:
logits = torch.div(logits, sampling_temp)
if keep_topp > 0:
logits = sample_topp(logits, keep_topp)
if keep_topk > 0:
logits = sample_topk(logits, keep_topk)
dist = torch.distributions.Categorical(logits=logits)
topk_ids = dist.sample().view(-1, 1)
topk_scores = logits.gather(dim=1, index=topk_ids)
return topk_ids, topk_scores
class GreedySearch(DecodeStrategy):
def __init__(self, pad, bos, eos, unk, batch_size, global_scorer,
min_length, block_ngram_repeat, exclusion_tokens,
return_attention, max_length, sampling_temp, keep_topk,
keep_topp, beam_size, ban_unk_token):
super(GreedySearch, self).__init__(
pad, bos, eos, unk, batch_size, beam_size, global_scorer,
min_length, block_ngram_repeat, exclusion_tokens,
return_attention, max_length, ban_unk_token)
self.sampling_temp = sampling_temp
self.keep_topk = keep_topk
self.keep_topp = keep_topp
self.topk_scores = None
self.beam_size = beam_size
def initialize(self, memory_bank, src_lengths, src_map=None, device=None,
target_prefix=None):
(fn_map_state, memory_bank,
src_map, target_prefix) = self.initialize_tile(
memory_bank, src_lengths, src_map, target_prefix)
if device is None:
device = self.get_device_from_memory_bank(memory_bank)
super(GreedySearch, self).initialize(
memory_bank, src_lengths, src_map, device, target_prefix)
self.select_indices = torch.arange(
self.batch_size*self.beam_size, dtype=torch.long, device=device)
self.original_batch_idx = fn_map_state(torch.arange(
self.batch_size, dtype=torch.long, device=device), dim=0)
self.beams_scores = torch.zeros((self.batch_size*self.beam_size, 1),
dtype=torch.float, device=device)
return fn_map_state, memory_bank, self.memory_lengths, src_map
@property
def current_predictions(self):
return self.alive_seq[:, -1]
@property
def batch_offset(self):
return self.select_indices
def _pick(self, log_probs):
log_probs = self.target_prefixing(log_probs)
topk_ids, topk_scores = sample_with_temperature(
log_probs, self.sampling_temp, self.keep_topk, self.keep_topp)
return topk_ids, topk_scores
def align_select_indices(self):
nb_finished_beams = (self.is_finished.view(-1).size(0) -
self.select_indices.size(0))
if nb_finished_beams:
self.select_indices = torch.arange(
self.select_indices.size(0), dtype=torch.long,
device=self.select_indices.device)
def advance(self, log_probs, attn):
self.align_select_indices()
self.ensure_min_length(log_probs)
self.ensure_unk_removed(log_probs)
self.block_ngram_repeats(log_probs)
topk_ids, self.topk_scores = self._pick(log_probs)
self.beams_scores += self.topk_scores
self.is_finished = topk_ids.eq(self.eos)
self.alive_seq = torch.cat([self.alive_seq, topk_ids], -1)
if self.return_attention:
if self.alive_attn is None:
self.alive_attn = attn
else:
self.alive_attn = torch.cat([self.alive_attn, attn], 0)
self.ensure_max_length()
def update_finished(self):
finished_batches = self.is_finished.view(-1).nonzero(as_tuple=False)
step = len(self)
length_penalty = self.global_scorer.length_penalty(
step, alpha=self.global_scorer.alpha)
for b in finished_batches.view(-1):
b_orig = self.original_batch_idx[b]
score = self.beams_scores[b, 0]/length_penalty
pred = self.alive_seq[b, 1:]
attention = (
self.alive_attn[:, b, :self.memory_lengths[b]]
if self.alive_attn is not None else [])
self.hypotheses[b_orig].append((score, pred, attention))
self.done = self.is_finished.all()
if self.done:
for b in range(self.batch_size):
best_hyp = sorted(
self.hypotheses[b], key=lambda x: x[0], reverse=True)
for score, pred, attn in best_hyp:
self.scores[b].append(score)
self.predictions[b].append(pred)
self.attention[b].append(attn)
return
is_alive = ~self.is_finished.view(-1)
self.alive_seq = self.alive_seq[is_alive]
self.beams_scores = self.beams_scores[is_alive]
if self.alive_attn is not None:
self.alive_attn = self.alive_attn[:, is_alive]
self.select_indices = is_alive.nonzero(as_tuple=False).view(-1)
self.original_batch_idx = self.original_batch_idx[is_alive]
self.maybe_update_target_prefix(self.select_indices)
class GreedySearchLM(GreedySearch):
def update_finished(self):
super(GreedySearchLM, self).update_finished()
self.update_memory_lengths()
def update_memory_lengths(self):
is_alive = ~self.is_finished.view(-1)
self.memory_lengths = self.memory_lengths[is_alive]
def advance(self, log_probs, attn):
super(GreedySearchLM, self).advance(log_probs, attn)
self.memory_lengths += 1
def initialize(self, src, src_lengths, src_map=None, device=None,
target_prefix=None):
if device is None:
device = src.device
(fn_map_state, _, self.memory_lengths,
src_map) = super(GreedySearchLM, self).initialize(
None, src_lengths, src_map, device, target_prefix)
src = fn_map_state(src, dim=1)
return fn_map_state, src, self.memory_lengths, src_map
| true
| true
|
1c4a44bbf72471eab126ed6a6523fab4eb11bffa
| 1,995
|
py
|
Python
|
efm_example.py
|
xurong-liang/cornac
|
6e0a58b3c99de8c1bd685086c8a63b29aef66e28
|
[
"Apache-2.0"
] | null | null | null |
efm_example.py
|
xurong-liang/cornac
|
6e0a58b3c99de8c1bd685086c8a63b29aef66e28
|
[
"Apache-2.0"
] | null | null | null |
efm_example.py
|
xurong-liang/cornac
|
6e0a58b3c99de8c1bd685086c8a63b29aef66e28
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The Cornac Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Example for Explicit Factor Models"""
import cornac
from cornac.datasets import amazon_toy
from cornac.data import SentimentModality
from cornac.eval_methods import RatioSplit
# Load rating and sentiment information
rating = amazon_toy.load_feedback()
sentiment = amazon_toy.load_sentiment()
# Instantiate a SentimentModality, it makes it convenient to work with sentiment information
md = SentimentModality(data=sentiment)
# Define an evaluation method to split feedback into train and test sets
split_data = RatioSplit(
data=rating,
test_size=0.15,
exclude_unknowns=True,
verbose=True,
sentiment=md,
seed=123,
)
# Instantiate the EFM model
efm = cornac.models.EFM(
num_explicit_factors=40,
num_latent_factors=60,
num_most_cared_aspects=15,
rating_scale=5.0,
alpha=0.85,
lambda_x=1,
lambda_y=1,
lambda_u=0.01,
lambda_h=0.01,
lambda_v=0.01,
max_iter=100,
num_threads=1,
trainable=True,
verbose=True,
seed=123,
)
# Instantiate evaluation metrics
rmse = cornac.metrics.RMSE()
ndcg_50 = cornac.metrics.NDCG(k=50)
auc = cornac.metrics.AUC()
# Put everything together into an experiment and run it
experiment = cornac.Experiment(
eval_method=split_data, models=[efm], metrics=[rmse, ndcg_50, auc]
)
experiment.run()
| 28.098592
| 92
| 0.717293
|
import cornac
from cornac.datasets import amazon_toy
from cornac.data import SentimentModality
from cornac.eval_methods import RatioSplit
rating = amazon_toy.load_feedback()
sentiment = amazon_toy.load_sentiment()
md = SentimentModality(data=sentiment)
split_data = RatioSplit(
data=rating,
test_size=0.15,
exclude_unknowns=True,
verbose=True,
sentiment=md,
seed=123,
)
efm = cornac.models.EFM(
num_explicit_factors=40,
num_latent_factors=60,
num_most_cared_aspects=15,
rating_scale=5.0,
alpha=0.85,
lambda_x=1,
lambda_y=1,
lambda_u=0.01,
lambda_h=0.01,
lambda_v=0.01,
max_iter=100,
num_threads=1,
trainable=True,
verbose=True,
seed=123,
)
rmse = cornac.metrics.RMSE()
ndcg_50 = cornac.metrics.NDCG(k=50)
auc = cornac.metrics.AUC()
experiment = cornac.Experiment(
eval_method=split_data, models=[efm], metrics=[rmse, ndcg_50, auc]
)
experiment.run()
| true
| true
|
1c4a45973be19ede8307caaba8c7bcb5c4ecdae9
| 994
|
py
|
Python
|
config/urls.py
|
lawiz22/PLOUC-Backend-master
|
b93fa2fea8d45df9f19c3c58037e59dad4981921
|
[
"MIT"
] | null | null | null |
config/urls.py
|
lawiz22/PLOUC-Backend-master
|
b93fa2fea8d45df9f19c3c58037e59dad4981921
|
[
"MIT"
] | 3
|
2020-06-05T21:24:34.000Z
|
2022-03-11T23:50:26.000Z
|
config/urls.py
|
lawiz22/PLOUC-Backend-master
|
b93fa2fea8d45df9f19c3c58037e59dad4981921
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework.documentation import include_docs_urls
urlpatterns = [
    # API (v1)
    # NOTE: URL resolution is order-dependent. Every include below uses
    # the empty prefix r'^', so each app's urlconf is tried in turn until
    # one matches.
    url(r'^', include('v1.accounts.urls')),
    url(r'^', include('v1.credits.urls')),
    url(r'^', include('v1.posts.urls')),
    url(r'^', include('v1.music.urls')),
    url(r'^', include('v1.private_messages.urls')),
    url(r'^', include('v1.replies.urls')),
    url(r'^', include('v1.user_roles.urls')),
    url(r'^', include('v1.votes.urls')),

    # Core
    url(r'^admin/', admin.site.urls),
    # API docs catch-all: listed last so it only answers paths that no
    # app above claimed.
    url(r'^', include_docs_urls(title='PLOUC.LIVE API')),
]

# Serve uploaded media and collected static files through Django.
# NOTE(review): static() only serves files when DEBUG is True — confirm
# the production web server is configured to serve these paths.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)

if settings.DEBUG:
    # Django Debug Toolbar is a dev-only dependency; import lazily.
    import debug_toolbar

    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
| 28.4
| 78
| 0.677062
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework.documentation import include_docs_urls
urlpatterns = [
url(r'^', include('v1.accounts.urls')),
url(r'^', include('v1.credits.urls')),
url(r'^', include('v1.posts.urls')),
url(r'^', include('v1.music.urls')),
url(r'^', include('v1.private_messages.urls')),
url(r'^', include('v1.replies.urls')),
url(r'^', include('v1.user_roles.urls')),
url(r'^', include('v1.votes.urls')),
url(r'^admin/', admin.site.urls),
url(r'^', include_docs_urls(title='PLOUC.LIVE API')),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| true
| true
|
1c4a45acae68baad54b3c997b9a3965d7f7d11af
| 13,496
|
py
|
Python
|
arch/models/model.py
|
bigphoton/arch
|
95a197d6b89bc2316b0d88b2b1345cbbb90088ec
|
[
"Unlicense"
] | null | null | null |
arch/models/model.py
|
bigphoton/arch
|
95a197d6b89bc2316b0d88b2b1345cbbb90088ec
|
[
"Unlicense"
] | null | null | null |
arch/models/model.py
|
bigphoton/arch
|
95a197d6b89bc2316b0d88b2b1345cbbb90088ec
|
[
"Unlicense"
] | null | null | null |
"""
Functions and objects describing optical components.
"""
import abc
import sympy
from ..connectivity import Connectivity
import arch.port as port
import numpy as np
import math
class Model(abc.ABC):
    """
    Model base class. One of `block` or `ports` must be defined.

    name: name of this model for indexing, string
    block: block of which this model is part (optional)
    ports: ports connected to this model (optional)
    kwargs: keyword argument dict passed to subclass Model.define method
    """

    def __init__(self, name, block=None, ports=None, **kwargs):
        self.name = name

        # The port list comes from exactly one of `block` or `ports`;
        # supplying both is ambiguous and rejected.
        if block is not None and ports is None:
            self.ports = list(block.ports)
        elif ports is not None and block is None:
            self.ports = list(ports)
        elif ports is not None and block is not None:
            raise AttributeError("One and only one of either `block` or `ports` "
                                 "may be set.")
        else:
            self.ports = list()

        # Name-mangled backing set for the read-only `properties` property.
        self.__properties = set()

        # Subclass hook: remaining keyword arguments go to define().
        self.define(**kwargs)

    @classmethod
    def compound(cls, name, models, connectivity):
        """
        Method to be implemented by subclasses. Subclasses should call the `compound` method
        of `super` if they are unable to compound the input models (see snippet below).

        try:
            <Subclass compounding code here>
        except NotImplementedError:
            return super().compound(name=name, models=models, connectivity=connectivity)
        """
        print ("Compounding in Model")
        # Base-class fallback: compound numerically.
        return NumericModel.compound(name, models, connectivity)

    @property
    def lineage(self):
        """
        Return list of models in this model's chain of inheritance.
        """
        def list_bases(c):
            # Recursively collect Model subclasses along the MRO bases.
            if issubclass(c, Model):
                all_bases = [c]
                for base in c.__bases__:
                    all_bases.extend(list_bases(base))
                return all_bases
            else:
                return []

        return list_bases(self.__class__)

    @property
    def properties(self):
        """
        List of properties of model which change how the model is compounded or simulated.
        Properties (list elements) should normally be strings.
        """
        # NOTE(review): this actually returns a set, not a list as the
        # docstring says — confirm which callers rely on set semantics.
        return self.__properties

    @property
    def in_ports(self):
        # Ports whose direction marks them as inputs.
        return [p for p in self.ports if p.direction == port.direction.inp]

    @property
    def out_ports(self):
        # Ports whose direction marks them as outputs.
        return [p for p in self.ports if p.direction == port.direction.out]

    @abc.abstractmethod
    def define(self, **kwargs):
        """
        Method overridden by subclasses to implement the model. kwargs are
        passed directly from __init__.
        """
        pass

    @property
    def port_names(self):
        # Set of string names for all ports.
        return {str(e) for e in self.ports}

    @property
    def default_input_state(self):
        """Dictionary of default values keyed by input port"""
        return {p:p.default for p in self.in_ports}

    def __repr__(self):
        return "<"+self.__class__.__module__+"."+self.__class__.__name__+" '"+self.name+"'>"
class NumericModel(Model):
    """
    General numeric model.

    The model is defined by a single function mapping a complete input
    state to an output state.

    out_func: function of dict keyed by input ports, returning dict keyed
        by output ports
    """

    def define(self, out_func=lambda x: x, **kwargs):
        """Validate `out_func` against this model's output ports and store it.

        Raises AttributeError if `out_func` fails to describe every
        declared output port.
        """
        self.properties.add("numeric")

        # Probe out_func once with the default input state to learn which
        # output ports it describes. (Previously it was evaluated twice,
        # doubling its cost and any side effects.)
        described_out_ports = set(out_func(self.default_input_state).keys())
        if not set(self.out_ports).issubset(described_out_ports):
            raise AttributeError(
                "Model output ports do not match ports"
                " described by out_func. "
                "Ports missing from `out_func` are {:}. ".format(
                    [p for p in self.out_ports if p not in described_out_ports]))

        self.out_func = out_func

    @classmethod
    def compound(cls, name, models=None, connectivity=None):
        """Build one NumericModel equivalent to `models` wired by `connectivity`.

        name: name for the compound model
        models: iterable of component models (default: no models)
        connectivity: Connectivity describing port-to-port links
            (default: empty connectivity)

        Returns a NumericModel whose ports are the external (unconnected)
        ports of the components and whose out_func evaluates the whole
        network by data-flow.
        """
        # Avoid mutable default arguments: build fresh fallbacks per call.
        models = [] if models is None else models
        connectivity = Connectivity() if connectivity is None else connectivity

        # Restrict the connectivity to links between these models only.
        connectivity = connectivity.filtered_by_models(models)

        # External ports are those not consumed by an internal connection.
        ports = [p for m in models for p in m.ports]
        ex_ports = [p for p in ports if p not in connectivity]

        def _have_prereqs(model, state):
            """Does `state` contain all the prerequisite inputs for `model`?"""
            return all(p in state for p in model.in_ports)

        def out_func(state):
            """Evaluate the network by data-flow from the given input state."""
            mods = set(models)

            # Seed ports inside feedback loops with their defaults so the
            # evaluation below has somewhere to start.
            loops = connectivity.loops
            state = {e: e.default for l in loops for e in l
                     if isinstance(e, port.var)} | state

            # Repeatedly evaluate every model whose inputs are known,
            # propagating outputs across connections, until all are done.
            while mods:
                ready_mods = {mod for mod in mods if _have_prereqs(mod, state)}
                if not ready_mods:
                    # No model can make progress: a missing input or an
                    # unseeded loop would otherwise spin forever here.
                    raise RuntimeError(
                        "Unable to evaluate compound model '{:}': "
                        "models {:} never receive all their inputs".format(
                            name, mods))
                for mod in ready_mods:
                    state |= mod.out_func(state)
                state |= {pi: state[po] for po, pi in connectivity
                          if po in state}
                mods -= ready_mods
            return state

        return NumericModel(name=name, ports=ex_ports, out_func=out_func)
class SymbolicModel(Model):
    """
    General symbolic model.

    Output ports are described by sympy expressions of the input ports
    (`out_exprs`); a numeric `out_func` is derived from them with
    `sympy.lambdify` whenever `out_exprs` is (re)assigned.
    """
    def define(self, out_exprs=None, **kwargs):
        self.properties.add("symbolic")
        if out_exprs is not None:
            self.out_exprs = out_exprs
    @property
    def out_exprs(self):
        # Dict of sympy expressions keyed by output port.
        return self.__out_exprs
    @out_exprs.setter
    def out_exprs(self, new_out_exprs):
        self.__out_exprs = new_out_exprs
        # Refresh out_funcs: compile the expressions into a numeric lambda
        # taking the input ports as positional arguments.
        try:
            self._out_func_lambda = sympy.lambdify(self.in_ports,
                [self.out_exprs[p] for p in self.out_ports])
        except KeyError as e:
            raise KeyError(f"Output port '{e}' not described by `out_exprs` {self.out_exprs}")
    def out_func(self, in_state):
        """
        Compute output state from input state.

        in_state: dictionary of port values keyed by port
        return: dictionary of port values (including outputs) keyed by port
        """
        # Since our lambda func (and sympy.lambdify) deals in arg *vectors*, derive them from the
        # input dict, and derive the output dict from them.
        in_state_vec = [in_state[p] for p in self.in_ports]
        out_state_vec = self._out_func_lambda(*in_state_vec)
        #out_state_dict = in_state | {self.out_ports[i]:out_state_vec[i] for i in range(len(out_state_vec))}
        out_state_dict = {self.out_ports[i]:out_state_vec[i] for i in range(len(out_state_vec))}
        return out_state_dict
    @classmethod
    def compound(cls, name, models=[], connectivity=Connectivity(), iter_max=10):
        """
        Compose `models` into one SymbolicModel by symbolic substitution.

        Falls back to the superclass (numeric) compound when the network
        contains feedback loops, detected by hitting `iter_max` passes
        without all models becoming substitutable.
        """
        try:
            # Filter the connectivity to only cover these models
            connectivity = connectivity.filtered_by_models(models)
            # Get ports from models
            ports = [p for m in models for p in m.ports]
            # Filter external ports
            ex_ports = [p for p in ports if p not in connectivity]
            ex_out_ports = [p for p in ex_ports if p.direction == port.direction.out]
            ex_in_ports = [p for p in ex_ports if p.direction == port.direction.inp]
            def _have_prereqs(model, state):
                """Does `state` contain all the prerequisite inputs for `model`"""
                return all([p in state for p in model.in_ports])
            # Substitute: seed the state with the external inputs mapped to
            # themselves (as symbols), then substitute model-by-model.
            state = {p:p for p in ex_in_ports}
            mods = set(models)
            i = 0
            while mods and i < iter_max:
                i += 1
                ready_mods = {mod for mod in mods if _have_prereqs(mod, state)}
                for mod in ready_mods:
                    state |= {op:oe.subs(state) for op,oe in mod.out_exprs.items()}
                state |= {pi:state[po] for po,pi in connectivity if po in state}
                mods -= ready_mods
            # Check: reaching the iteration cap implies a feedback loop that
            # pure substitution cannot resolve.
            if i == iter_max:
                ls = connectivity.loops
                print("Found loops:",list(ls))
                raise NotImplementedError(
                    f"Reached max iteration limit ({iter_max}) but all models do not "
                    f"yet have their prerequisite inputs. Remaining models are {mods}")
            extra_symbols = {s for oe in state.values()
                for s in oe.free_symbols if s in ex_out_ports}
            if extra_symbols:
                raise AttributeError("Extra symbols found after substitution: {:}. Either "
                    "relabel as compound input port, or adjust internal connectivity "
                    "accordingly.".format(extra_symbols))
            return SymbolicModel(name=name, ports=ex_ports, out_exprs=state)
        except NotImplementedError:
            return super().compound(name=name, models=models, connectivity=connectivity)
##############
## UNSORTED ##
##############
# Consolidated imports: `Matrix` was previously imported twice (once alone,
# once with ImmutableMatrix). All originally-imported names are kept.
import sympy
from sympy import Matrix, ImmutableMatrix, sqrt, I, exp
import arch.port as port
class Linear(SymbolicModel):
    """
    Linear optical model for classical and quantum optics.

    unitary_matrix: square sympy Matrix of dimension n; unitary or lossy unitary.

    The output expressions are the matrix applied to the vector of input
    optical ports, so the model inherits SymbolicModel's lambdified
    evaluation.
    """
    def define(self, unitary_matrix=None, **kwargs):
        super().define(**kwargs)
        self.properties.update({"optical", "time-independent"})
        self.in_optical_ports = [p for p in self.in_ports if p.kind == port.kind.optical]
        self.out_optical_ports = [p for p in self.out_ports if p.kind == port.kind.optical]
        # Default to the identity matrix (pass-through).
        if unitary_matrix is None:
            unitary_matrix = sympy.eye(len(self.out_optical_ports))
        self.U = ImmutableMatrix(unitary_matrix)
        if not self.U.is_square:
            raise AttributeError("Linear model matrix (unitary_matrix) must be square.")
        self.n_ins = self.U.rows
        self.n_outs = self.U.rows
        if len(self.in_optical_ports) != self.n_ins:
            raise AttributeError(f"Number of input optical ports "
                f"{len(self.in_optical_ports)} does not match dimension of model matrix "
                f"({self.n_ins}) of model {self}:{self.name}. Add ports before adding "
                f"model. Input ports were {self.in_ports} ({self.in_optical_ports} "
                f"optical), output ports were {self.out_ports} ({self.out_optical_ports} "
                f"optical).")
        if len(self.out_optical_ports) != self.n_outs:
            raise AttributeError("Number of output names {:} does not match dimension of "
                "model matrix {:}. Add ports before adding model.".format(
                len(self.out_optical_ports), self.n_outs))
        # TODO: Should override `out_func` to use matrix multiplication for the optical ports
        self.out_exprs = {op:oe for op,oe in
            zip(self.out_optical_ports, self.U * Matrix(self.in_optical_ports) ) }
    @classmethod
    def compound(cls, name, models, connectivity):
        """
        Compound Linear models by multiplying their matrices in causal
        order; falls back to the superclass compound for non-Linear models
        or looped connectivity.
        """
        try:
            if all([isinstance(m,Linear) for m in models]):
                if connectivity.has_loops:
                    raise NotImplementedError("Unable to hybridise models of type '{:}' "
                        "containing loops".format(cls))
                # Put models in causal order
                models = connectivity.order_models(models)
                # Filter the connectivity to only cover these models
                connectivity = connectivity.filtered_by_models(models)
                # Get ports from models
                ports = [p for m in models for p in m.ports]
                # Filter external ports
                ex_ports = [p for p in ports if p not in connectivity]
                ex_out_ports = [p for p in ex_ports if p.direction == port.direction.out]
                ex_in_ports = [p for p in ex_ports if p.direction == port.direction.inp]
                # Map modes: group connected (in, out) port pairs into
                # optical modes, numbered 0..np-1.
                # TODO: This routine is very expensive, possible to optimise?
                # NOTE(review): local `np` is the number of modes, NOT numpy.
                modes = dict()
                np = 0
                # Pre-populate modes with port order
                iops = [p for p in ex_in_ports if p.kind == port.kind.optical]
                oops = [p for p in ex_out_ports if p.kind == port.kind.optical]
                assert len(iops) == len(oops), ("Numbers of input and output optical ports does "
                    "not match")
                for ip,op in zip(iops, oops):
                    modes[np] = {ip, op}
                    np += 1
                # Map
                for model in models:
                    for ip,op in zip(model.in_optical_ports, model.out_optical_ports):
                        matched = False
                        for mode,mode_ports in modes.items():
                            if (ip in mode_ports) or any([connectivity.test(xp,mp)
                                    for mp in mode_ports for xp in [ip,op]]):
                                # If ports connect to ports of any known mode,
                                # associate them with this mode
                                mode_ports |= {ip, op}
                                matched = True
                                break
                        if not matched:
                            # If ports match no known mode, add a new mode,
                            # and associate these ports with it
                            modes[np] = {ip,op}
                            np += 1
                # Invert modes[]: look up a port's mode index directly
                mode_of_port = {p:m for m in modes for p in modes[m]}
                # Initial matrix
                U = sympy.eye(np)
                # Accumulate model matrix
                for m in models:
                    # Map old matrix rows to new ones
                    mode_map = [mode_of_port[p] for p in m.in_optical_ports]
                    U0m = m.U
                    n = U0m.rows
                    # Embed the model's matrix in a temporary doubled space,
                    # swap it onto the right modes, then drop the temp space.
                    Um = Matrix(Matrix.diag(sympy.eye(np), U0m, sympy.eye(np - n)))
                    # Orient matrix modes to ports
                    for i,j in enumerate(mode_map):
                        Um.row_swap(i+np,j)
                        Um.col_swap(i+np,j)
                    # Delete temp row/cols
                    for _ in range(np):
                        Um.row_del(np)
                        Um.col_del(np)
                    # print("Um:")
                    # sympy.pprint(Um)
                    # Accumulate
                    U = U * Um
                # print("U:")
                # sympy.pprint(U)
                return Linear(name=name, ports=ex_ports, unitary_matrix=U)
            raise NotImplementedError("Linear unable to compound input models {:}".format(
                [m for m in models]))
        except NotImplementedError:
            return super().compound(name=name, models=models, connectivity=connectivity)
class LinearGroupDelay(Linear):
    """
    Linear optical model including lumped group delay.

    delay: group delay attributed to every port of this model (recorded in
        each port's ``data['delay']``).
    """
    def define(self, delay=0, **kwargs):
        super().define(**kwargs)
        # A lumped delay makes the model discrete-time rather than
        # time-independent.
        self.properties.add("discrete-time")
        try:
            self.properties.remove("time-independent")
        except KeyError:
            pass
        self.delay = delay
        # Record the delay on each port. Loop variable renamed from `port`
        # to `p` to avoid shadowing the `arch.port` module.
        for p in self.ports:
            p.data['delay'] = self.delay
    @classmethod
    def compound(cls, name, models, connectivity):
        """
        Compound as plain Linear models and return the result.

        BUG FIX: the original computed the compound model but never
        returned it, so callers always received None.
        TODO(review): the accumulated group delay of the compound is not
        yet derived from the component delays.
        """
        new_mod = Linear.compound(name,
            models=models, connectivity=connectivity)
        return new_mod
class SourceModel(SymbolicModel):
    """
    Model for sources.

    Adds the "source" property and caches the optical subset of the
    output ports as `out_optical_ports`.
    """
    def define(self, **kwargs):
        super().define(**kwargs)
        self.properties.add("source")
        # Cache the optical output ports for convenient access later.
        optical_kind = port.kind.optical
        self.out_optical_ports = [
            out_p for out_p in self.out_ports if out_p.kind == optical_kind]
| 28.116667
| 102
| 0.680498
|
import abc
import sympy
from ..connectivity import Connectivity
import arch.port as port
import numpy as np
import math
class Model(abc.ABC):
    """
    Abstract base class for block models.

    A model owns a list of ports and a set of descriptive `properties`
    strings. Subclasses implement `define` to finish construction from the
    keyword arguments passed to __init__.
    """
    def __init__(self, name, block=None, ports=None, **kwargs):
        self.name = name
        # Ports may come from an owning block OR be given directly — never both.
        if block is not None and ports is None:
            self.ports = list(block.ports)
        elif ports is not None and block is None:
            self.ports = list(ports)
        elif ports is not None and block is not None:
            raise AttributeError("One and only one of either `block` or `ports` "
                "may be set.")
        else:
            self.ports = list()
        self.__properties = set()
        # Subclass hook: complete initialisation with the remaining kwargs.
        self.define(**kwargs)
    @classmethod
    def compound(cls, name, models, connectivity):
        # Fallback compounding: delegate to the numeric implementation.
        print ("Compounding in Model")
        return NumericModel.compound(name, models, connectivity)
    @property
    def lineage(self):
        """List of Model subclasses from this instance's class up to Model."""
        def list_bases(c):
            if issubclass(c, Model):
                all_bases = [c]
                for base in c.__bases__:
                    all_bases.extend(list_bases(base))
                return all_bases
            else:
                return []
        return list_bases(self.__class__)
    @property
    def properties(self):
        # Set of descriptive strings, e.g. "numeric", "symbolic", "optical".
        return self.__properties
    @property
    def in_ports(self):
        # Input-direction subset of self.ports.
        return [p for p in self.ports if p.direction == port.direction.inp]
    @property
    def out_ports(self):
        # Output-direction subset of self.ports.
        return [p for p in self.ports if p.direction == port.direction.out]
    @abc.abstractmethod
    def define(self, **kwargs):
        """Subclass hook called by __init__ with the leftover kwargs."""
        pass
    @property
    def port_names(self):
        return {str(e) for e in self.ports}
    @property
    def default_input_state(self):
        # Input state mapping each input port to its default value.
        return {p:p.default for p in self.in_ports}
    def __repr__(self):
        return "<"+self.__class__.__module__+"."+self.__class__.__name__+" '"+self.name+"'>"
class NumericModel(Model):
    """
    Model whose outputs are computed by an arbitrary Python callable.

    out_func: function of dict keyed by input ports, returning dict keyed
    by output ports.
    """
    def define(self, out_func=lambda x:x, **kwargs):
        self.properties.add("numeric")
        # NOTE(review): `out` is unused and re-evaluates out_func — the
        # primary copy of this file has the same dead assignment.
        out = out_func(self.default_input_state).keys()
        described_out_ports = set(out_func(self.default_input_state).keys())
        if not set(self.out_ports).issubset(described_out_ports):
            print(self.out_ports)
            print(described_out_ports)
            raise AttributeError("Model output ports do not match ports"
                " described by out_func. "
                "Ports missing from `out_func` are {:}. ".format(
                [p for p in self.out_ports if p not in described_out_ports]) )
        self.out_func = out_func
    @classmethod
    def compound(cls, name, models=[], connectivity=Connectivity()):
        """Compose `models` joined by `connectivity` into one NumericModel."""
        print("Compounding in NumericalModel")
        # Keep only the connections among these models.
        connectivity = connectivity.filtered_by_models(models)
        ports = [p for m in models for p in m.ports]
        # Unconnected ports form the compound model's external interface.
        ex_ports = [p for p in ports if p not in connectivity]
        ex_out_ports = [p for p in ex_ports if p.direction == port.direction.out]
        def _have_prereqs(model, state):
            # Does `state` contain all the prerequisite inputs for `model`?
            return all([p in state for p in model.in_ports])
        def out_func(state):
            mods = set(models)
            # Seed feedback-loop ports with defaults; caller state wins.
            loops = connectivity.loops
            state = {e:e.default for l in loops for e in l if isinstance(e, port.var)} | state
            # Evaluate ready models until every model has been substituted.
            while mods:
                ready_mods = {mod for mod in mods if _have_prereqs(mod, state)}
                for mod in ready_mods:
                    state |= mod.out_func(state)
                state |= {pi:state[po] for po,pi in connectivity if po in state}
                mods -= ready_mods
            return state
        return NumericModel(name=name, ports=ex_ports, out_func=out_func)
class SymbolicModel(Model):
    """
    General symbolic model: outputs are sympy expressions of the input
    ports, compiled to a numeric function with sympy.lambdify.
    """
    def define(self, out_exprs=None, **kwargs):
        self.properties.add("symbolic")
        if out_exprs is not None:
            self.out_exprs = out_exprs
    @property
    def out_exprs(self):
        # Dict of sympy expressions keyed by output port.
        return self.__out_exprs
    @out_exprs.setter
    def out_exprs(self, new_out_exprs):
        self.__out_exprs = new_out_exprs
        # Recompile the numeric lambda whenever the expressions change.
        try:
            self._out_func_lambda = sympy.lambdify(self.in_ports,
                [self.out_exprs[p] for p in self.out_ports])
        except KeyError as e:
            raise KeyError(f"Output port '{e}' not described by `out_exprs` {self.out_exprs}")
    def out_func(self, in_state):
        """Compute the output-port dict from the input-port dict `in_state`."""
        # lambdify deals in positional vectors, so convert dict -> vector
        # for the call, then vector -> dict for the result.
        in_state_vec = [in_state[p] for p in self.in_ports]
        out_state_vec = self._out_func_lambda(*in_state_vec)
        out_state_dict = {self.out_ports[i]:out_state_vec[i] for i in range(len(out_state_vec))}
        return out_state_dict
    @classmethod
    def compound(cls, name, models=[], connectivity=Connectivity(), iter_max=10):
        """
        Compose `models` by symbolic substitution; falls back to the
        superclass (numeric) compound when loops prevent substitution.
        """
        try:
            connectivity = connectivity.filtered_by_models(models)
            ports = [p for m in models for p in m.ports]
            ex_ports = [p for p in ports if p not in connectivity]
            ex_out_ports = [p for p in ex_ports if p.direction == port.direction.out]
            ex_in_ports = [p for p in ex_ports if p.direction == port.direction.inp]
            def _have_prereqs(model, state):
                # Does `state` contain all the prerequisite inputs for `model`?
                return all([p in state for p in model.in_ports])
            # External inputs start mapped to themselves (as symbols).
            state = {p:p for p in ex_in_ports}
            mods = set(models)
            i = 0
            while mods and i < iter_max:
                i += 1
                ready_mods = {mod for mod in mods if _have_prereqs(mod, state)}
                for mod in ready_mods:
                    state |= {op:oe.subs(state) for op,oe in mod.out_exprs.items()}
                state |= {pi:state[po] for po,pi in connectivity if po in state}
                mods -= ready_mods
            # Reaching the iteration cap implies an unresolvable loop.
            if i == iter_max:
                ls = connectivity.loops
                print("Found loops:",list(ls))
                raise NotImplementedError(
                    f"Reached max iteration limit ({iter_max}) but all models do not "
                    f"yet have their prerequisite inputs. Remaining models are {mods}")
            extra_symbols = {s for oe in state.values()
                for s in oe.free_symbols if s in ex_out_ports}
            if extra_symbols:
                raise AttributeError("Extra symbols found after substitution: {:}. Either "
                    "relabel as compound input port, or adjust internal connectivity "
                    "accordingly.".format(extra_symbols))
            return SymbolicModel(name=name, ports=ex_ports, out_exprs=state)
        except NotImplementedError:
            return super().compound(name=name, models=models, connectivity=connectivity)
wargs):
super().define(**kwargs)
self.properties.update({"optical", "time-independent"})
self.in_optical_ports = [p for p in self.in_ports if p.kind == port.kind.optical]
self.out_optical_ports = [p for p in self.out_ports if p.kind == port.kind.optical]
if unitary_matrix is None:
unitary_matrix = sympy.eye(len(self.out_optical_ports))
self.U = ImmutableMatrix(unitary_matrix)
if not self.U.is_square:
raise AttributeError("Linear model matrix (unitary_matrix) must be square.")
self.n_ins = self.U.rows
self.n_outs = self.U.rows
if len(self.in_optical_ports) != self.n_ins:
raise AttributeError(f"Number of input optical ports "
f"{len(self.in_optical_ports)} does not match dimension of model matrix "
f"({self.n_ins}) of model {self}:{self.name}. Add ports before adding "
f"model. Input ports were {self.in_ports} ({self.in_optical_ports} "
f"optical), output ports were {self.out_ports} ({self.out_optical_ports} "
f"optical).")
if len(self.out_optical_ports) != self.n_outs:
raise AttributeError("Number of output names {:} does not match dimension of "
"model matrix {:}. Add ports before adding model.".format(
len(self.out_optical_ports), self.n_outs))
self.out_exprs = {op:oe for op,oe in
zip(self.out_optical_ports, self.U * Matrix(self.in_optical_ports) ) }
@classmethod
def compound(cls, name, models, connectivity):
try:
if all([isinstance(m,Linear) for m in models]):
if connectivity.has_loops:
raise NotImplementedError("Unable to hybridise models of type '{:}' "
"containing loops".format(cls))
models = connectivity.order_models(models)
connectivity = connectivity.filtered_by_models(models)
ports = [p for m in models for p in m.ports]
ex_ports = [p for p in ports if p not in connectivity]
ex_out_ports = [p for p in ex_ports if p.direction == port.direction.out]
ex_in_ports = [p for p in ex_ports if p.direction == port.direction.inp]
modes = dict()
np = 0
iops = [p for p in ex_in_ports if p.kind == port.kind.optical]
oops = [p for p in ex_out_ports if p.kind == port.kind.optical]
assert len(iops) == len(oops), ("Numbers of input and output optical ports does "
"not match")
for ip,op in zip(iops, oops):
modes[np] = {ip, op}
np += 1
for model in models:
for ip,op in zip(model.in_optical_ports, model.out_optical_ports):
matched = False
for mode,mode_ports in modes.items():
if (ip in mode_ports) or any([connectivity.test(xp,mp)
for mp in mode_ports for xp in [ip,op]]):
mode_ports |= {ip, op}
matched = True
break
if not matched:
modes[np] = {ip,op}
np += 1
mode_of_port = {p:m for m in modes for p in modes[m]}
U = sympy.eye(np)
for m in models:
mode_map = [mode_of_port[p] for p in m.in_optical_ports]
U0m = m.U
n = U0m.rows
Um = Matrix(Matrix.diag(sympy.eye(np), U0m, sympy.eye(np - n)))
for i,j in enumerate(mode_map):
Um.row_swap(i+np,j)
Um.col_swap(i+np,j)
for _ in range(np):
Um.row_del(np)
Um.col_del(np)
U = U * Um
return Linear(name=name, ports=ex_ports, unitary_matrix=U)
raise NotImplementedError("Linear unable to compound input models {:}".format(
[m for m in models]))
except NotImplementedError:
return super().compound(name=name, models=models, connectivity=connectivity)
class LinearGroupDelay(Linear):
    """Linear optical model including lumped group delay."""
    def define(self, delay=0, **kwargs):
        super().define(**kwargs)
        # A lumped delay makes the model discrete-time, not time-independent.
        self.properties.add("discrete-time")
        try:
            self.properties.remove("time-independent")
        except KeyError:
            pass
        self.delay = delay
        # NOTE(review): loop variable shadows the `arch.port` module locally.
        for port in self.ports:
            port.data['delay'] = self.delay
    @classmethod
    def compound(cls, name, models, connectivity):
        # NOTE(review): the compound model is computed but never returned
        # (method returns None) — this looks like a missing `return new_mod`.
        new_mod = Linear.compound(name,
            models=models, connectivity=connectivity)
class SourceModel(SymbolicModel):
    """Model for sources."""
    def define(self, **kwargs):
        super().define(**kwargs)
        self.properties.add("source")
        # Cache the optical subset of the output ports.
        self.out_optical_ports = [p for p in self.out_ports if p.kind == port.kind.optical]
| true
| true
|
1c4a46e3681387e261c445f0fe0ee20614d7a18e
| 26,847
|
py
|
Python
|
src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/aio/operations/_nat_gateways_operations.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 207
|
2017-11-29T06:59:41.000Z
|
2022-03-31T10:00:53.000Z
|
src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/aio/operations/_nat_gateways_operations.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 4,061
|
2017-10-27T23:19:56.000Z
|
2022-03-31T23:18:30.000Z
|
src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/aio/operations/_nat_gateways_operations.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 802
|
2017-10-11T17:36:26.000Z
|
2022-03-31T22:24:32.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NatGatewaysOperations:
"""NatGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2021_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    async def _delete_initial(
        self,
        resource_group_name: str,
        nat_gateway_name: str,
        **kwargs: Any
    ) -> None:
        # Issues the raw DELETE request; long-running-operation polling is
        # handled by `begin_delete`.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202: deletion accepted; 204: resource already absent.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        nat_gateway_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes the specified nat gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param nat_gateway_name: The name of the nat gateway.
        :type nat_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # First call: fire the initial DELETE. The `cls` lambda returns
            # the raw pipeline response so the poller can read LRO headers.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                nat_gateway_name=nat_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # Delete completion is signalled via the Location header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        nat_gateway_name: str,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> "_models.NatGateway":
        """Gets the specified nat gateway in a specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param nat_gateway_name: The name of the nat gateway.
        :type nat_gateway_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NatGateway, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2021_03_01.models.NatGateway
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NatGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # $expand is optional and only sent when requested by the caller.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('NatGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}  # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        nat_gateway_name: str,
        parameters: "_models.NatGateway",
        **kwargs: Any
    ) -> Optional["_models.NatGateway"]:
        # Issues the raw PUT request; long-running-operation polling is
        # handled by `begin_create_or_update`.
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.NatGateway"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'NatGateway')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 202 Accepted carries no body, so `deserialized` may remain None.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('NatGateway', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('NatGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        nat_gateway_name: str,
        parameters: "_models.NatGateway",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.NatGateway"]:
        """Creates or updates a nat gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param nat_gateway_name: The name of the nat gateway.
        :type nat_gateway_name: str
        :param parameters: Parameters supplied to the create or update nat gateway operation.
        :type parameters: ~azure.mgmt.network.v2021_03_01.models.NatGateway
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either NatGateway or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2021_03_01.models.NatGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NatGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # First call: fire the initial PUT. The `cls` lambda returns
            # the raw pipeline response so the poller can read LRO headers.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                nat_gateway_name=nat_gateway_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('NatGateway', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # Completion is signalled via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}  # type: ignore
async def update_tags(
    self,
    resource_group_name: str,
    nat_gateway_name: str,
    parameters: "_models.TagsObject",
    **kwargs: Any
) -> "_models.NatGateway":
    """Updates nat gateway tags.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param nat_gateway_name: The name of the nat gateway.
    :type nat_gateway_name: str
    :param parameters: Parameters supplied to update nat gateway tags.
    :type parameters: ~azure.mgmt.network.v2021_03_01.models.TagsObject
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: NatGateway, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2021_03_01.models.NatGateway
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.NatGateway"]
    # Map common ARM failure codes onto typed azure-core exceptions.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.update_tags.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'TagsObject')
    body_content_kwargs['content'] = body_content
    # PATCH: only the tags are replaced, not the whole resource.
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('NatGateway', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}  # type: ignore
def list_all(
    self,
    **kwargs: Any
) -> AsyncIterable["_models.NatGatewayListResult"]:
    """Gets all the Nat Gateways in a subscription.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either NatGatewayListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_03_01.models.NatGatewayListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.NatGatewayListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page is built from the operation URL template; subsequent
        # pages follow the service-supplied next_link verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_all.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation link, page items).
        deserialized = self._deserialize('NatGatewayListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/natGateways'}  # type: ignore
def list(
    self,
    resource_group_name: str,
    **kwargs: Any
) -> AsyncIterable["_models.NatGatewayListResult"]:
    """Gets all nat gateways in a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either NatGatewayListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_03_01.models.NatGatewayListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.NatGatewayListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page is built from the operation URL template; subsequent
        # pages follow the service-supplied next_link verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation link, page items).
        deserialized = self._deserialize('NatGatewayListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways'}  # type: ignore
| 49.080439
| 191
| 0.665139
|
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NatGatewaysOperations:
    """Async operations on Azure NAT gateway resources (API version 2021-03-01).

    Wraps delete/get/create-or-update/update-tags/list operations over the
    ``Microsoft.Network/natGateways`` resource type, sending requests through
    the supplied pipeline client and (de)serializing with the supplied
    serializer/deserializer.
    """

    # Alias so callers can reach the generated model classes from the operations object.
    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client            # pipeline client used to send HTTP requests
        self._serialize = serializer     # serializes URL/query/header/body values
        self._deserialize = deserializer # turns pipeline responses into model objects
        self._config = config            # holds subscription_id, polling_interval, ...
async def _delete_initial(
    self,
    resource_group_name: str,
    nat_gateway_name: str,
    **kwargs: Any
) -> None:
    """Send the initial DELETE request of the long-running delete operation.

    Returns None (or ``cls(...)`` if supplied) on 200/202/204; raises
    ``HttpResponseError`` for any other status.
    """
    cls = kwargs.pop('cls', None)
    # Map common ARM failure codes onto typed azure-core exceptions.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    accept = "application/json"

    # Build the request URL from the operation's URL template.
    url = self._delete_initial.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    header_parameters = {}
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200/204: finished synchronously; 202: accepted for asynchronous processing.
    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
async def begin_delete(
    self,
    resource_group_name: str,
    nat_gateway_name: str,
    **kwargs: Any
) -> AsyncLROPoller[None]:
    """Deletes the specified nat gateway, returning a poller for the LRO.

    :param resource_group_name: The name of the resource group.
    :param nat_gateway_name: The name of the nat gateway.
    :keyword polling: True for default ARM polling, False for no polling,
        or a custom polling method instance.
    :keyword continuation_token: Rebuild the poller from a saved token
        instead of sending a new initial request.
    :return: AsyncLROPoller yielding None when the delete completes.
    """
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)
    if cont_token is None:
        # No continuation token: fire the initial DELETE now.
        # cls=lambda keeps the raw pipeline response for the poller.
        raw_result = await self._delete_initial(
            resource_group_name=resource_group_name,
            nat_gateway_name=nat_gateway_name,
            cls=lambda x,y,z: x,
            **kwargs
        )

    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Delete has no response body to deserialize; forward to cls if given.
        if cls:
            return cls(pipeline_response, None, {})

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # Delete polls the Location header for its final state.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
async def get(
    self,
    resource_group_name: str,
    nat_gateway_name: str,
    expand: Optional[str] = None,
    **kwargs: Any
) -> "_models.NatGateway":
    """Gets the specified nat gateway in the specified resource group.

    :param resource_group_name: The name of the resource group.
    :param nat_gateway_name: The name of the nat gateway.
    :param expand: Optional ``$expand`` query value; omitted when None.
    :return: The deserialized NatGateway, or the result of cls(response).
    :raises ~azure.core.exceptions.HttpResponseError: on any non-200 status.
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    accept = "application/json"

    # Build the request URL from the operation's URL template.
    url = self.get.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

    header_parameters = {}
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('NatGateway', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
async def _create_or_update_initial(
    self,
    resource_group_name: str,
    nat_gateway_name: str,
    parameters: "_models.NatGateway",
    **kwargs: Any
) -> Optional["_models.NatGateway"]:
    """Send the initial PUT of the long-running create-or-update operation.

    Returns a deserialized NatGateway for 200/201 responses; a 202 response
    carries no body here and yields None.
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Build the request URL from the operation's URL template.
    url = self._create_or_update_initial.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    header_parameters = {}
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the NatGateway model as the PUT body.
    body_content_kwargs = {}
    body_content = self._serialize.body(parameters, 'NatGateway')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    # Both 200 (updated) and 201 (created) return a NatGateway body.
    if response.status_code == 200:
        deserialized = self._deserialize('NatGateway', pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize('NatGateway', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
async def begin_create_or_update(
    self,
    resource_group_name: str,
    nat_gateway_name: str,
    parameters: "_models.NatGateway",
    **kwargs: Any
) -> AsyncLROPoller["_models.NatGateway"]:
    """Creates or updates a nat gateway, returning a poller for the LRO.

    :param resource_group_name: The name of the resource group.
    :param nat_gateway_name: The name of the nat gateway.
    :param parameters: NatGateway model with the desired resource state.
    :keyword polling: True for default ARM polling, False for no polling,
        or a custom polling method instance.
    :keyword continuation_token: Rebuild the poller from a saved token
        instead of sending a new initial request.
    :return: AsyncLROPoller that yields the final NatGateway.
    """
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)
    if cont_token is None:
        # No continuation token: fire the initial PUT now.
        # cls=lambda keeps the raw pipeline response for the poller.
        raw_result = await self._create_or_update_initial(
            resource_group_name=resource_group_name,
            nat_gateway_name=nat_gateway_name,
            parameters=parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )

    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body into a NatGateway model.
        deserialized = self._deserialize('NatGateway', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # Create/update polls the Azure-AsyncOperation header for its final state.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
async def update_tags(
    self,
    resource_group_name: str,
    nat_gateway_name: str,
    parameters: "_models.TagsObject",
    **kwargs: Any
) -> "_models.NatGateway":
    """Updates nat gateway tags via a PATCH request.

    :param resource_group_name: The name of the resource group.
    :param nat_gateway_name: The name of the nat gateway.
    :param parameters: TagsObject carrying the new tags.
    :return: The deserialized NatGateway, or the result of cls(response).
    :raises ~azure.core.exceptions.HttpResponseError: on any non-200 status.
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Build the request URL from the operation's URL template.
    url = self.update_tags.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    header_parameters = {}
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # PATCH: only the tags are replaced, not the whole resource.
    body_content_kwargs = {}
    body_content = self._serialize.body(parameters, 'TagsObject')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('NatGateway', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
def list_all(
    self,
    **kwargs: Any
) -> AsyncIterable["_models.NatGatewayListResult"]:
    """Gets all the Nat Gateways in a subscription.

    :return: AsyncItemPaged of NatGatewayListResult pages.
    :raises ~azure.core.exceptions.HttpResponseError: on any non-200 page fetch.
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page from the operation URL template; later pages follow
        # the service-supplied next_link verbatim.
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            url = self.list_all.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation link, page items).
        deserialized = self._deserialize('NatGatewayListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/natGateways'}
def list(
    self,
    resource_group_name: str,
    **kwargs: Any
) -> AsyncIterable["_models.NatGatewayListResult"]:
    """Gets all nat gateways in a resource group.

    :param resource_group_name: The name of the resource group.
    :return: AsyncItemPaged of NatGatewayListResult pages.
    :raises ~azure.core.exceptions.HttpResponseError: on any non-200 page fetch.
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page from the operation URL template; later pages follow
        # the service-supplied next_link verbatim.
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            url = self.list.metadata['url']
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation link, page items).
        deserialized = self._deserialize('NatGatewayListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping HTTP error statuses to typed exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways'}
| true
| true
|
1c4a47a2ef218891b453fdd516b5d165a1dedf97
| 6,511
|
py
|
Python
|
FACTScontrol.py
|
amahoro12/anne
|
9b68c71c491bde4f57c2cbbf78a377239a9026d8
|
[
"MIT"
] | null | null | null |
FACTScontrol.py
|
amahoro12/anne
|
9b68c71c491bde4f57c2cbbf78a377239a9026d8
|
[
"MIT"
] | null | null | null |
FACTScontrol.py
|
amahoro12/anne
|
9b68c71c491bde4f57c2cbbf78a377239a9026d8
|
[
"MIT"
] | null | null | null |
import pandapower.control as ct
## Creates custom PI-controllers for the shunt and series FACTS device used in the test network.
# SHUNT CONTROLLER
class ShuntFACTS(ct.basic_controller.Controller):
    """PI-controlled shunt FACTS device for pandapower.

    Drives the reactive power of shunt element ``shuntIndex`` so that the
    voltage magnitude at bus ``busVoltageInd`` converges towards 1.0 pu,
    saturating at +/- ``q_mvar_rating`` MVAr.
    """

    def __init__(self, net, busVoltageInd, convLim, shuntIndex=0, q_mvar_rating=50, max_iter=30, in_service=True,
                 recycle=False, order=0, level=0, **kwargs):
        # Register the controller on the network via the pandapower base class.
        super().__init__(net, in_service=in_service, recycle=recycle, order=order, level=level,
                         initial_powerflow=True, **kwargs)
        # Element / measurement locations.
        self.shuntIndex = shuntIndex
        self.busVoltageInd = busVoltageInd
        # Setpoint and convergence handling.
        self.ref = 1.0                  # voltage setpoint [pu]
        self.convLim = convLim          # |meas - ref| band that counts as converged
        self.meas = self.net.res_bus.vm_pu[busVoltageInd]
        self.applied = False
        # Symmetric output limits from the device rating.
        self.q_mvar_max = q_mvar_rating
        self.q_mvar_min = -q_mvar_rating
        # Iteration bookkeeping.
        self.iter_counter = 0           # control steps taken so far
        self.maxed_counter = 0          # steps spent with the output at a limit
        self.max_iter = max_iter        # hard cap on control steps
        # PI state.
        self.v_delta = 0
        self.v_delta_accum = 0

    def is_converged(self):
        """Return True once the voltage error is inside the band, the output
        has sat at its limit for several steps, or max_iter is reached."""
        self.meas = self.net.res_bus.vm_pu[self.busVoltageInd]
        in_band = abs(self.meas - self.ref) < self.convLim
        saturated = self.maxed_counter >= 4
        out_of_iters = self.iter_counter == self.max_iter
        if in_band or saturated or out_of_iters:
            self.applied = True
        return self.applied

    def control_step(self):
        """Apply one PI update to the shunt's q_mvar output."""
        self.meas = self.net.res_bus.vm_pu[self.busVoltageInd]
        self.v_delta = self.meas - self.ref
        # kp=10 makes the proportional term hit the rating at |v_delta| = 0.1 pu.
        kp, ki = 10, 15
        output = kp * self.q_mvar_max * self.v_delta + ki * self.q_mvar_max * self.v_delta_accum
        # Saturate at the device rating (small tolerance) and note the limit hit.
        if output + 0.00001 >= self.q_mvar_max:
            output = self.q_mvar_max
            self.maxed_counter += 1
        elif output - 0.00001 <= self.q_mvar_min:
            output = self.q_mvar_min
            self.maxed_counter += 1
        self.net.shunt.q_mvar[self.shuntIndex] = output
        # Accumulate the integral term and count this step.
        self.v_delta_accum += self.v_delta
        self.iter_counter += 1

    def finalize_control(self):
        """Reset per-run state so the controller can be reused."""
        self.applied = False
        self.v_delta_accum = 0
        self.iter_counter = 0
        self.maxed_counter = 0
# Series CONTROLLER
class SeriesFACTS(ct.basic_controller.Controller):
    """PI-controlled series FACTS device for pandapower.

    Adjusts the series impedance element ``serIndex`` so that the loading of
    line ``lineLPInd`` converges towards 50 %, with the compensating reactance
    limited to +/- ``x_comp_rating`` times the line reactance ``x_line_pu``.
    The device is bypassed via switch ``switchInd`` when its commanded
    reactance is effectively zero.
    """

    def __init__(self, net, lineLPInd, convLim, x_line_pu, max_iter=30, switchInd=1, serIndex=0, x_comp_rating=0.4,
                 in_service=True, recycle=False, order=0, level=0, **kwargs):
        # Register the controller on the network via the pandapower base class.
        super().__init__(net, in_service=in_service, recycle=recycle, order=order, level=level,
                         initial_powerflow=True, **kwargs)
        # Element / measurement locations.
        self.switchInd = switchInd      # bypass switch of the series device
        self.x_line_pu = x_line_pu      # line reactance the compensation scales with
        self.serIndex = serIndex        # impedance element modelling the device
        self.lineLPInd = lineLPInd      # line whose loading_percent is controlled
        # Setpoint and convergence handling.
        self.ref = 50                   # loading setpoint [%]
        self.convLim = convLim          # band on (meas - ref)/100 that counts as converged
        self.meas = 0
        self.applied = False
        # Symmetric compensation limits relative to the line reactance.
        self.x_comp_max = x_comp_rating
        self.x_comp_min = -x_comp_rating
        # Iteration bookkeeping.
        self.iter_counter = 0           # control steps taken so far
        self.maxed_counter = 0          # steps spent with the output at a limit
        self.max_iter = max_iter
        # PI state.
        self.lp_delta = 0
        self.lp_delta_accum = 0

    def is_converged(self):
        """Return True once the loading error is inside the band, the output
        has sat at its limit for several steps, or max_iter is reached."""
        self.meas = self.net.res_line.loading_percent[self.lineLPInd]
        in_band = abs(self.meas - self.ref) / 100 < self.convLim
        saturated = self.maxed_counter >= 4
        out_of_iters = self.iter_counter == self.max_iter
        if in_band or saturated or out_of_iters:
            self.applied = True
        return self.applied

    def control_step(self):
        """Apply one PI update to the series reactance, bypassing the device
        through its switch when the commanded value is effectively zero."""
        # Open the bypass so the impedance element is in circuit.
        self.net.switch.closed[self.switchInd] = False
        self.meas = self.net.res_line.loading_percent[self.lineLPInd]
        self.lp_delta = (self.meas - self.ref) / 100  # normalise to a 0-1 scale
        kp, ki = 20, 15
        op = self.x_line_pu * (kp * self.x_comp_max * self.lp_delta + ki * self.x_comp_max * self.lp_delta_accum)
        # Saturate at the rated compensation (small tolerance) and note the limit hit.
        if op + 0.00001 >= self.x_line_pu * self.x_comp_max:
            op = self.x_line_pu * self.x_comp_max
            self.maxed_counter += 1
        elif op - 0.00001 <= self.x_line_pu * self.x_comp_min:
            op = self.x_line_pu * self.x_comp_min
            self.maxed_counter += 1
        if abs(op) < 0.0001:
            # Near-zero compensation: close the bypass to help convergence.
            self.net.switch.closed[self.switchInd] = True
        else:
            # Apply the commanded reactance in both directions of the element.
            self.net.impedance.loc[self.serIndex, ['xft_pu', 'xtf_pu']] = op
        # Accumulate the integral term and count this step.
        self.lp_delta_accum += self.lp_delta
        self.iter_counter += 1

    def finalize_control(self):
        """Reset per-run state so the controller can be reused."""
        self.applied = False
        self.lp_delta_accum = 0
        self.iter_counter = 0
        self.maxed_counter = 0
| 43.993243
| 121
| 0.657196
|
import pandapower.control as ct
convLim, shuntIndex=0, q_mvar_rating=50, max_iter=30, in_service=True,
recycle=False, order=0, level=0, **kwargs):
super().__init__(net, in_service=in_service, recycle=recycle, order=order, level=level,
initial_powerflow=True, **kwargs)
self.shuntIndex = shuntIndex
self.busVoltageInd = busVoltageInd
self.ref = 1.0
self.convLim = convLim
self.meas = self.net.res_bus.vm_pu[busVoltageInd]
self.applied = False
self.q_mvar_max = q_mvar_rating
self.q_mvar_min = -q_mvar_rating
self.iter_counter = 0
self.maxed_counter = 0
self.max_iter = max_iter
self.v_delta = 0
self.v_delta_accum = 0
def is_converged(self):
self.meas = self.net.res_bus.vm_pu[self.busVoltageInd]
if abs(self.meas - self.ref) < self.convLim or self.maxed_counter >= 4 or self.iter_counter == self.max_iter:
self.applied = True
return self.applied
def control_step(self):
self.meas = self.net.res_bus.vm_pu[self.busVoltageInd]
self.v_delta = self.meas - self.ref
K_p = 10
K_i = 15
self.net.shunt.q_mvar[self.shuntIndex] = K_p * self.q_mvar_max * (
self.v_delta) + K_i * self.q_mvar_max * self.v_delta_accum
if self.net.shunt.q_mvar[self.shuntIndex] + 0.00001 >= self.q_mvar_max:
self.net.shunt.q_mvar[self.shuntIndex] = self.q_mvar_max
self.maxed_counter += 1
elif self.net.shunt.q_mvar[self.shuntIndex] - 0.00001 <= self.q_mvar_min:
self.net.shunt.q_mvar[self.shuntIndex] = self.q_mvar_min
self.maxed_counter += 1
# Update for posible next iter of control
self.v_delta_accum += self.v_delta
self.iter_counter += 1
# Finalize function MIGHT BE NEEDED IF RESET OF SOME CLASS VARIABLES NEEDED: DEPENDS ON HOW CALLED IN MAIN MODEL
def finalize_control(self):
self.applied = False
self.v_delta_accum = 0
self.iter_counter = 0 # count number of iterations
self.maxed_counter = 0 # To count iterations if maxed out and cant converge to v_ref
# Series CONTROLLER: PI regulation of line loading through a series impedance
class SeriesFACTS(ct.basic_controller.Controller):
    """PI controller for a series-compensation FACTS device in pandapower.

    Adjusts the reactance of impedance element ``serIndex`` so that the
    loading of line ``lineLPInd`` converges toward ``ref`` (50 %).  When the
    required compensation is negligible, the bypass switch ``switchInd`` is
    closed instead of driving the impedance.
    """

    def __init__(self, net, lineLPInd, convLim, x_line_pu, max_iter=30, switchInd=1, serIndex=0, x_comp_rating=0.4,
                 in_service=True, recycle=False, order=0, level=0, **kwargs):
        # construct through superclass (runs an initial power flow)
        super().__init__(net, in_service=in_service, recycle=recycle, order=order, level=level,
                         initial_powerflow=True, **kwargs)
        # Initialise controller state
        self.switchInd = switchInd  # index of the bypass switch in net.switch
        self.x_line_pu = x_line_pu  # line reactance used to scale the output (p.u.)
        self.serIndex = serIndex  # index of the series impedance in net.impedance
        self.lineLPInd = lineLPInd  # index of the monitored line in net.res_line
        self.ref = 50  # loading reference to reach (percent)
        self.convLim = convLim  # convergence threshold (per-unit loading error)
        self.meas = 0  # last measured loading percent
        self.applied = False  # True once regulation is finished
        self.x_comp_max = x_comp_rating  # upper output bound (fraction of line reactance)
        self.x_comp_min = -x_comp_rating  # lower output bound
        self.iter_counter = 0  # count number of iterations
        self.maxed_counter = 0  # iterations where the output saturated without converging
        self.max_iter = max_iter  # iteration budget before giving up
        self.lp_delta = 0  # last loading error (per unit)
        self.lp_delta_accum = 0  # integral of the loading error

    # return boolean for if controller has converged to ref value
    def is_converged(self):
        """Return True when regulation is finished (converged or given up)."""
        self.meas = self.net.res_line.loading_percent[self.lineLPInd]
        # Converged if within limit, or output maxed on at least four
        # iterations without convergence, or the iteration budget is spent.
        if abs(self.meas - self.ref)/100 < self.convLim or self.maxed_counter >= 4 or self.iter_counter == self.max_iter:
            self.applied = True
        return self.applied

    # In case the controller is not yet converged, the control step is executed.
    def control_step(self):
        """Execute one PI iteration on the series compensation."""
        # Open the bypass switch so the series impedance is in circuit.
        self.net.switch.closed[self.switchInd] = False
        # Measurement
        self.meas = self.net.res_line.loading_percent[self.lineLPInd]
        self.lp_delta = (self.meas - self.ref) / 100  # div by 100 to get value between 0-1
        # Control Coefficients (proportional / integral)
        K_p = 20
        K_i = 15
        # PI-control equation, scaled by the line reactance
        op = self.x_line_pu * (K_p * self.x_comp_max * (self.lp_delta) + K_i * self.x_comp_max * self.lp_delta_accum)
        # Clamp the output to the compensation rating (tolerance absorbs
        # rounding); count saturated steps so is_converged() can abort.
        # NOTE(review): the clamp comparisons assume x_line_pu >= 0 — confirm.
        if op + 0.00001 >= self.x_line_pu * self.x_comp_max:
            op = self.x_line_pu * self.x_comp_max
            self.maxed_counter += 1
        elif op - 0.00001 <= self.x_line_pu * self.x_comp_min:
            op = self.x_line_pu * self.x_comp_min
            self.maxed_counter += 1
        if abs(op) < 0.0001:
            # Negligible compensation required: bypass the device entirely.
            self.net.switch.closed[self.switchInd] = True
        else:
            self.net.impedance.loc[self.serIndex, ['xft_pu', 'xtf_pu']] = op
        # Book-keeping for the next iteration of the control loop
        self.lp_delta_accum += self.lp_delta
        self.iter_counter += 1

    def finalize_control(self):
        """Reset per-run state so the controller can be reused."""
        self.applied = False
        self.lp_delta_accum = 0
        self.iter_counter = 0
        self.maxed_counter = 0
| true
| true
|
1c4a482bb9e01f84eb19da55a5549ca75bbd457d
| 128,109
|
py
|
Python
|
tests/arm_tests.py
|
SantiagoRomani/gdb_arm
|
9e3c2eec2c41337b2a88222a87ad0b2f418111c7
|
[
"MIT"
] | 2
|
2021-05-20T09:30:31.000Z
|
2022-02-23T03:34:51.000Z
|
tests/arm_tests.py
|
SantiagoRomani/gdb_arm
|
9e3c2eec2c41337b2a88222a87ad0b2f418111c7
|
[
"MIT"
] | null | null | null |
tests/arm_tests.py
|
SantiagoRomani/gdb_arm
|
9e3c2eec2c41337b2a88222a87ad0b2f418111c7
|
[
"MIT"
] | null | null | null |
""" Groups of tests for gdb_arm """
from num_analyzer import NumberAnalyzer
from string_analyzer import CharAnalyzer
from string_analyzer import StringAnalyzer
from data_analyzer import DataAnalyzer
from adr_analyzer import AddressAnalyzer
from reg_analyzer import RegisterAnalyzer
from reg_analyzer import RegisterBitsAnalyzer
from reg_analyzer import RegisterListAnalyzer
from imm_analyzer import ImmediateOpAnalyzer
from imm_analyzer import ImmediateRSAnalyzer
from op2_analyzer import Op2Analyzer
from opdat_analyzer import OpdatAnalyzer
from instdat_analyzer import InstdatAnalyzer
from instmul_analyzer import InstmulAnalyzer
from instjmp_analyzer import InstjmpAnalyzer
from opldst_analyzer import Opldst2Analyzer
from opldst_analyzer import Opldst3Analyzer
from instmem_analyzer import InstmemAnalyzer
from instmsc_analyzer import InstmscAnalyzer
from arm_analyzer import ArmAnalyzer
# Shared analyzer instances used by the test tables below; each instance
# parses one syntactic category of the ARM assembler grammar.
number_analyzer = NumberAnalyzer()
char_analyzer = CharAnalyzer()
string_analyzer = StringAnalyzer()
data_analyzer = DataAnalyzer()
address_analyzer = AddressAnalyzer()
register_analyzer = RegisterAnalyzer()
regbit_analyzer = RegisterBitsAnalyzer()
reglst_analyzer = RegisterListAnalyzer()
immediate_op_analyzer = ImmediateOpAnalyzer()
immediate_sr_analyzer = ImmediateRSAnalyzer()
op2_analyzer = Op2Analyzer()
opdat_analyzer = OpdatAnalyzer()
instdat_analyzer = InstdatAnalyzer()
instmul_analyzer = InstmulAnalyzer()
instjmp_analyzer = InstjmpAnalyzer()
opldst2_analyzer = Opldst2Analyzer()
opldst3_analyzer = Opldst3Analyzer()
instmem_analyzer = InstmemAnalyzer()
instmsc_analyzer = InstmscAnalyzer()
arm_analyzer = ArmAnalyzer()
# Hexadecimal-literal test vectors: (input, expected values, status code);
# status 1000 means success, negative values are error codes.  The trailing
# comments trace the expected grammar-transition path.
hex_test = [('', [], -1001),  # T10.0.0 error: empty input
            (' ', [], -1001),  # T10.0.1 > T10.0.0 error: white spaces
            ('0x', [], -1005),  # T10.0.3 > T10.2.0 error: leading '0x', missing hex digits
            (' 0x', [], -1005),  # T10.0.1 > T10.0.3 > T10.2.0 / idem with leading white space
            ('0x1', [1], 1000),  # T10.0.3 > T10.2.1 hex number: single digit
            (' 0x1', [1], 1000),  # T10.0.1 > T10.0.3 > T10.2.1 / idem with white leading space
            (' 0xA', [10], 1000),  # T10.0.1 > T10.0.3 > T10.2.1 / idem with a letter digit
            ('0x01', [1], 1000),  # T10.0.3 > T10.2.1 / with leading zeros
            (' 0x001', [1], 1000),  # T10.0.1 > T10.0.3 > T10.2.1 / idem with leading spaces
            ('0x10', [16], 1000),  # T10.0.3 > T10.2.1 / two digits
            ('0x2864', [10340], 1000),  # T10.0.3 > T10.2.1 / four digits
            ('0xF3AE', [62382], 1000),  # T10.0.3 > T10.2.1 / four digits, with hex letters
            ('0xb14a', [45386], 1000),  # T10.0.3 > T10.2.1 / (lower case hex letters)
            ('0xb14A', [45386], 1000),  # T10.0.3 > T10.2.1 / (mixed lower / upper case)
            ('0xR124', [], -1005),  # T10.0.3 > T10.2.2 error: illegal digits (first one)
            ('0x51V4', [], -1005),  # T10.0.3 > T10.2.2 / (third one)
            ('0x514W', [], -1005),  # T10.0.3 > T10.2.2 / (last one)
            ('0x10002EF0', [268447472], 1000),  # T10.0.3 > T10.2.1 big hex number: eight digits
            ('0x10002EF00', [], -1006)  # T10.0.3 > T10.2.1+override too long number: nine digits (>=2^32)
            ]
# Decimal-literal test vectors: (input, expected values, status code);
# 1000 = success, negative = error code.
dec_test = [('0', [0], 1000),  # T10.0.4 > T10.3.0 dec/oct number: the zero
            (' 0', [0], 1000),  # T10.0.1 > T10.0.4 > T10.3.0 / idem with leading space
            ('1', [1], 1000),  # T10.0.7 > T10.5.1 dec number: single digit
            (' 1', [1], 1000),  # T10.0.1 > T10.0.7 > T10.5.1 / idem with white space
            ('-1', [-1], 1000),  # T10.0.5 > T10.4.1 / negative number
            (' -1', [-1], 1000),  # T10.0.1 > T10.0.5 > T10.4.1 / negative num. with leading spaces
            ('10', [10], 1000),  # T10.0.7 > T10.5.1 / two digits
            ('2864', [2864], 1000),  # T10.0.7 > T10.5.1 / four digits
            ('-2864', [-2864], 1000),  # T10.0.5 > T10.4.1 / four digits negative number
            ('+2864', [2864], 1000),  # T10.0.6 > T10.6.1 / four digits positive number
            ('r12', [], -1001),  # T10.0.8 error: illegal digits (first one)
            ('5V6', [], -1004),  # T10.0.6 > T10.5.2 / (second one)
            ('514W', [], -1004),  # T10.0.6 > T10.5.2 / (last one)
            ('-', [], -1004),  # T10.0.5 > T10.4.0 / no digits digit after '-'
            ('+', [], -1004),  # T10.0.6 > T10.6.0 / no digits digit after '+'
            ('-r12', [], -1004),  # T10.0.5 > T10.4.2 / illegal first digit after '-'
            ('+r12', [], -1004),  # T10.0.6 > T10.6.2 / illegal first digit after '-'
            ('-5V6', [], -1004),  # T10.0.5 > T10.4.2 / illegal middle digit after '-'
            ('4684474720', [], -1006),  # T10.0.6 > T10.5.1+override long dec number (>=2^32)
            ('-2147483649', [], -1006)  # T10.0.5 > T10.4.1+override long neg. dec number (<-2^31)
            ]
# Octal-literal test vectors: (input, expected values, status code);
# 1000 = success, negative = error code.
oct_test = [('000', [0], 1000),  # T10.0.4 > T10.3.1 oct number: zeroes
            (' 00', [0], 1000),  # T10.0.1 > T10.0.4 > T10.3.1 / idem with leading space
            ('01', [1], 1000),  # T10.0.4 > T10.3.1 oct number: single digit
            (' 01', [1], 1000),  # T10.0.1 > T10.0.4 > T10.3.1 / idem with white space
            ('001', [1], 1000),  # T10.0.4 > T10.3.1 / several zeros before digit
            ('010', [8], 1000),  # T10.0.4 > T10.3.1 oct number: two digits
            ('02764', [1524], 1000),  # T10.0.4 > T10.3.1 / four digits
            ('02864', [], -1003),  # T10.0.4 > T10.3.2 error: malformed octal number
            ('0r12', [], -1003),  # T10.0.4 > T10.3.2 error: illegal digits (first one after first 0)
            ('05V6', [], -1003),  # T10.0.4 > T10.3.2 / (second one)
            ('0514W', [], -1003),  # T10.0.4 > T10.3.2 / (last one)
            ('00r12', [], -1003),  # T10.0.4 > T10.3.2 / illegal first digit after several 0s
            ('063710000000', [], -1006)  # T10.0.4 > T10.3.1+override long oct number (>=2^32)
            ]
# Binary-literal test vectors: (input, expected values, status code);
# 1000 = success, negative = error code.
bin_test = [('0b', [], -1002),  # T10.0.2 > T10.1.0 error: leading '0b', missing bin digits
            (' 0b', [], -1002),  # T10.0.1 > T10.0.2 > T10.1.0 / idem with leading white space
            ('0b1', [1], 1000),  # T10.0.2 > T10.1.1 bin number: single bit
            (' 0b1', [1], 1000),  # T10.0.1 > T10.0.2 > T10.1.1 / idem with white space
            (' 0b0', [0], 1000),  # T10.0.1 > T10.0.2 > T10.1.1 / idem white space & zero bit
            ('0b01', [1], 1000),  # T10.0.2 > T10.1.1 / leading zero
            (' 0b001', [1], 1000),  # T10.0.1 > T10.0.2 > T10.1.1 / leading spaces & leading zeros
            ('0b10', [2], 1000),  # T10.0.2 > T10.1.1 two bits
            ('0b0110', [6], 1000),  # T10.0.2 > T10.1.1 four bits
            ('0bR101', [], -1002),  # T10.0.2 > T10.1.2 error: illegal bits (first one)
            ('0b01V4', [], -1002),  # T10.0.2 > T10.1.2 / (third one)
            ('0b110W', [], -1002),  # T10.0.2 > T10.1.2 / (last one)
            ('0b0140', [], -1002),  # T10.0.2 > T10.1.2 / (non-binary digit)
            ('0b10000000000000001000000000000000', [2147516416], 1000),  # T10.0.2 > T10.1.1 32 bits
            ('0b100000000000000010000000000000001', [], -1006)  # T10.0.2 > T10.1.1+override 33 bits
            ]
# Single-quoted character test vectors: (input, expected char codes, status);
# 1000 = success, negative = error code.
chr_test = [('', [], -1101),  # T11.0.0 error: no single quote
            ("'", [], -1101),  # T11.0.2 > T11.1.0 error: open single quote, missing char
            (' n\'', [], -1101),  # T11.0.1 > T11.0.3 error: missing quote before characters
            ("''", [], -1102),  # T11.0.2 > T11.1.1 error: empty single quotes
            ("' ", [32], -1104),  # T11.0.2 > T11.1.2 > T11.2.0 error: unclosed single quoted char
            ("' 0", [32], -1105),  # T11.0.2 > T11.1.2 > T11.2.2 error: more than one character
            ("' '", [32], 1000),  # T11.0.2 > T11.1.2 > T11.2.1 successful single char capture
            (" ' '", [32], 1000),  # T11.0.1 > T11.0.2 > T11.1.2 > T11.2.1 / idem with leading space
            ('" "', [], -1101),  # T11.0.3 error: missing single quote
            ('\'\"\'', [34], 1000),  # T11.0.2 > T11.1.2 > T11.2.1 capture double quote as single char
            ('\'\n\'', [], -1103)  # T11.0.2 > T11.1.3 illegal character in single quotes
            ]
# Double-quoted string test vectors: (input, expected char codes, status);
# 1000 = success, negative = error code.
str_test = [('', [], -1201),  # T12.0.0 error: no double quote
            ("'", [], -1201),  # T12.0.3 error: unexpected single quote
            ('"', [], -1201),  # T12.0.2 > T12.1.0 error: open double quote, missing string
            (' n\"', [], -1201),  # T12.0.1 > T12.0.3 error: missing quote before characters
            ('""', [], -1202),  # T12.0.2 > T12.1.1 error: empty double quotes
            ('" ', [32], -1204),  # T12.0.2 > T12.1.2 > T12.2.0 error: unclosed double quotes
            ('" 0', [32, 48], -1204),  # T12.0.2 > T12.1.2 > T12.2.2 > T12.2.0 / idem with two chars
            ('" "', [32], 1000),  # T12.0.2 > T12.1.2 > T12.2.1 successful single-char string
            (' " "', [32], 1000),  # T12.0.1 > T12.0.2 > T12.1.2 > T12.2.1 / idem with leading space
            ('"0123456789"', [48, 49, 50, 51, 52, 53, 54, 55, 56, 57], 1000),  # T12.0.2 > T12.1.2 > T12.2.2 > T12.2.1
            ('"abcdefghijklmnopqrstuvwxyz"', [97, 98, 99, 100, 101, 102, 103,  # alphabetic digits
             104, 105, 106, 107, 108, 109, 110, 111, 112,
             113, 114, 115, 116, 117, 118, 119, 120, 121,
             122], 1000),  # lower case letters
            ('"ABCDEFGHIJKLMNOPQRSTUVWXYZ"', [65, 66, 67, 68, 69, 70, 71, 72,
             73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
             84, 85, 86, 87, 88, 89, 90], 1000),  # upper case letters
            ('"!#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~"', [33, 35, 36, 37, 38, 39,
             40, 41, 42, 43, 44, 45, 46, 47, 58, 59, 60,
             61, 62, 63, 64, 91, 92, 93, 94, 95, 96, 123,
             124, 125, 126], 1000),  # punctuation letters
            ('\"\'\"', [39], 1000),  # T12.0.2 > T12.1.2 > T12.2.1 capture single quote as a string
            ('\"\n\"', [], -1203),  # T12.0.2 > T12.1.3 illegal character after double quote
            ('\" \n\"', [32], -1203)  # T12.0.2 > T12.1.2 > T12.2.2 > T12.2.3 idem after a valid char
            ]
# Data-directive test vectors (.byte/.hword/.word/.ascii/.asciz):
# (input, expected values — first element is the element size in bytes —,
# status code); 1000 = success, negative = error code.
dat_test = [('', [], -2101),  # T21.0.0 error: missing data directive
            (' ', [], -2101),  # T21.0.1 > T21.0.0 idem with leading space
            ('.', [], -2101),  # T21.0.2 > T21.1.0 error: missing directive after '.'
            ('f', [], -2101),  # T21.0.3 error: missing '.'
            ('.f', [], -2104),  # T21.0.2 > T21.1.6 error: unknown data directive
            ('.byte', [], -2102),  # T21.0.2 > T21.1.1a error: missing data values
            ('.byte ', [1], -2102),  # T21.0.2 > T21.1.1b > T21.2.0 error: missing data values
            ('.byte2', [], -2103),  # T21.0.2 > T21.1.1c error: missing space after directive
            ('.byte 2', [1, 2], 1000),  # T21.0.2 > T21.1.1b > T21.2.1a success: get one byte
            ('.byte 20', [1, 20], 1000),  # T21.0.2 > T21.1.1b > T21.2.1a idem with two digits
            ('.byte -20', [1, 236], 1000),  # T21.0.2 > T21.1.1b > T21.2.1a idem with negative number
            ('.byte 2000', [1], -2107),  # T21.0.2 > T21.1.1b > T21.2.1a + override data >= 2**8
            ('.byte -200', [1], -2107),  # T21.0.2 > T21.1.1b > T21.2.1a + override data < -2**7
            ('.byte 45r', [1], -1004),  # T21.0.2 > T21.1.1b > T21.2.1a + override unexpected decimal digit
            ('.byte 45,', [1, 45], -2102),  # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.0 error: missing data
            ('.byte 45, ', [1, 45], -2106),  # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.3 unrecognizeable info
            ('.byte 200, 0xF4', [1, 200, 244], 1000),  # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.1a get two bytes
            ('.byte \'2\'', [1, 50], 1000),  # T21.0.2 > T21.1.1b > T21.2.2a success: get one char
            ('.byte \'2\', \'F\'', [1, 50, 70], 1000),  # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.2a get two chars
            ('.byte \'2\', 0123', [1, 50, 83], 1000),  # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.1a one char + one num.
            ('.byte \'2\' , 0123', [1, 50, 83], 1000),  # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.1a with extra space
            ('.byte \'2\', 0123 ', [1, 50, 83], 1000),  # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.1a with trailing space
            ('.byte 0b110, \'e\'', [1, 6, 101], 1000),  # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.2a one num. + one char
            ('.byte 0b110 , \'e\'', [1, 6, 101], 1000),  # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.2a with extra space
            ('.byte 0b110, \'e\' ', [1, 6, 101], 1000),
            # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.2a with trailing space
            ('.byte \'e\' c', [1], -2105),  # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.1c wrong delimiter
            ('.byte \'e\', c', [1, 101], -2106),  # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.3 unrecognizeable info
            ('.byte c', [1], -2106),  # T21.0.2 > T21.1.1b > T21.2.3 unrecognizeable info
            ('.hword', [], -2102),  # T21.0.2 > T21.1.2a error: missing data values
            ('.hword ', [2], -2102),  # T21.0.2 > T21.1.2b > T21.3.0 error missing halfwords
            ('.hword2', [], -2103),  # T21.0.2 > T21.1.2c error: missing space after directive
            ('.hword 2000', [2, 2000], 1000),  # T21.0.2 > T21.1.2b > T21.3.1a success: capture a halfword
            ('.hword 2000, 0b0010', [2, 2000, 2], 1000),  # T21.0.2 > T21.1.2b > T21.3.1b > T21.3.1a two halfwords
            ('.hword 02000, -1, 0xF00A', [2, 1024, 65535, 61450], 1000),  # success: three halfwords
            ('.hword \'e\'', [2], -2106),  # T21.0.2 > T21.1.2b > T21.3.2 unrecognizeable info
            ('.hword 045r', [2], -1003),  # T21.0.2 > T21.1.2b > T21.3.1a + override unexpected hexa digit
            ('.hword 45,', [2, 45], -2102),  # T21.0.2 > T21.1.2b > T21.3.1b > T21.3.0 error: missing data
            ('.hword 2 , -0123 ', [2, 2, 0xFF85], 1000),  # T21.0.2 > T21.1.2b > T21.3.1b > T21.3.1a extra space
            ('.hword -45000', [2], -2107),  # T21.0.2 > T21.1.2b > T21.3.1a + overrride error: data < -2**15
            ('.word', [], -2102),  # T21.0.2 > T21.1.3a error: missing data values
            ('.word ', [4], -2102),  # T21.0.2 > T21.1.3b > T21.4.0 error missing words
            ('.wordh', [], -2103),  # T21.0.2 > T21.1.3c error: missing space after directive
            ('.word 2000', [4, 2000], 1000),  # T21.0.2 > T21.1.3b > T21.4.1a success: capture a word
            ('.word -2147483648, 0b0010', [4, 2147483648, 0b0010], 1000),  # T21.0.2 > T21.1.3b > T21.4.1b > T21.4.1a
            ('.word 020000000, -1, 0x1F00A', [4, 0o20000000, 4294967295, 0x1F00A], 1000),  # three words
            ('.word r45', [4], -2106),  # T21.0.2 > T21.1.3b > T21.4.2 unrecognizeable info
            ('.word 0b45', [4], -1002),  # T21.0.2 > T21.1.3b > T21.4.1a + override unexpected binary digit
            ('.word 0x4X5', [4], -1005),  # T21.0.2 > T21.1.3b > T21.4.1a + override unexpected hexa digit
            ('.word 0x400000000', [4], -1006),  # T21.0.2 > T21.1.3b > T21.4.1a + override too long value (>2^32)
            ('.word 45,', [4, 45], -2102),  # T21.0.2 > T21.1.3b > T21.4.1b > T21.4.0 error: missing data
            ('.word 2 , -0123 ', [4, 2, 4294967173], 1000),  # T21.0.2 > T21.1.3b > T21.4.1b > T21.4.1a
            ('.word 4294967295', [4, 4294967295], 1000),  # T21.0.2 > T21.1.3b > T21.4.1a success: maximum int
            ('.ascii', [], -2102),  # T21.0.2 > T21.1.4a error: missing string
            ('.asciz', [], -2102),  # T21.0.2 > T21.1.5a error: missing string
            ('.ascii ', [1], -2102),  # T21.0.2 > T21.1.4b > T21.5.0 : missing string
            ('.asciz ', [1], -2102),  # T21.0.2 > T21.1.5b > T21.6.0 : missing string
            ('.ascii5', [], -2103),  # T21.0.2 > T21.1.4c error: missing space after directive
            ('.asciz8', [], -2103),  # T21.0.2 > T21.1.5c error: missing space after directive
            ('.ascii \' \'', [1, 32], 1000),  # T21.0.2 > T21.1.4b > T21.5.1a success: get one char
            ('.asciz \' \'', [1, 32, 0], 1000),  # T21.0.2 > T21.1.5b > T21.6.1a success: get one char + '\0'
            ('.ascii \'a\', \'b\' ,\'c\' , \'d\' ', [1, 97, 98, 99, 100], 1000),  # > T21.5.1b > T21.5.1a
            ('.asciz \'a\', \'b\' ,\'c\' , \'d\' ', [1, 97, 0, 98, 0, 99, 0, 100, 0], 1000),  # > T21.6.1b > T21.6.1a
            ('.ascii "0123456789"', [1, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57], 1000),  # T21.0.2 > T21.1.4b > T21.5.2a
            ('.asciz "abcdef"', [1, 97, 98, 99, 100, 101, 102, 0], 1000),  # T21.0.2 > T21.1.5b > T21.6.2a
            ('.ascii \"b\", \"a\"', [1, 98, 97], 1000),  # T21.0.2 > T21.1.4b > T21.5.2b > T21.5.2a
            ('.asciz \"a\", \"b\"', [1, 97, 0, 98, 0], 1000),  # T21.0.2 > T21.1.5b > T21.6.2b > T21.6.2a
            ('.ascii \"b\", \'a\'', [1, 98, 97], 1000),  # T21.0.2 > T21.1.4b > T21.5.2b > T21.5.1a
            ('.asciz \'a\', \"b\"', [1, 97, 0, 98, 0], 1000),  # T21.0.2 > T21.1.5b > T21.6.1b > T21.6.2a
            ('.ascii \' ', [1], -1104),  # T21.0.2 > T21.1.4b > T21.5.1a + override unclosed char
            ('.ascii \" ', [1], -1204),  # T21.0.2 > T21.1.4b > T21.5.2a + override unclosed string
            ('.asciz \' ', [1], -1104),  # T21.0.2 > T21.1.5b > T21.6.1a + override unclosed char
            ('.asciz \" ', [1], -1204),  # T21.0.2 > T21.1.5b > T21.6.2a + override unclosed string
            ('.ascii \'\'', [1], -1102),  # T21.0.2 > T21.1.4b > T21.5.1a + override empty char
            ('.ascii \"\"', [1], -1202),  # T21.0.2 > T21.1.4b > T21.5.2a + override empty string
            ('.asciz \'\'', [1], -1102),  # T21.0.2 > T21.1.5b > T21.6.1a + override empty char
            ('.asciz \"\"', [1], -1202),  # T21.0.2 > T21.1.5b > T21.6.2a + override empty string
            ('.ascii \' 0\'', [1], -1105),  # T21.0.2 > T21.1.4b > T21.5.2a + override more than one character
            ('.asciz \' 0\'', [1], -1105),  # T21.0.2 > T21.1.5b > T21.6.2a + override idem after .ascii
            ('.ascii \'a\', \"bc , \'d\"', [1, 97, 98, 99, 32, 44, 32, 39, 100], 1000),  # > T21.5.1b > T21.5.2a
            ('.asciz \',\', \",,\"', [1, 44, 0, 44, 44, 0], 1000),  # T21.0.2 > T21.1.5b > T21.6.1a success capture ','
            ('.ascii \'\t\'', [1], -1103),  # T21.0.2 > T21.1.4b > T21.5.1c + override illegal character ''
            ('.asciz \'\t\'', [1], -1103),  # T21.0.2 > T21.1.5b > T21.6.1c + override idem after .ascii
            ('.ascii \"\t\"', [1], -1203),  # T21.0.2 > T21.1.4b > T21.5.2c + override illegal character ""
            ('.asciz \" \t\"', [1], -1203),  # T21.0.2 > T21.1.5b > T21.6.2c + override idem after valid char
            ('.ascii \'"\'a', [1], -2105),  # T21.0.2 > T21.1.4b > T21.5.1c unexpected separator
            ('.ascii \"\'a\"b', [1], -2105),  # T21.0.2 > T21.1.4b > T21.5.2c unexpected separator
            ('.asciz \'"\'a', [1], -2105),  # T21.0.2 > T21.1.5b > T21.6.1c unexpected separator
            ('.asciz \"\'a\"b', [1], -2105),  # T21.0.2 > T21.1.5b > T21.6.2c unexpected separator
            ('.ascii \' a\'', [1], -1105),  # T21.0.2 > T21.1.4b > T21.5.2a + override more than one character
            ('.asciz \' a\'', [1], -1105),  # T21.0.2 > T21.1.5b > T21.6.2a + override idem after .ascii
            ('.ascii a\'', [1], -2106),  # T21.0.2 > T21.1.4b > T21.5.3 non recognizable info
            ('.asciz a\'', [1], -2106),  # T21.0.2 > T21.1.5b > T21.6.3 non recognizable info
            (' .asciz \'a\'', [1, 97, 0], 1000)  # T21.0.1 > T21.0.2 > T21.1.5b > T21.6.1a success with leading space
            ]
# Address test vectors: (input, expected address value, status code);
# 1000 = success, negative = error code.
adr_test = [('', [], -2001),  # T20.0.0 error: missing address
            (' ', [], -2001),  # T20.0.1 > T20.0.0 idem white leading space
            ('0x', [], -2002),  # T20.0.2 > T20.1.0 error: '0x' but missing hex digits
            ('x0', [], -2001),  # T20.0.3 error: missing address start
            (' 0x8001', [], -2003),  # T20.0.1 > T20.0.2 > T20.1.1a address but missing trailing space
            ('0xF3AE ', [0xF3AE], 1000),  # T20.0.2 > T20.0.2 > T20.1.1b success address with trailing space
            ('0xR124', [], -2003),  # T20.0.2 > T20.1.2 illegal address (first digit)
            ('0x51V4', [], -2003),  # T20.0.2 > T20.1.1c illegal address (in-the-middle)
            ('0x514W', [], -2003),  # T20.0.2 > T20.1.1c illegal address (last one)
            ('0xF0002E00 ', [0xF0002E00], 1000),  # T20.0.2 > T20.1.1b big hex address: eight digits
            ('0x10002EF00 ', [], -2004)  # T20.0.2 > T20.1.1b + override long hex address (> 2^32)
            ]
# Register-name test vectors: (input, expected register number, status code);
# 1000 = success, negative = error code.
reg_test = [('', [], -1301),  # T13.0.0 error: missing register
            (' ', [], -1301),  # T13.0.1 > T13.0.0 / idem with leading space
            ('1', [], -1302),  # T13.0.4 error: unknown register identifier
            ('r', [], -1303),  # T13.0.2 > T13.1.0 error: missing register number
            ('ra', [], -1304),  # T13.0.2 > T13.1.2 error: wrong reg number
            ('r1a', [], -1304),  # T13.0.2 > T13.1.2 error: wrong reg number
            ('r-1', [], -1304),  # T13.0.2 > T13.1.1 + override : negative reg number
            ('r16', [], -1304),  # T13.0.2 > T13.1.1 + override : too high reg number
            ('r12', [12], 1000),  # T13.0.2 > T13.1.1 success: two digit reg number
            ('r0', [0], 1000),  # T13.0.2 > T13.1.1 success: one digit reg number
            ('sp', [13], 1000),  # T13.0.3 success: stack pointer
            ('lr', [14], 1000),  # T13.0.3 success: link register
            ('pc', [15], 1000)  # T13.0.3 success: program counter
            ]
# Register-bitmask test vectors: (input, expected bitmask, status code);
# each set bit in the mask marks the corresponding register number;
# 1000 = success, negative = error code.
rbt_test = [('', [], -1401),  # T14.0.0 error: missing register
            (' ', [], -1401),  # T14.0.1 > T14.0.0 / idem with leading space
            ('1', [], -1302),  # T14.0.2c + override unknown register identifier
            ('r', [], -1303),  # T14.0.2a + override missing register number
            ('ra', [], -1304),  # T14.0.2a + override wrong reg number
            ('r1a', [], -1304),  # T14.0.2c + override wrong reg number
            ('r-1', [], -1303),  # T14.0.2b + override negative reg number
            ('r16', [], -1304),  # T14.0.2a + override too high reg number
            ('r0', [0x1], 1000),  # T14.0.2a success: single register
            ('r15', [0x8000], 1000),  # T14.0.2a : maximum single reg value
            ('r0-r5', [0x3F], 1000),  # T14.0.2b > T14.1.1 success: reg range (min, max)
            ('r12-r2', [0x1FFC], 1000),  # T14.0.2b > T14.1.1 : (max, min)
            ('lr-pc', [0xC000], 1000),  # T14.0.2b > T14.1.1 : (symbolic)
            ('sp-r12', [0x3000], 1000),  # T14.0.2b > T14.1.1 : (symbolic & numeric, two bits)
            ('sp-r13', [0x2000], 1000),  # T14.0.2b > T14.1.1 : (symbolic & numeric, one bit)
            ('r4-', [0x10], -1403),  # T14.0.2b > T14.1.0 error: missing second reg in range list
            ('r8-1', [0x100], -1302),  # T14.0.2a > T14.1.1 + override wrong second reg
            ('r9-r16', [0x200], -1304)  # T14.0.2a > T14.1.1 + override too high second reg number
            ]
# Register-list ({...}) test vectors: (input, expected bitmask, status code);
# 1000 = success, negative = error code.
rlt_test = [('', [], -1501),  # T15.0.0 error: missing register list
            (' ', [], -1501),  # T15.0.1 > T15.0.0 : idem with leading space
            ('1', [], -1502),  # T15.0.3 error: missing '{'
            ('{', [], -1503),  # T15.0.2 > T15.1.0 error: missing registers
            ('{1', [], -1302),  # T15.0.2 > T15.1.1a + override : unknown register identifier
            ('{r', [], -1303),  # T15.0.2 > T15.1.1a + override : missing register number
            ('{ra', [], -1304),  # T15.0.2 > T15.1.1a + override : wrong reg number
            ('{r1a', [], -1304),  # T15.0.2 > T15.1.1a + override : wrong reg number
            ('{r-1', [], -1303),  # T15.0.2 > T15.1.1a + override : negative reg number
            ('{r16', [], -1304),  # T15.0.2 > T15.1.1a + override : too high reg number
            ('{r0', [], -1503),  # T15.0.2 > T15.1.1a error: unclosed single register
            ('{r0}', [0x1], 1000),  # T15.0.2 > T15.1.1c success: single register
            ('{r0-r5}', [0x3F], 1000),  # T15.0.2 > T15.1.1c success: single range
            ('{r0-r5 }', [0x3F], 1000),  # : idem with trailing space
            ('{r12-r2, lr', [0x1FFC], -1503),  # > T15.1.1b > T15.1.1a error: missing '}' after list
            ('{r12 - r2, lr}', [0x5FFC], 1000),  # > T15.1.1b > T15.1.1c success: range + single register
            ('{ pc, r1 -r2, sp- r12, r5}', [0xB026], 1000),  # : several ranges, with spaces
            ('{r4-}', [], -1403),  # > T15.1.1a + override : missing second reg in range list
            ('{r14, r8-1', [0x4000], -1302),  # > T15.1.1a + override : wrong second reg
            ('{r9-r16, r13}', [], -1304),  # > T15.1.1a + override : too high second reg number
            ('{r14,r8}', [0x4100], 1000),  # success: no space after ','
            ('{ r9 , r13 }', [0x2200], 1000),  # success: extra spaces
            ('{r14,}', [0x4000], -1504),  # > T15.1.1b > T15.1.2 error: missing register after ','
            ('{r14, }', [0x4000], -1504),  # > T15.1.1b > T15.1.2 : missing register after ', '
            ('{r9-r15, sp13}', [0xFE00], -1402)  # > T15.1.1b + override : unrecognized register id
            ]
# Immediate-operand ('#value') test vectors: (input, expected encoded
# rotate+immediate value, status code); 1000 = success, negative = error code.
imo_test = [('', [], -1601),  # T16.0.0 error: missing immediate value
            (' ', [], -1601),  # T16.0.1 > T16.0.0 idem with leading space
            ('2', [], -1602),  # T16.0.3 error: missing '#'
            ('#', [], -1603),  # T16.0.2 > T16.1.0 error: missing value after '#'
            ('# ', [], -1604),  # T16.0.2 > T16.1.1 error: unexpected space after '#'
            ('#f', [], -1605),  # T16.0.2 > T16.1.4 error: unrecognizable info after '#'
            ('#20', [20], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success: simple byte value
            ('#\'f\'', [102], 1000),  # T16.0.2 > T16.1.3 > T16.2.0 success: simple char value
            ('#-20', [], -1606),  # T16.0.2 > T16.1.2 + override : impossible fixup for negative number
            ('#2000', [0xE7D], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success fixup: in-the-middle bits
            ('#0xC0000034', [0x1D3], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success fixup: split bits
            ('#0xFF000000', [0x4FF], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success fixup: maximum rotation
            ('#0xFF0000FF', [], -1606),  # T16.0.2 > T16.1.2 + override : impossible fixup for 16 bits
            ('#0x102', [], -1606),  # T16.0.2 > T16.1.2 + override : impossible fixup for odd rotations
            ('#0x104', [0xF41], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success fixup: odd immediate mask
            ('#0x108', [0xF42], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 : even immediate mask
            ('#45r', [], -1004),  # T16.0.2 > T16.1.2 + override : unexpected decimal digit
            ('#\'e\' c', [101], -1607),  # T16.0.2 > T16.1.3 > T16.2.1 error: unexpected text after imm val.
            ('#0b111111100000000000', [0xBFE], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success fixup: binary
            ('#0b1002000', [], -1002),  # T16.0.2 > T16.1.2 + override : invalid binary digit
            ('#012000000005', [0x255], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success fixup: octal
            ('#012000900005', [], -1003),  # T16.0.2 > T16.1.2 + override : invalid octal digit
            ('#45d', [], -1004),  # T16.0.2 > T16.1.2 + override : invalid decimal digit
            ('#0x4X5', [], -1005),  # T16.0.2 > T16.1.2 + override : invalid hexa digit
            ('#0x400000000', [], -1006),  # T16.0.2 > T16.1.2 + override : too long value (>2^32)
            ('#0x08000002', [0x382], 1000),  # T16.0.2 > T16.1.2 > T16.2.0 success fixup: MSB = 1 at IM
            ('#\'', [], -1605),  # T16.0.2 > T16.1.4 error: unclosed char
            ('#\' ', [], -1104),  # T16.0.2 > T16.1.3 + override : unclosed char
            ('#\'\'', [], -1102),  # T16.0.2 > T16.1.3 + override : empty char
            ('#\' 0\'', [], -1105),  # T16.0.2 > T16.1.3 + override : more than one character
            ('#\'\t\'', [], -1103),  # T16.0.2 > T16.1.3 + override : illegal character ''
            ('#\"t\"', [], -1605),  # T16.0.2 > T16.1.4 error: illegal character '"'
            (' #\'a\'', [97], 1000)  # T16.0.1 > T16.0.2 > T16.1.3 > T16.2.0 success with leading space
            ]
# Immediate shift-amount ('#n', 0..31) test vectors: (input, expected shift
# count, status code); 1000 = success, negative = error code.
ims_test = [('', [], -1701),  # T17.0.0 error: missing immediate value
            (' ', [], -1701),  # T17.0.1 > T17.0.0 idem with leading space
            ('2', [], -1702),  # T17.0.3 error: missing '#'
            ('#', [], -1703),  # T17.0.2 > T17.1.0 error: missing value after '#'
            ('# ', [], -1704),  # T17.0.2 > T17.1.1 error: unexpected space after '#'
            ('#f', [], -1705),  # T17.0.2 > T17.1.3 error: unrecognizable info after '#'
            ('#2', [2], 1000),  # T17.0.2 > T17.1.2 > T17.2.0 success: valid number of shifts
            ('#-20', [], -1706),  # T17.0.2 > T17.1.2 + override : negative number of shifts
            ('#040', [], -1706),  # T17.0.2 > T17.1.2 + override : too high number of shifts
            ('#0x1C', [28], 1000),  # T17.0.2 > T17.1.2 > T17.2.0 success: hexa number
            ('#0b10101', [21], 1000),  # T17.0.2 > T17.1.2 > T17.2.0 success: binary number
            ('#0b10020', [], -1002),  # T17.0.2 > T17.1.2 + override : invalid binary digit
            ('#019', [], -1003),  # T17.0.2 > T17.1.2 + override : invalid octal digit
            ('#4d', [], -1004),  # T17.0.2 > T17.1.2 + override : invalid decimal digit
            ('#0xX', [], -1005),  # T17.0.2 > T17.1.2 + override : invalid hexa digit
            (' #0x1F', [31], 1000)  # T17.0.1 > T17.0.2 > T17.1.2 > T17.2.0 success with leading space
            ]
# Second-operand (immediate or shifted-register) test vectors:
# (input, expected encoded operand field, status code);
# 1000 = success, negative = error code.
op2_test = [('', [], -2201),  # T22.0.0 error: missing second operand
            (' ', [], -2203),  # T22.0.3 idem with leading space
            ('2', [], -2203),  # T22.0.3 error: missing '#'
            ('#', [], -1603),  # T22.0.1 + override : missing value after '#'
            ('# ', [], -1604),  # T22.0.1 + override : unexpected space after '#'
            ('#f', [], -1605),  # T22.0.1 + override : unrecognizable info after '#'
            ('#20', [0x02000014], 1000),  # T22.0.1 success: simple byte value
            ('#\'f\'', [0x02000066], 1000),  # T22.0.1 success: simple char value
            ('#-20', [], -1606),  # T22.0.1 + override : impossible fixup for negative number
            ('#0xC0000034', [0x020001D3], 1000),  # T22.0.1 success fixup: split bits
            ('#0x102', [], -1606),  # T22.0.1 + override : impossible fixup for odd rotations
            ('#\'e\' c', [], -1607),  # T22.0.1 + override : unexpected text after imm val.
            ('#0b1002000', [], -1002),  # T22.0.1 + override : invalid binary digit
            ('#012000900005', [], -1003),  # T22.0.1 + override : invalid octal digit
            ('#45d', [], -1004),  # T22.0.1 + override : invalid decimal digit
            ('#0x4X5', [], -1005),  # T22.0.1 + override : invalid hexa digit
            ('#0x400000000', [], -1006),  # T22.0.1 + override : too long value (2^32)
            ('#\'', [], -1605),  # T22.0.1 + override : unclosed char
            ('#\' ', [], -1104),  # T22.0.1 + override : unclosed char
            ('#\'\'', [], -1102),  # T22.0.1 + override : empty char
            ('#\' 0\'', [], -1105),  # T22.0.1 + override : more than one character
            ('#\'\t\'', [], -1103),  # T22.0.1 + override : illegal character ''
            ('#\"t\"', [], -1605),  # T22.0.1 + override : illegal character '"'
            (' #\'a\'', [0x02000061], 1000),  # T22.0.1 success with leading space
            ('r', [], -1303),  # T22.0.2a + override : missing register number
            ('ra', [], -1304),  # T22.0.2a + override : wrong reg number
            ('r1a', [], -1304),  # T22.0.2a + override : wrong reg number
            ('r-1', [], -1304),  # T22.0.2a + override : negative reg number
            ('r16', [], -1304),  # T22.0.2a + override : too high reg number
            ('r12', [12], 1000),  # T22.0.2a success: single reg
            ('r0 ', [0], 1000),  # T22.0.2a success: single reg with trailing space
            (' sp', [13], 1000),  # T22.0.2a success: single reg with leading space
            ('r1,', [1], -2204),  # T22.0.2b > T22.1.0 error: missing shift mode
            ('r2, ', [2], -2204),  # T22.0.2b > T22.1.1 > T22.1.0 : idem with trailing space
            ('r3, lslx', [3], -2206),  # T22.0.2b > T22.1.1 > T22.1.2c : missing space after shift mode
            ('r3, r0', [3], -2206),  # T22.0.2b > T22.1.1 > T22.1.2c : missing space after shift mode
            ('r3, #0', [3], -2206),  # T22.0.2b > T22.1.1 > T22.1.2c : missing space after shift mode
            ('r4, xl', [4], -2206),  # T22.0.2b > T22.1.1 > T22.1.3 : unrecognized shift mode
            ('r5, lsl', [5], -2205),  # T22.0.2b > T22.1.1 > T22.1.2a : missing space after shift mode
            ('r6, lsr ', [6], -2205),  # > T22.1.2b > T22.2.0 : missing info after shift mode
            ('r7, asr x', [7], -2207),  # > T22.1.2b > T22.2.3 : wrong info after shift mode
            ('r8, ror r', [8], -1303),  # > T22.1.2b > T22.2.1 + override: missing register number
            ('r9, lsl ra', [9], -1304),  # > T22.1.2b > T22.2.1 + override: wrong reg number
            ('r10, lsr r1a', [10], -1304),  # > T22.1.2b > T22.2.1 + override: wrong reg number
            ('r11, asr r-1', [11], -1304),  # > T22.1.2b > T22.2.1 + override: negative reg number
            ('r12, ror r16', [12], -1304),  # > T22.1.2b > T22.2.1 + override: too high reg number
            ('r13, lsl r12', [0xC1D], 1000),  # > T22.1.2b > T22.2.1 success: LSL reg
            ('sp, lsr r0 ', [0x3D], 1000),  # > T22.1.2b > T22.2.1 : LSR reg with trailing space
            ('r1,asr lr', [0xE51], 1000),  # > T22.1.2b > T22.2.1 : ASR reg no space after ','
            ('r8, ror #', [8], -1703),  # > T22.1.2b > T22.2.2 + override: missing value after '#'
            ('r9, lsl # ', [9], -1704),  # > T22.1.2b > T22.2.2 + override: unexpected space after '#'
            ('r10, lsr #f', [10], -1705),  # > T22.1.2b > T22.2.2 + override: unrecognizable info after '#'
            ('r11, asr #2', [0x14B], 1000),  # > T22.1.2b > T22.2.2 success: valid number of shifts
            ('r12, ror #-20', [12], -1706),  # > T22.1.2b > T22.2.2 + override: negative number of shifts
            ('r13, lsl #040', [13], -1706),  # > T22.1.2b > T22.2.2 + override: too high number of shifts
            ('pc, lsr #0x1C ', [0xE2F], 1000),  # > T22.1.2b > T22.2.2 success LSR imm with trailing space
            ('r1,asr #0b10101', [0xAC1], 1000),  # > T22.1.2b > T22.2.2 : ASR bin imm, no space after ','
            ('r8, ror #0b10020', [8], -1002),  # > T22.1.2b > T22.2.2 + override: invalid binary digit
            ('r9, lsl #019', [9], -1003),  # > T22.1.2b > T22.2.2 + override: invalid octal digit
            ('r10, lsr #4d', [10], -1004),  # > T22.1.2b > T22.2.2 + override: invalid decimal digit
            ('r11, asr #0xX', [11], -1005),  # > T22.1.2b > T22.2.2 + override: invalid hexa digit
            (' r12 , ror #0x1F ', [0xFEC], 1000),  # > T22.1.2b > T22.2.2 success with lead/trail spaces
            ('r13, lsl r12 a', [13], -1304),  # > T22.1.2b > T22.2.1 + override: unexpected text after parse
            ('r12, ror #0x1F b', [12], -1005)  # > T22.1.2b > T22.2.2 + override: idem for immediate parsing
            ]
# Test vectors for data-processing operand parsing (Rd[, Rn], <shifter_operand>).
# Each tuple is (input text, expected partially-encoded word list, status code):
# status 1000 means success, negative values are specific parser error codes.
# Inline notes trace the parser state transitions (T23.x.y) each case exercises;
# NOTE(review): exact state labels assumed to match the parser implementation — confirm there.
opd_test = [('', [], -2301),                            # T23.0.0 error: missing operands
            (' ', [], -2303),                           # T23.0.2 error: idem with leading space
            ('2', [], -1302),                           # T23.0.1a + override : unrecognizable register
            ('2,', [], -1302),                          # T23.0.1b + override : unrecognizable operand with ','
            ('r', [], -1303),                           # T23.0.1a + override : missing register number
            ('ra', [], -1304),                          # T23.0.1a + override : wrong reg number
            ('r16', [], -1304),                         # T23.0.1a + override : too high reg number
            ('r12', [], -2302),                         # T23.0.1a error: good dest reg, missing other ops
            ('r0 ', [], -2302),                         # T23.0.1a error: missing ',' after dest reg
            ('r1,', [0x1000], -2304),                   # T23.0.1b > T23.1.0 error: missing source operands
            ('r2, ', [0x2000], -2306),                  # T23.0.1b > T23.1.3 error: missing source operands
            ('r3, 3', [0x3000], -2306),                 # T23.0.1b > T23.1.3 error: wrong source op 1
            ('r4, ra', [0x4000], -1304),                # T23.0.1b > T23.1.1 > T23.1.2a + override : wrong reg number
            ('r5, r1a', [0x5000], -1304),               # T23.0.1b > T23.1.1 > T23.1.2a + override : wrong reg number
            ('r6, r-1', [0x6000], -1304),               # T23.0.1b > T23.1.1 > T23.1.2a + override : negative reg number
            ('r7, r16', [0x7000], -1304),               # T23.0.1b > T23.1.1 > T23.1.2a + override : too high reg number
            ('r8, r12', [0x8800C], 1000),               # T23.0.1b > T23.1.1 success: two registers
            ('r9,r1 ', [0x99001], 1000),                # T23.0.1b > T23.1.1 success: idem with no space after ','
            (' sp , lr ', [0xDD00E], 1000),             # T23.0.1b > T23.1.1 success: idem with extra spaces
            ('r10, r1,', [0x0A000], -2204),             # T23.0.1b > T23.1.1 + override : missing shift register
            ('r11, r2, ', [0x0B000], -2204),            # T23.0.1b > T23.1.1 + override : idem with space
            ('r12, r3, 3', [0x3C000], -2308),           # T23.0.1b > T23.1.2b > T23.2.2 error: wrong op 2
            ('r13, r4, ra', [0x4D000], -1304),          # T23.0.1b > T23.1.2b > T23.2.1 + override : wrong reg number
            ('r14, r5, r1a', [0x5E000], -1304),         # T23.0.1b > T23.1.2b > T23.2.1 + override : wrong reg number
            ('r15, r6, r-1', [0x6F000], -1304),         # T23.0.1b > T23.1.2b > T23.2.1 + override : negative reg number
            ('r0, r7, r16', [0x70000], -1304),          # T23.0.1b > T23.1.2b > T23.2.1 + override : too high reg number
            ('r1, r8, r12', [0x8100C], 1000),           # T23.0.1b > T23.1.2b > T23.2.1 success: three registers
            ('r2,r9,r1 ', [0x92001], 1000),             # T23.0.1b > T23.1.2a : idem with no space after ','
            ('r3, #', [0x03000], -1603),                # T23.0.1b > T23.1.1 + override : missing value after '#'
            ('r4, # ', [0x04000], -1604),               # T23.0.1b > T23.1.1 + override : unexpected space after '#'
            ('r5, #f', [0x05000], -1605),               # T23.0.1b > T23.1.1 + override : unrecognizable info after '#'
            ('r6, #20', [0x02066014], 1000),            # T23.0.1b > T23.1.1 success: dest reg + immediate value
            ('r7, #\'f\'', [0x02077066], 1000),         # T23.0.1b > T23.1.1 success: dest reg + immediate char
            ('r8, #-20', [0x08000], -1606),             # T23.0.1b > T23.1.1 + override : impossible fixup for negative num.
            ('r9,#0xC0000034', [0x020991D3], 1000),     # T23.0.1b > T23.1.1 success fixup: split bits
            ('r10, #0x102', [0x0A000], -1606),          # T23.0.1b > T23.1.1 + override : impossible fixup for odd rotations
            ('r11, #\'e\' c', [0xB000], -1607),         # T23.0.1b > T23.1.1 + override : unexpected text after imm val.
            ('r12, #0b1002000', [0x0C000], -1002),      # T23.0.1b > T23.1.1 + override : invalid binary digit
            ('r13, #012000900005', [0x0D000], -1003),   # > T23.1.1 + override : invalid octal digit
            ('r14, #45d', [0x0E000], -1004),            # T23.0.1b > T23.1.1 + override : invalid decimal digit
            ('r15, #0x4X5', [0x0F000], -1005),          # T23.0.1b > T23.1.1 + override : invalid hexa digit
            ('r0, #\'', [0x0], -1605),                  # T23.0.1b > T23.1.1 + override : unclosed char
            ('r1, #\' ', [0x01000], -1104),             # T23.0.1b > T23.1.1 + override : unclosed char
            ('r2, #\'\'', [0x02000], -1102),            # T23.0.1b > T23.1.1 + override : empty char
            ('r3, #\' 0\'', [0x03000], -1105),          # T23.0.1b > T23.1.1 + override : more than one character
            ('r4, #\'\t\'', [0x04000], -1103),          # T23.0.1b > T23.1.1 + override : illegal character (tab)
            ('r5, lslx', [0x05000], -2306),             # T23.0.1b > T23.1.3 error: unrecognized source operand
            ('r5, r10, lslx', [0xA5000], -2308),        # T23.0.1b > T23.1.2b > T23.2.2 error: wrong second operand
            ('r5, r10, r1', [0xA5001], 1000),           # T23.0.1b > T23.1.2b > T23.2.1 success: three registers
            ('r5, r10, #2', [0x20A5002], 1000),         # T23.0.1b > T23.1.2b > T23.2.1 success: two regs, one immediate
            ('r6, r1, xl', [0x16000], -2308),           # T23.0.1b > T23.1.2b > T23.2.2 error: wrong second operand
            ('r7, r2, lsl', [0x07000], -2205),          # T23.0.1b > T23.1.1 + override : missing space after shift mode
            ('r8, r3, lsr ', [0x08000], -2205),         # T23.0.1b > T23.1.1 + override : missing info after shift mode
            ('r9, r4, asr x', [0x09000], -2207),        # T23.0.1b > T23.1.1 + override : wrong info after shift mode
            ('r10, r5, ror r', [0x0A000], -1303),       # T23.0.1b > T23.1.1 + override : missing register number
            ('r11, r6, lsl ra', [0x0B000], -1304),      # T23.0.1b > T23.1.1 + override : wrong reg number
            ('r12, r7, ror r16', [0x0C000], -1304),     # T23.0.1b > T23.1.1 + override : too high reg number
            ('r13, r8, lsl r12', [0xDDC18], 1000),      # T23.0.1b > T23.1.1 success: LSL reg
            ('r14, sp, lsr r0 ', [0xEE03D], 1000),      # T23.0.1b > T23.1.1 : LSR reg with trailing space
            ('r15, r1,asr lr', [0xFFE51], 1000),        # T23.0.1b > T23.1.1 : ASR reg no space after ','
            ('r0, r8, ror #', [0], -1703),              # T23.0.1b > T23.1.1 + override : missing value after '#'
            ('r1, r9, lsl # ', [0x01000], -1704),       # T23.0.1b > T23.1.1 + override : unexpected space after '#'
            ('r2, r10, lsr #f', [0x02000], -1705),      # T23.0.1b > T23.1.1 + override : unrecognizable info after '#'
            ('r3, r11, asr #2', [0x3314B], 1000),       # T23.0.1b > T23.1.1 success: valid number of shifts
            ('r4, r12, ror #-20', [0x04000], -1706),    # > T23.1.1 + override : negative number of shifts
            ('r5, r13, lsl #040', [0x05000], -1706),    # > T23.1.1 + override : too high number of shifts
            ('r5, r13, lsl #00', [0x05500D], 1000),     # > T23.1.1 success: true LSL #0
            ('r6, pc, lsr #0x1C ', [0x66E2F], 1000),    # > T23.1.1 success LSR imm with trailing space
            ('r6, pc, lsr #0x0 ', [0x6600F], 1000),     # > T23.1.1 converting LSR #0 into LSL #0
            ('r7,r1,asr #0b10101', [0x77AC1], 1000),    # > T23.1.1 : ASR bin imm, no space after ','
            ('r7,r1,asr #0b0', [0x77001], 1000),        # > T23.1.1 converting ASR #0 into LSL #0
            ('r8, r13, lsl r12 a', [0x08000], -1304),   # > T23.1.1 + override : unexpected text after parse
            ('r9, r12, ror #0x1F b', [0x09000], -1005), # > T23.1.1 + override : idem for immediate parsing
            ('r9, r12, ror #0x1F', [0x99FEC], 1000),    # > T23.1.1 success ROR with 31 shifts
            ('r9, r12, ror #0x0', [0x9906C], 1000),     # > T23.1.1 coding ROR #0 as RRX
            ('r13, r7, r8, lsl r12 ', [0x7DC18], 1000), # > T23.1.2 > T23.2.1 success: three regs, last shift reg
            ('r14 , r8 , sp , lsr r10', [0x8EA3D], 1000), # > T23.1.2 > T23.2.1 : idem with trailing spaces
            ('r15,r9,r1,asr lr', [0x9FE51], 1000),      # > T23.1.2 > T23.2.1 : idem with space after ','
            ('r13, r7, r8, lsl #12 ', [0x7D608], 1000), # > T23.1.2 > T23.2.1 success: three regs, last shift imm
            ('r14 , r8 , sp , lsr #10', [0x8E52D], 1000), # > T23.1.2 > T23.2.1 : idem with trailing spaces
            ('r15,r9,r1,asr #31', [0x9FFC1], 1000),     # > T23.1.2 > T23.2.1 : idem with space after ','
            ('r15,r9,r1,asr r32', [0x9F000], -1304),    # > T23.1.2 > T23.2.1 + override : wrong range reg number
            ('r15,r9,r1,asr #32', [0x9F000], -1706),    # > T23.1.2 > T23.2.1 + override : invalid number of shifts
            ('r15,r9,r1,asr r', [0x9F000], -1303),      # > T23.1.2 > T23.2.1 + override : missing reg number
            ('r15,r9,r1,asr ', [0x9F000], -2205)        # > T23.1.2 > T23.2.1 + override : missing info after shift
           ]
# Test vectors for full data-processing instruction parsing (mnemonic + condition
# + optional 's' flag + operands). Tuples are (input text, [length, encoded word]
# or partial encoding, status code); 1000 = success, negatives = error codes.
# Inline notes trace parser states (T31.x.y);
# NOTE(review): state labels assumed to match the parser implementation — confirm there.
idt_test = [('', [], -3101),                            # T31.0.0 error: missing data instruction
            (' ', [], -3101),                           # T31.0.1 > T31.0.0 error: idem with leading space
            ('2', [], -3103),                           # T31.0.3 error: unrecognizable instruction
            ('and', [], -3102),                         # T31.0.2a error: missing operands after instr.
            ('eor ', [4, 0xE0200000], -3102),           # T31.0.2b > T31.3.0 error: missing operands after instr.
            ('sub 2,', [4, 0xE0400000], -1302),         # T31.0.2b > T31.3.1 + override : unrecognizable operand with ','
            ('rsb r', [4, 0xE0600000], -1303),          # T31.0.2b > T31.3.1 + override : missing register number
            ('add r16', [4, 0xE0800000], -1304),        # T31.0.2b > T31.3.1 + override : too high reg number
            ('adc r12', [4, 0xE0A00000], -2302),        # T31.0.2b > T31.3.1 + override : good dest reg, missing other ops
            ('sbc ', [4, 0xE0C00000], -2303),           # T31.0.2b > T31.3.1 + override : missing dest reg
            ('rsc r1,', [4, 0xE0E00000], -2304),        # T31.0.2b > T31.3.1 + override : missing source operands
            ('orr r2, ', [4, 0xE1800000], -2306),       # T31.0.2b > T31.3.1 + override : missing source operands
            ('bic r3, 3', [4, 0xE1C00000], -2306),      # T31.0.2b > T31.3.1 + override : wrong source op 1
            ('and r12, r3, 3', [4, 0xE0000000], -2308), # > T31.3.1 + override : wrong op 2
            ('eor r3, #', [4, 0xE0200000], -1603),      # > T31.3.1 + override : missing value after '#'
            ('sub r4, # ', [4, 0xE0400000], -1604),     # > T31.3.1 + override : unexpected space after '#'
            ('rsb r5, #f', [4, 0xE0600000], -1605),     # > T31.3.1 + override : unrecognizable info after '#'
            ('add r10, #0x102', [4, 0xE0800000], -1606), # > T31.3.1 + override : impossible fixup for odd rotations
            ('adc r11, #\'e\' c', [4, 0xE0A00000], -1607), # > T31.3.1 + override : unexpected text after imm val.
            ('sbc r10, r1,', [4, 0xE0C00000], -2204),   # > T31.3.1 + override : missing shift register
            ('rsc r7, r2, lsl', [4, 0xE0E00000], -2205), # > T31.3.1 + override : missing space after shift mode
            ('orr r9, r4, asr x', [4, 0xE1800000], -2207), # > T31.3.1 + override : wrong info after shift mode
            ('bic r0, r8, ror #', [4, 0xE1C00000], -1703), # > T31.3.1 + override : missing value after '#'
            ('and r1, r9, lsl # ', [4, 0xE0000000], -1704), # > T31.3.1 + override : unexpected space after '#'
            ('eor r2, r10, lsr #f', [4, 0xE0200000], -1705), # > T31.3.1 + override : unrecognizable info after '#'
            ('sub r4, r12, ror #-20', [4, 0xE0400000], -1706), # > T31.3.1 + override : negative number of shifts
            ('rsb r12, #0b1002000', [4, 0xE0600000], -1002), # > T31.3.1 + override : invalid binary digit
            ('add r13, #012000900005', [4, 0xE0800000], -1003), # > T31.3.1 + override : invalid octal digit
            ('adc r14, #45d', [4, 0xE0A00000], -1004),  # > T31.3.1 + override : invalid decimal digit
            ('sbc r15, #0x4X5', [4, 0xE0C00000], -1005), # > T31.3.1 + override : invalid hexa digit
            ('rsc r2, #\'\'', [4, 0xE0E00000], -1102),  # > T31.3.1 + override : empty char
            ('orr r4, #\'\t\'', [4, 0xE1800000], -1103), # > T31.3.1 + override : illegal character (tab)
            ('bic r1, #\' ', [4, 0xE1C00000], -1104),   # > T31.3.1 + override : unclosed char
            ('and r3, #\' 0\'', [4, 0xE0000000], -1105), # > T31.3.1 + override : more than one character
            ('eors', [4, 0xE0200000], -3102),           # T31.0.2c > T31.1.2a error: data operands
            ('eoral', [4, 0xE0200000], -3102),          # T31.0.2c > T31.1.1a error: data operands
            ('tsts', [4, 0xE1100000], -3102),           # T31.0.2c > T31.1.2a : missing operands
            ('tsts ', [4, 0xE1100000], -3102),          # T31.0.2c > T31.1.2b > T31.3.0 : missing operands
            ('teqst', [4, 0xE1300000], -3105),          # T31.0.2c > T31.1.2c error: wrong text after instruction
            ('cmpxx', [4, 0xE1500000], -3104),          # T31.0.2c > T31.1.3 error: unknown instruction condition
            ('cmneq', [4, 0xE1700000], -3102),          # T31.0.2c > T31.1.1a error: missing ops after pred.inst.
            ('movne ', [4, 0x11A00000], -3102),         # T31.0.2c > T31.1.1b > T31.3.0 : idem after space
            ('mvncss', [4, 0x21E00000], -3102),         # T31.0.2c > T31.1.1c > T31.2.1a : idem after set flag
            ('mvncsx', [4, 0x21E00000], -3105),         # T31.0.2c > T31.1.1c > T31.2.2 : wrong text after pred.inst
            ('mvncssx', [4, 0x21E00000], -3105),        # T31.0.2c > T31.1.1c > T31.2.1c : wrong text after pred.inst + flag
            ('andhss', [4, 0x20000000], -3102),         # T31.0.2c > T31.1.1c > T31.2.1a : missing operands after set flag
            ('andhss ', [4, 0x20100000], -3102),        # T31.0.2c > T31.1.1c > T31.2.1b > T31.3.0 : after set flag + space
            ('eorccx', [4, 0x30200000], -3105),         # T31.0.2c > T31.1.1c > T31.2.2 : wrong text after pred.inst
            ('sublosx', [4, 0x30400000], -3105),        # T31.0.2c > T31.1.1c > T31.2.1c : wrong text after pred.inst + flag
            ('cmp', [], -3102),                         # T31.0.2a error: missing operands after instr.
            ('cmn ', [4, 0xE1700000], -3102),           # T31.0.2b > T31.3.0 error: missing operands after instr.
            ('mov 2,', [4, 0xE1A00000], -1302),         # T31.0.2b > T31.3.1 + override : unrecognizable operand with ','
            ('mvn r', [4, 0xE1E00000], -1303),          # T31.0.2b > T31.3.1 + override : missing register number
            ('tst r16', [4, 0xE1100000], -1304),        # T31.0.2b > T31.3.1 + override : too high reg number
            ('teq r12', [4, 0xE1300000], -2302),        # T31.0.2b > T31.3.1 + override : good dest reg, missing other ops
            ('cmp ', [4, 0xE1500000], -2303),           # T31.0.2b > T31.3.1 + override : missing source 1 reg
            ('cmn r1,', [4, 0xE1700000], -2304),        # T31.0.2b > T31.3.1 + override : missing source operands
            ('mov r2, ', [4, 0xE1A00000], -2306),       # T31.0.2b > T31.3.1 + override : missing source operands
            ('mvn r3, 3', [4, 0xE1E00000], -2306),      # T31.0.2b > T31.3.1 + override : wrong source op 1
            ('tst r3, #', [4, 0xE1100000], -1603),      # > T31.3.1 + override : missing value after '#'
            ('teq r4, # ', [4, 0xE1300000], -1604),     # > T31.3.1 + override : unexpected space after '#'
            ('cmp r5, #f', [4, 0xE1500000], -1605),     # > T31.3.1 + override : unrecognizable info after '#'
            ('mov r10, #0x102', [4, 0xE1A00000], -1606), # > T31.3.1 + override : impossible fixup for odd rotations
            ('mvn r11, #\'e\' c', [4, 0xE1E00000], -1607), # > T31.3.1 + override : unexpected text after imm val.
            ('tst r7, r2, lsl', [4, 0xE1100000], -2205), # > T31.3.1 + override : missing space after shift mode
            ('teq r9, r4, asr x', [4, 0xE1300000], -2207), # > T31.3.1 + override : wrong info after shift mode
            ('cmp r0, r8, ror #', [4, 0xE1500000], -1703), # > T31.3.1 + override : missing value after '#'
            ('cmn r1, r9, lsl # ', [4, 0xE1700000], -1704), # > T31.3.1 + override : unexpected space after '#'
            ('mov r2, r10, lsr #f', [4, 0xE1A00000], -1705), # > T31.3.1 + override : unrecognizable info after '#'
            ('mvn r4, r12, ror #-20', [4, 0xE1E00000], -1706), # > T31.3.1 + override : negative number of shifts
            ('tst r12, #0b1002000', [4, 0xE1100000], -1002), # > T31.3.1 + override : invalid binary digit
            ('teq r13, #012000900005', [4, 0xE1300000], -1003), # > T31.3.1 + override : invalid octal digit
            ('cmp r14, #45d', [4, 0xE1500000], -1004),  # > T31.3.1 + override : invalid decimal digit
            ('cmn r15, #0x4X5', [4, 0xE1700000], -1005), # > T31.3.1 + override : invalid hexa digit
            ('mov r2, #\'\'', [4, 0xE1A00000], -1102),  # > T31.3.1 + override : empty char
            ('mvn r4, #\'\t\'', [4, 0xE1E00000], -1103), # > T31.3.1 + override : illegal character (tab)
            ('tst r1, #\' ', [4, 0xE1100000], -1104),   # > T31.3.1 + override : unclosed char
            ('teq r3, #\' 0\'', [4, 0xE1300000], -1105), # > T31.3.1 + override : more than one character
            ('eorsx', [4, 0xE0200000], -3105),          # T31.0.2c > T31.1.2c error: wrong text after 's'
            ('eorx', [4, 0xE0200000], -3104),           # T31.0.2c > T31.1.3 error: wrong text after inst.
            ('rsb r5, r10, #2', [4, 0xE26A5002], 1000), # T31.0.2b > T31.3.1 success: two regs, one immediate
            ('add r13, r8, lsl r12', [4, 0xE08DDC18], 1000), # T31.0.2b > T31.3.1 : LSL reg
            ('adc r14, sp, lsr r0 ', [4, 0xE0AEE03D], 1000), # T31.0.2b > T31.3.1 : LSR reg with trailing space
            ('sbc r15, r1,asr lr', [4, 0xE0CFFE51], 1000), # T31.0.2b > T31.3.1 : ASR reg no space after ','
            ('rsc r6, pc, lsr #0x1C ', [4, 0xE0E66E2F], 1000), # T31.0.2b > T31.3.1 : LSR imm with trailing space
            ('rsc r6, pc, lsr #0x0 ', [4, 0xE0E6600F], 1000), #  : LSR #0 -> LSL #0
            ('orrs r7,r1,asr #0b10101', [4, 0xE1977AC1], 1000), # > T31.1.2b > T31.3.1:ASR bin imm, no space after ','
            ('orrs r7,r1,asr #0b0', [4, 0xE1977001], 1000), #  : ASR #0 -> LSL #0
            ('bicmi r13, r7, r8, lsl r12 ', [4, 0x41C7DC18], 1000), # > T31.1.1b > T31.3.1 : three regs, shift reg
            ('andpls r14 , r8 , sp , lsr r10', [4, 0x5018EA3D], 1000), # > T31.1.1c > T31.2.1b > T31.3.1 : cond. + 's'
            ('eorvss r15,r9,#\'f\'', [4, 0x6239F066], 1000), # > T31.1.1c > T31.2.1b > T31.3.1 : cond.+'s'+ imm.
            ('subvc r9,#0xC0000034', [4, 0x724991D3], 1000), # T31.0.2c > T31.1.1b > T31.3.1 : one reg + one imm.
            ('rsbhis r8 , sp , lsr #10', [4, 0x8078852D], 1000), # > T31.1.1c > T31.2.1b > T31.3.1: reg + shifted reg
            ('addls r9,r1,asr r15', [4, 0x90899F51], 1000), # > T31.1.1b > T31.3.1 : idem with no 's'
            ('tst r7,r1, #0b10101', [4, 0xE1100000], -2310), # T31.0.2b > T31.3.1 + override : 3 ops with 'tst'
            ('teq r13,r7,r8,lsl r12', [4, 0xE1300000], -2310), # T31.0.2b > T31.3.1 + override : 3 ops with 'teq'
            ('cmppl r14,r8,sp,lsr r10', [4, 0x51500000], -2310), # T31.0.2b > T31.3.1 + override : 3 ops with 'cmp'
            ('cmnvss r15,r9,#\'f\'', [4, 0x61700000], -2310), # T31.0.2b > T31.3.1 + override : 3 ops with 'cmn'
            ('movvc r1,r9, #0xC000', [4, 0x71A00000], -2311), # T31.0.2b > T31.3.1 + override : 3 ops with 'mov'
            ('mvnhis r8, lr, sp, lsr pc', [4, 0x81F00000], -2311), # > T31.3.1 + override : 3 os with 'mvn'
            ('tst r7, #0b10101', [4, 0xE3170015], 1000), # T31.0.2b > T31.3.1 : 'tst' + reg + imm
            ('teqlss r7,r8,lsl r12', [4, 0x91370C18], 1000), # > T31.1.1c > T31.2.1b > T31.3.1: 'teq'+reg+shifted reg
            ('cmpge r14, r8', [4, 0xA15E0008], 1000),   # > T31.1.1c > T31.3.1 : 'cmp' + reg + reg
            ('cmnlt r15, #\'f\'', [4, 0xB37F0066], 1000), # > T31.1.1c > T31.3.1 : 'cmn' + reg + char
            ('movgts r1, #0xC000', [4, 0xC3B01903], 1000), # > T31.1.1c > T31.2.1b > T31.3.1: 'mov' + reg + imm
            ('mvnle lr, sp, lsr #15', [4, 0xD1E0E7AD], 1000), # > T31.1.1c > T31.3.1 : 'mvn'+reg+shifted reg
            ('mov r2, #-1', [4, 0xE3E02000], 1000),     # T31.0.2b > T31.3.1 : 'mov' + reg + NOT imm
            ('mvn r3, #0xFFF00FFF', [4, 0xE3A03AFF], 1000), # T31.0.2b > T31.3.1 : 'mvn' + reg + NOT imm
            ('and r4, #-200', [4, 0xE3C440C7], 1000),   # T31.0.2b > T31.3.1 : 'and' + reg + NOT imm
            ('bic r5, #0xFFC03FFF', [4, 0xE20559FF], 1000), # T31.0.2b > T31.3.1 : 'bic' + reg + NOT imm
            ('add r6, #-300', [4, 0xE2466F4B], 1000),   # T31.0.2b > T31.3.1 : 'add' + reg + NOT imm
            ('sub r7, #0xFF100000', [4, 0xE287760F], 1000), # T31.0.2b > T31.3.1 : 'mvn' + reg + NOT imm
            ('cmp r8, #-1000', [4, 0xE3780FFA], 1000),  # T31.0.2b > T31.3.1 : 'cmp' + reg + NOT imm
            ('cmn r9, #0xFFC04000', [4, 0xE35909FF], 1000) # T31.0.2b > T31.3.1 : 'cmn' + reg + NOT imm
           ]
# Test vectors for multiplication instruction parsing (mul/mla/umull/umlal/
# smull/smlal + condition + 's' flag + registers). Tuples are (input text,
# [length, encoded word] or partial encoding, status code); 1000 = success.
# Inline notes trace parser states (T32.x.y);
# NOTE(review): state labels assumed to match the parser implementation — confirm there.
iml_test = [('', [], -3201),                            # T32.0.0 error: missing multiplication instr.
            (' ', [], -3201),                           # T32.0.1 > T32.0.0 error: idem with leading space
            ('2', [], -3203),                           # T32.0.3 error: unrecognizable instruction
            ('mul', [], -3202),                         # T32.0.2a error: missing operands after instr.
            ('mla ', [4, 0xE0200090], -3202),           # T32.0.2b > T32.3.0 error: missing operands after instr.
            ('umull 2,', [4, 0xE0800090], -1302),       # T32.0.2b > T32.3.1b + override : unrecognizable operand with ','
            ('smull r', [4, 0xE0C00090], -1303),        # T32.0.2b > T32.3.1b + override : missing register number
            ('umlal r16', [4, 0xE0A00090], -1304),      # T32.0.2b > T32.3.1b + override : too high reg number
            ('smlal r12', [4, 0xE0E00090], -3202),      # T32.0.2b > T32.3.1a error: good dest reg, missing other ops
            ('mul ', [4, 0xE0000090], -1301),           # T32.0.2b > T32.3.1a + override : missing reg1
            ('mla r1,', [4, 0xE0210090], -3202),        # T32.0.2b > T32.3.1b > T32.4.0 : missing source operands
            ('umull r2, ', [4, 0xE0802090], -1301),     # > T32.4.1b + override : missing reg2
            ('smull r3, gu', [4, 0xE0C03090], -1302),   # > T32.4.1b + override : wrong op 2
            ('umlal r12, r3, e3', [4, 0xE0A3C090], -1302), # > T32.5.1b + override : wrong op 3
            ('smlal r3, r4, r5, ', [4, 0xE0E43095], -1301), # > T32.6.1 + override : missing reg4
            ('mul r3, r4, r5, r6', [4, 0xE0030594], -3207), # > T32.6.1 + override : four regs with 'mul'
            ('mla r3, r4, r5', [4, 0xE0230594], -3202), # > T32.6.1 + override : three regs with 'mla'
            ('mul r3, r4, r5', [4, 0xE0030594], 1000),  # > T32.5.1a success: three regs with 'mul'
            ('mla r3, r4, r5, r6', [4, 0xE0236594], 1000), # > T32.6.1 success: four regs with 'mla'
            ('umull r10, r11, r12, r13', [4, 0xE08BAD9C], 1000), # > T32.6.1 : four regs with 'umull'
            ('umlal r1, r11, r2, r3', [4, 0xE0AB1392], 1000), # > T32.6.1 : four regs with 'umlal'
            ('smull r10, r11, lr, r10', [4, 0xE0CBAA9E], 1000), # > T32.6.1 : four regs with 'smull'
            ('smlal sp, lr, r0, r7', [4, 0xE0EED790], 1000), # > T32.6.1 : four regs with 'smlal'
            ('mul pc, r0, r7', [4, 0xE0000090], -3208), # > T32.5.1a + override : use of PC as Rd
            ('mul r0, pc, r8', [4, 0xE0000090], -3208), # > T32.5.1a + override : use of PC as Rm
            ('mla r0, r7, pc', [4, 0xE0200097], -3208), # > T32.5.1a + override : use of PC as Rs
            ('umlal r10, pc, r6, r9', [4, 0xE0A0A090], -3208), # + override : use of PC as RdHi
            ('smlal pc, r9, r8, r7', [4, 0xE0E00090], -3208), # + override : use of PC as RdLo
            ('mul r3, r3, r5', [4, 0xE0030593], 1000),  # + warning : Rd should be different from Rm
            ('mla r5, r5, r5, r1', [4, 0xE0251595], 1000), # + warning : Rd should be different from Rm
            ('mla r3, r4, r3, r4', [4, 0xE0234394], 1000), # success : should work
            ('mla r3, r4, r3, r3', [4, 0xE0233394], 1000), # success : should work
            ('umull r6, r7, r7, r6', [4, 0xE0876697], 1000), # + warning : RdHi, RdLo and Rm must all be dif
            ('smull r9, r10, r9,r9', [4, 0xE0CA9999], 1000), # + warning : RdHi, RdLo and Rm must all be dif
            ('umlal r6, r6, r7, r6', [4, 0xE0A66697], 1000), # + warning : RdHi and RdLo must be different
            ('smlal r8, r9, r10,r8', [4, 0xE0E9889A], 1000), # success : should work
            ('muleq', [4, 0xE0000090], -3202),          # T32.0.2c > T32.1.1a error : cond & missing ops
            ('muls', [4, 0xE0000090], -3202),           # T32.0.2c > T32.1.2a error : 's'' & missing ops
            ('mulz', [4, 0xE0000090], -3204),           # T32.0.2c > T32.1.3 error : wrong text after
            ('muleqs', [4, 0x00000090], -3202),         # > T32.1.1c > T32.2.1a error : missing ops
            ('muleqsz', [4, 0x00000090], -3205),        # > T32.1.2b > T32.2.1c error : missing ops
            ('smull r3, r4', [4, 0xE0C03090], -3202),   # > T32.4.1a error : missing ops
            ('smull r3, r4,', [4, 0xE0C43090], -3202),  # > T32.5.0 error : missing ops
            ('smull r3, r4, r5', [4, 0xE0C43095], -3202), # > T32.5.1a error : missing ops
            ('smull r3, r4, r5,', [4, 0xE0C43095], -3202), # > T32.6.0 error : missing ops
            ('muleq r3, r4, r5', [4, 0x00030594], 1000), # T32.0.2c > T32.1.1b > success : 'mul' + cond
            ('mlanes r3, r4, r5, r6', [4, 0x10336594], 1000), # > T32.1.1c > T32.2.1b > : 'mla' + cond + 's'
            ('umulls r10, r11, r12, r13', [4, 0xE09BAD9C], 1000), # T32.0.2c > T32.1.2b > : 'umull' + 's'
            ('umlalle r1, r11, r2, r3', [4, 0xD0AB1392], 1000), # T32.0.2c > T32.1.1b > : 'umlal' + cond
            ('smulllex r10, r11, lr, r10', [4, 0xD0C00090], -3205), # T32.0.2c > T32.1.1c > T32.2.2 : error after cond
            ('smlalsy sp, lr, r0, r7', [4, 0xE0E00090], -3205) # T32.0.2c > T32.1.2c : error after 's'
           ]
# Test vectors for branch instruction parsing (b/bl with offsets, bx/blx with a
# register). Tuples are (input text, [length, encoded word] or partial encoding,
# status code); 1000 = success, negatives = error codes. Inline notes trace
# parser states (T33.x.y);
# NOTE(review): state labels assumed to match the parser implementation — confirm there.
ibr_test = [('', [], -3301),                            # T33.0.0 error: missing branch instr.
            (' ', [], -3301),                           # T33.0.1 > T33.0.0 error: idem with leading space
            ('2', [], -3303),                           # T33.0.5 error: unrecognizable instruction
            ('blo', [], -3302),                         # T33.0.2a error: missing offset after instr.
            ('bleq ', [4, 0x0B000000], -3302),          # T33.0.2b > T33.3.0 : missing offset after instr.
            ('blox', [4], -3303),                       # T33.0.2c error: unexpected text after instr.
            ('bx', [], -3304),                          # T33.0.3a error: missing reg after instr.
            ('blx ', [4, 0xE12FFF30], -3304),           # T33.0.3b > T33.4.0 error: missing reg after instr.
            ('blxo', [4, 0xE12FFF30], -3303),           # T33.0.3c > T33.2.2 error: unexpected text after instr.
            ('b', [], -3302),                           # T33.0.4a error: missing offset after instr.
            ('bl ', [4, 0xEB000000], -3302),            # T33.0.4b > T33.3.0 error: missing offset after instr.
            ('bly', [4, 0xEB000000], -3303),            # T33.0.4c > T33.1.2 error: unexpected text after instr.
            ('beq', [4, 0xEA000000], -3302),            # T33.0.4c > T33.1.1a error: missing offset after instr.
            ('blne ', [4, 0x1B000000], -3302),          # T33.0.4c > T33.1.1b > T 33.3.0 : missing offset after instr.
            ('blnex', [4, 0x1B000000], -3303),          # T33.0.4c > T33.1.1c : unexpected text after instr.
            ('bxeq', [4, 0xE12FFF10], -3302),           # T33.0.3c > T33.2.1a error: missing offset after instr.
            ('blxeq ', [4, 0x012FFF30], -3304),         # T33.0.3c > T33.2.1b > T 33.4.0 : missing offset after instr.
            ('blxeqx', [4, 0x012FFF30], -3303),         # T33.0.3c > T33.2.1c : unexpected text after instr.
            ('blt f', [4, 0xBA000000], -3305),          # T33.0.2b > T33.3.2 error: wrong offset
            ('bls 0b12', [4, 0x9A000000], -1002),       # T33.0.2b > T33.3.1 + override : unexpected binary digit
            ('blls 0192', [4, 0x9B000000], -1003),      # > T33.1.1b > T33.3.1 + override: unexpected octal digit
            ('bllo -192a', [4, 0x3B000000], -1004),     # > T33.1.1b > T33.3.1 + override: unexpected decimal digit
            ('blvc 0xA3G0', [4, 0x7B000000], -1005),    # > T33.1.1b > T33.3.1 + override: unexpected hexa digit
            ('bvc 0xA30000000', [4, 0x7A000000], -1006), # > T33.3.1 + override: too long hex address
            ('bxvc 0xA300', [4, 0x712FFF10], -1302),    # > T33.2.1b > T33.4.1 + override: unrecognized reg
            ('blxcc r', [4, 0x312FFF30], -1303),        # > T33.2.1b > T33.4.1 + override: missing reg number
            ('bxcc rf', [4, 0x312FFF10], -1304),        # > T33.2.1b > T33.4.1 + override: wrong reg number
            ('bxmi r16', [4, 0x412FFF10], -1304),       # > T33.2.1b > T33.4.1 + override: wrong reg number
            ('bx r6', [4, 0xE12FFF16], 1000),           # T33.0.3b > T33.4.1 success: 'bx' jump
            ('blxpl r6', [4, 0x512FFF36], 1000),        # > T33.2.1b > T33.4.1 success: 'blx' jump
            ('blxlt r15', [4, 0xB12FFF3F], 1000),       # > T33.2.1b > T33.4.1 warning: use of pc (r15)
            ('b 0xA300', [4, 0xEA0028C0], 1000),        # T33.0.4b > T33.3.1 success: 'b' jump
            ('bl -1300', [4, 0xEBFFFEBB], 1000),        # T33.0.4b > T33.3.1 success: 'bl' negative jump
            ('blt 073000000', [4, 0xBA3B0000], 1000),   # > T33.3.1 success: 'blt' octal jump
            ('bleq 0x730000', [4, 0x0B1CC000], 1000),   # > T33.3.1 success: 'bleq' hexa jump
            ('bhi 0xA30000', [4, 0x8A28C000], 1000),    # > T33.3.1 success: 'b' jump
            ('blgt 0x1302', [4, 0xCB000000], -3307),    # > T33.3.1 + override : misaligned address
            ('bllt 0x73000000', [4, 0xBB000000], -3308), # > T33.3.1 + override : out of range offset
            ('blal -73000000', [4, 0xEB000000], -3308), # > T33.3.1 + override : out of range negative offset
            ('bal -7300001', [4, 0xEA000000], -3307)    # > T33.3.1 + override : misaligned negative address
           ]
# Test vectors for addressing mode 2 parsing ([Rn, +/-Rm, shift #imm] forms used
# by ldr/str). Tuples are (input text, expected partial encoding, status code);
# 1000 = success, negatives = error codes. Inline notes trace parser states
# (T24.x.y);
# NOTE(review): state labels assumed to match the parser implementation — confirm there.
am2_test = [('', [], -2401),                            # T24.0.0 error: missing addressing mode
            (' ', [], -2401),                           # T24.0.1 > T24.0.0 error: idem with leading space
            ('2', [], -2402),                           # T24.0.3 error: missing '['
            ('[', [], -2403),                           # T24.0.2 > T24.1.0 error: missing info after '['
            ('[2', [], -2403),                          # T24.0.2 > T24.1.2 : unrecognizable register
            ('[r', [], -1303),                          # T24.0.2 > T24.1.1a + override : missing register number
            ('[ra', [], -1304),                         # T24.0.2 > T24.1.1a + override : wrong reg number
            ('[r16', [], -1304),                        # T24.0.2 > T24.1.1a + override : too high reg number
            ('[r12', [], -2404),                        # T24.0.2 > T24.1.1a error: good base reg, missing closure
            ('[r0 ', [], -2404),                        # T24.0.2 > T24.1.1a error: missing ',' after base reg
            ('[r1,', [0x01810000], -2405),              # T24.0.2 > T24.1.1b > T24.2.0 : missing displacement
            ('[r2]!', [0x01820000], -2410),             # T24.0.2 > T24.1.1c > T24.7.2 : unexpected text after ']'
            ('[r3, 3', [0x01830000], -2406),            # > T24.1.1b > T24.2.1 > T24.2.6 : wrong displacement
            ('[r4, ra', [0x01840000], -1304),           # > T24.2.1 > T24.2.5a + override: wrong reg number
            ('[r5, r1a', [0x01850000], -1304),          # > T24.2.1 > T24.2.5a + override: wrong reg number
            ('[r6, +r1', [0x01860000], -2404),          # > T24.2.1 > T24.2.2 > T24.3.1a : check positive reg displ.
            ('[r7, -r6', [0x01070000], -2404),          # > T24.2.1 > T24.2.3 > T24.3.1a : check negative reg displ.
            ('[r8, -', [0x01080000], -2405),            # > T24.2.3 > T24.3.0 : EOSeq after '-'
            ('[r8, -3.2', [0x01080000], -2406),         # > T24.2.3 > T24.3.2 : wrong reg after '-'
            ('[r5, r10, ', [0x0385000A], -2407),        # > T24.2.5b > T24.5.1 > T24.5.0 : missing shift mode
            ('[r7, r2, lsl', [0x03870002], -2408),      # > T24.2.5b > T24.5.1 > T24.5.2a: missing space after shift
            ('[r8, r3, lsr ', [0x03880003], -2408),     # > T24.5.2b > T24.6.0 : missing info after shift mode
            ('[r10, r5, ror r', [0x038A0005], -1702),   # > T24.5.2b > T24.6.2 : idem
            ('[r1, r9, lsl # ', [0x03810009], -1704),   # > T24.5.2b > T24.6.1a + override : unexpected space after '#'
            ('[r3, r11, asr #2', [0x0383000B], -2404),  # > T24.5.2b > T24.6.1a : valid scaled reg, missing ']'
            ('[r8, #', [0x01880000], -2405),            # > T24.2.1 > T24.2.4 > T24.4.0 : missing displacement
            ('[r4, # ', [0x01840000], -2406),           # > T24.2.1 > T24.2.4 > T24.4.2 : unexpected space after '#'
            ('[r5, #\'f\'', [0x01850000], -2406),       # > T24.2.1 > T24.2.4 > T24.4.2 : unrecognizable info after '#'
            ('[r6, #20', [0x01860000], -2404),          # > T24.2.1 > T24.2.4 > T24.4.1a : base + imm. displ., missing ']'
            ('[r8, #-20', [0x01880000], -2404),         # > T24.2.1 > T24.2.4 > T24.4.1a : idem for negative imm. displ.
            ('[r9,#0xC0000034]', [0x1890000], -2411),   # > T24.4.1b + override : too long immediate displacement
            ('[r12, #0b1002000]', [0x018C0000], -1002), # + override : invalid binary digit
            ('[r13, #012000900005]', [0x018D0000], -1003), # + override : invalid octal digit
            ('[r14, #45d]', [0x018E0000], -1004),       # + override : invalid decimal digit
            ('[r15, #0x4X5]', [0x018F0000], -1005),     # + override : invalid hexa digit
            ('[ r6, #+0]', [0x01860000], 1000),         # > T24.2.4 > T24.4.1b > T24.7.0 : success base + imm. displ.
            ('[r6, #20]', [0x01860014], 1000),          # > T24.2.4 > T24.4.1b > T24.7.0 : success base + imm. displ.
            ('[r7, #+4095]', [0x01870FFF], 1000),       # > T24.2.4 > T24.4.1b > T24.7.0 : maximum positive imm. displ.
            ('[r8, #-20]', [0x01080014], 1000),         # > T24.2.4 > T24.4.1b > T24.7.0 : base + negative imm. displ.
            ('[r9, #-4095]', [0x01090FFF], 1000),       # > T24.2.4 > T24.4.1b > T24.7.0 : minimum negative imm. displ.
            ('[r10]', [0x018A0000], 1000),              # T24.0.2 > T24.1.1c > T24.7.0 : success base only
            ('[sp ]', [0x018D0000], 1000),              # T24.0.2 > T24.1.1c > T24.7.0 : idem with trailing space
            ('[r9,r1]', [0x03890001], 1000),            # > T24.1.1b > T24.2.5c > T24.7.0: success base + reg. displacement
            ('[ sp , lr ]', [0x038D000E], 1000),        # > T24.1.1b > T24.2.5c > T24.7.0: idem with extra spaces
            ('[r1, +r6]', [0x03810006], 1000),          # > T24.2.2 > T24.3.1c > T24.7.0 : check positive reg displ.
            ('[r6, -r7]', [0x03060007], 1000),          # > T24.2.3 > T24.3.1c > T24.7.0 : check negative reg displ.
            ('[r5, r15]', [0x01850000], -2412),         # > T24.2.5b + override : PC not allowed as Rm
            ('[r5, r10, ]', [0x0385000A], -2409),       # > T24.2.5b > T24.5.1 > T24.5.3 : missing shift mode
            ('[r5, r10, lslx]', [0x0385000A], -2409),   # > T24.2.5b > T24.5.1 > T24.5.3 : wrong shift mode
            ('[r7, +r2, lsl]', [0x03870002], -2409),    # > T24.3.1b > T24.5.1 > T24.5.2c : missing space after shift
            ('[r8, -r3, lsr ]', [0x03080003], -2409),   # > T24.3.1b > T24.6.2 : missing info after shift mode
            ('[r9, r4, asr x]', [0x03890004], -1702),   # > T24.5.2b > T24.6.2 : wrong info after shift mode
            ('[r0, r8, ror #]', [0x03800008], -1703),   # > T24.5.2b > T24.6.1a + override : missing value after '#'
            ('[r2, r10, lsr #f]', [0x0382000A], -1705), # > T24.5.2b > T24.6.1a + override : unrecogn. info after '#'
            ('[r4, r12, ror #-20]', [0x0384000C], -1706), # > T24.6.1b + override : negative number of shifts
            ('[r5, r13, lsl #040]', [0x0385000D], -1706), # > T24.6.1b + override : too high number of shifts
            ('[r5, r13, lsl #0]', [0x0385000D], 1000),  # > T24.6.1b > T24.7.0 : true LSL #0
            ('[r6, lr, lsr #0x1C] ', [0x03860E2E], 1000), # > T24.6.1b > T24.7.1> T24.7.0: success with trailing space
            ('[r5, r13, lsl #00]', [0x0385000D], 1000), # > T24.6.1b > T24.7.0 : true LSL #0
            ('[r6, sp, lsr #0x0 ]', [0x0386000D], 1000), # > T24.6.1b > T24.7.0 : converting LSR #0 into LSL #0
            ('[r7,-r1,asr #0b10101]', [0x03070AC1], 1000), #  : ASR bin imm, no space after ','
            ('[r7,+r1,asr #0b0]', [0x03870001], 1000),  #  : converting ASR #0 into LSL #0
            ('[r9, r12, ror #0x1F]', [0x03890FEC], 1000), #  : success ROR with 31 shifts
            ('[r9, r12, ror #0x0]', [0x0389006C], 1000) #  : coding ROR #0 as RRX
           ]
# Parser tests for addressing mode 3 operands (halfword / signed-byte
# transfers, T25 state machine). Tuple layout: (operand text, expected
# partial encoding words, status) where 1000 = success and negative
# values are parser error codes; "+ override" marks an error code
# propagated from a lower-level sub-parser.
am3_test = [('', [], -2501),                           # T25.0.0 error: missing addressing mode
(' ', [], -2501),                                      # T25.0.1 > T25.0.0 error: idem with leading space
('2', [], -2502),                                      # T25.0.3 error: missing '['
('[', [], -2503),                                      # T25.0.2 > T25.1.0 error: missing info after '['
('[2', [], -2503),                                     # T25.0.2 > T25.1.2 : unrecognizable register
('[r', [], -1303),                                     # T25.0.2 > T25.1.1a + override : missing register number
('[ra', [], -1304),                                    # T25.0.2 > T25.1.1a + override : wrong reg number
('[r16', [], -1304),                                   # T25.0.2 > T25.1.1a + override : too high reg number
('[r12', [], -2504),                                   # T25.0.2 > T25.1.1a error: good base reg, missing closure
('[r0+', [], -1304),                                   # T25.0.2 > T25.1.1a + override : missing ',' after base reg
('[r1,', [0x01C10000], -2505),                         # T25.0.2 > T25.1.1b > T25.2.0 : missing displacement
('[r2]!', [0x01C20000], -2510),                        # T25.0.2 > T25.1.1c > T25.7.2 : unexpected text after ']'
('[r3, 3', [0x01C30000], -2506),                       # > T25.1.1b > T25.2.1 > T25.2.6 : wrong displacement
('[r4, ra', [0x01C40000], -1304),                      # > T25.2.1 > T25.2.5a + override: wrong reg number
('[r5, r1a', [0x01C50000], -1304),                     # > T25.2.1 > T25.2.5a + override: wrong reg number
('[r6, +r1', [0x01C60000], -2504),                     # > T25.2.1 > T25.2.2 > T25.3.1a : check positive reg displ.
('[r7, -r6', [0x01470000], -2504),                     # > T25.2.1 > T25.2.3 > T25.3.1a : check negative reg displ.
('[r8, -', [0x01480000], -2505),                       # > T25.2.3 > T25.3.0 : EOSeq after '-'
('[r8, -3.2', [0x01480000], -2506),                    # > T25.2.3 > T25.3.2 : wrong reg after '-'
('[r5, r10, ', [0x01C50000], -2513),                   # > T25.2.5b : scaled reg. displ. not allowed
('[r7, r2, lsl', [0x01C70000], -2513),                 # > T25.2.5b : idem
('[r8, #', [0x01C80000], -2505),                       # > T25.2.1 > T25.2.4 > T25.4.0 : missing displacement
('[r4, # ', [0x01C40000], -2506),                      # > T25.2.1 > T25.2.4 > T25.4.2 : unexpected space after '#'
('[r5, #\'f\'', [0x01C50000], -2506),                  # > T25.2.1 > T25.2.4 > T25.4.2 : unrecognizable info after '#'
('[r6, #20', [0x01C60000], -2504),                     # > T25.2.1 > T25.2.4 > T25.4.1a : base + imm. displ., missing ']'
('[r9, #0x134]', [0x1C90000], -2511),                  # > T25.4.1b + override : too long immediate displacement
('[r12, #0b0001103]', [0x01CC0000], -1002),            # + override : invalid binary digit
('[r13, #012009005]', [0x01CD0000], -1003),            # + override : invalid octal digit
('[r14, #4+5]', [0x01CE0000], -1004),                  # + override : invalid decimal digit
('[r15, #0xX45]', [0x01CF0000], -1005),                # + override : invalid hexa digit
('[ r6, #+0]', [0x01C60000], 1000),                    # > T25.2.4 > T25.4.1b > T25.7.0 : success base + imm. displ.
('[r6 ,#195]', [0x01C60C03], 1000),                    # > T25.2.4 > T25.4.1b > T25.7.0 : success base + imm. displ.
(' [r7, #+255]', [0x01C70F0F], 1000),                  # > T25.2.4 > T25.4.1b > T25.7.0 : maximum positive imm. displ.
('[r8, # -80]', [0x01480500], 1000),                   # > T25.2.4 > T25.4.1b > T25.7.0 : base + negative imm. displ.
('[r9, #-255 ]', [0x01490F0F], 1000),                  # > T25.2.4 > T25.4.1b > T25.7.0 : minimum negative imm. displ.
('[r9,# - 25]', [0x01490109], 1000),                   # > T25.2.4 > T25.4.1b > T25.7.0 : negative with white spaces
('[r9, # + 25]', [0x01C90109], 1000),                  # > T25.2.4 > T25.4.1b > T25.7.0 : positive with white spaces
('[r10]', [0x01CA0000], 1000),                         # T25.0.2 > T25.1.1c > T25.7.0 : success base only
('[sp ]', [0x01CD0000], 1000),                         # T25.0.2 > T25.1.1c > T25.7.0 : idem with trailing space
('[r9,r1]', [0x01890001], 1000),                       # > T25.1.1b > T25.2.5c > T25.7.0: success base + reg. displacement
('[ sp , lr ]', [0x018D000E], 1000),                   # > T25.1.1b > T25.2.5c > T25.7.0: idem with extra spaces
('[r1, +r6]', [0x01810006], 1000),                     # > T25.2.2 > T25.3.1c > T25.7.0 : check positive reg displ.
('[r1, + r6]', [0x01810006], 1000),                    # > T25.2.2 > T25.3.1c > T25.7.0 : idem with white space
('[r6, -r7]', [0x01060007], 1000),                     # > T25.2.3 > T25.3.1c > T25.7.0 : check negative reg displ.
('[r6,- r7] ', [0x01060007], 1000),                    # > T25.3.1c > T25.7.1 > T25.7.0 : idem with white space
('[r5, r15]', [0x01C50000], -2512),                    # > T25.2.5b + override : PC not allowed as Rm
('[r5, r10+]', [0x01C50000], -1304),                   # > T25.2.5b + override : wrong text after reg. number
('[r5, +r10,]', [0x01C50000], -2513)                   # > T25.2.2 > T25.3.1b : scaled reg. displ. not allowed
]
# Parser tests for single word/byte memory-transfer instructions
# (LDR/STR/LDRB/STRB with optional condition codes, T34 state machine).
# Tuple layout: (source line, [instruction size, expected encoding],
# status) where 1000 = success and negative values are error codes.
im2_test = [('', [], -3401),                           # T34.0.0 error: missing memory transfer inst.
(' ', [], -3401),                                      # T34.0.1 > T34.0.0 error: idem with leading space
('2', [], -3402),                                      # T34.0.3 error: missing 'ld' or 'st'
('ld', [4, 0xE0000000], -3402),                        # T34.0.2 > T34.1.0 error: missing inst. continuation
('st ', [4, 0xE0000000], -3402),                       # T34.0.2 > T34.1.4 error: missing inst. continuation
('str', [4, 0xE0000000], -3403),                       # T34.0.2 > T34.1.1 > T34.2.0 : missing space after inst.
('ldr ', [4, 0xE4100000], -3405),                      # > T34.1.1 > T34.2.1 > T34.5.0 : missing destination register
('sts', [4, 0xE0000000], -3408),                       # T34.0.2 > T34.1.2 + override : 's' not allowed for store inst.
('ldx', [4, 0xE0000000], -3402),                       # T34.0.2 > T34.1.4 : unrecognized mem. transfer inst.
('ldrb', [4, 0xE0000000], -3403),                      # > T34.1.1 > T34.2.2 > T34.3.0 : missing space after inst.
('strb ', [4, 0xE4400000], -3405),                     # > T34.2.2 > T34.3.1 > T34.5.0 : missing destination register
('ldrby', [4, 0xE0000000], -3404),                     # > T34.2.2 > T34.3.2 : wrong text after inst.
('ldrb e', [4, 0xE4500000], -1302),                    # > T34.3.1 > T34.5.1a + override: unknown reg
('str r', [4, 0xE4000000], -1303),                     # > T34.2.1 > T34.5.1a + override: missing reg number
('ldr rb', [4, 0xE4100000], -1304),                    # > T34.2.1 > T34.5.1a + override: wrong reg number
('ldrb r1', [4, 0xE4500000], -3406),                   # > T34.2.1 > T34.5.1a error: missing ',' after dest. reg
('strb r2,', [4, 0xE4402000], -3407),                  # > T34.5.1b > T34.6.0 error: missing info after dest. reg
('streq', [4, 0x00000000], -3403),                     # > T34.2.3 > T34.4.0 : missing space after inst.
('ldrne ', [4, 0x14100000], -3405),                    # > T34.2.3 > T34.4.1 > T34.5.0 : missing destination register
('strles', [4, 0xD0000000], -3408),                    # > T34.2.3 > T34.4.4 + override : 's' not allowed for store inst.
('ldrlox', [4, 0x30000000], -3404),                    # > T34.2.3 > T34.4.5 : unrecognized mem. transfer inst.
('ldrmib', [4, 0x40000000], -3403),                    # > T34.2.3 > T34.4.2 > T34.3.0 : missing space after inst.
('strmib ', [4, 0x44400000], -3405),                   # > T34.4.2 > T34.3.1 > T34.5.0 : missing destination register
('ldrhsbx', [4, 0x20000000], -3404),                   # > T34.4.2 > T34.3.2 : wrong text after inst.
('ldrhsb r2, 2', [4, 0x24502000], -2402),              # > T34.6.1 > T34.6.3 + override : missing '['
('strvcb r3, [', [4, 0x74403000], -2403),              # > T34.6.3 + override : missing info after '['
('ldrge r4, [2', [4, 0xA4104000], -2403),              # > T34.6.3 + override : unrecognizable register
('strltb r5,[r', [4, 0xB4405000], -1303),              # > T34.6.3 + override : missing register number
('ldrvc r6, [r16', [4, 0x74106000], -1304),            # + override : too high reg number
('ldr lr, [r12', [4, 0xE410E000], -2404),              # + override : good base reg, missing closure
('str sp, [r0 ', [4, 0xE400D000], -2404),              # + override : missing ',' after base reg
('ldrb r15, [r1,', [4, 0xE450F000], -2405),            # + override : missing displacement
('strb pc, [r2]!', [4, 0xE440F000], -2410),            # + override : unexpected text after ']'
('ldrvsb r4,[r3, 3', [4, 0x64504000], -2406),          # + override : wrong displacement
('strge r5, [r5, r1a', [4, 0xA4005000], -1304),        # + override : wrong reg number
('ldrltb r6, [r5, r10, ', [4, 0xB4506000], -2407),     # + override : missing shift mode
('strlsb r7, [r7, r2, lsl', [4, 0x94407000], -2408),   # + override : missing space after shift
('strgt r9, [r8, r3, lsr ', [4, 0xC4009000], -2408),   # + override : missing info after shift mode
('ldr r11, [r10, r5, ror r', [4, 0xE410B000], -1702),  # + override : idem
('ldrb r12, [r1, r9, lsl # ', [4, 0xE450C000], -1704), # + override : unexpected space after '#'
('strb r13,[r9,#0xC0000034]', [4, 0xE440D000], -2411), # + override : too long immediate displacement
('ldr r0, [r12, #0b1002000]', [4, 0xE4100000], -1002), # + override : invalid binary digit
('strhi r1, [r13, #018000005]', [4, 0x84001000], -1003), # + override : invalid octal digit
('strlob r2, [r14, #5d4]', [4, 0x34402000], -1004),    # + override : invalid decimal digit
('ldrplb r3, [r15, #0x4r]', [4, 0x54503000], -1005),   # + override : invalid hexa digit
('ldrb r3, [r15, #0x400000000]', [4, 0xE4503000], -1006), # + override : too big number
('ldrcsb r4, [ r6, #+0]', [4, 0x25D64000], 1000),      # > T34.6.3 : success base + imm. displ.
('ldr r5, [r6, #20]', [4, 0xE5965014], 1000),          # : success base + imm. displ.
('str r6,[r7, #+4095]', [4, 0xE5876FFF], 1000),        # : maximum positive imm. displ.
('ldreqb r7, [r8, #-20]', [4, 0x05587014], 1000),      # : base + negative imm. displ.
('strccb r8, [r9, #-4095] ', [4, 0x35498FFF], 1000),   # : minimum negative imm. displ.
('ldr r9, [r10]', [4, 0xE59A9000], 1000),              # : success base only
('str r10,[r9,+r1]', [4, 0xE789A001], 1000),           # : success base + reg. displacement
('str r10, [r5, r15]', [4, 0xE400A000], -2412),        # + override : PC not allowed as Rm
('strb r11, [r0, r8, ror #]', [4, 0xE440B000], -1703), # + override : missing value after '#'
('ldrle r12, [r2, r10, lsr #f]', [4, 0xD410C000], -1705), # + override : unrecogn. info after '#'
('strmib r13, [r4, r12, ror #-20]', [4, 0x4440D000], -1706), # override : negative number of shifts
('ldrplb r14, [r5, r13, lsl #040]', [4, 0x5450E000], -1706), # override : too high number of shifts
('ldrvs r15,[r6, lr, lsr #0x1C] ', [4, 0x6796FE2E], 1000), # : success with trailing space
('str r0, [r5, r13, lsl #00]', [4, 0xE785000D], 1000), # : true LSL #0
('ldr r1, [r6, sp, lsr #0x0 ]', [4, 0xE796100D], 1000), # : converting LSR #0 into LSL #0
('str r2, [r7,-r1,asr #0b10101]', [4, 0xE7072AC1], 1000), # : ASR bin imm, no space after ','
('ldr r3 ,[r7,+r1,asr #0b0]', [4, 0xE7973001], 1000),  # : converting ASR #0 into LSL #0
('ldrb r4,[r9, r12, ror #0x1F]', [4, 0xE7D94FEC], 1000), # : success ROR with 31 shifts
('strb r5, [r9, r12, ror #0x0]', [4, 0xE7C9506C], 1000) # : coding ROR #0 as RRX
]
# Parser tests for halfword / signed-byte memory-transfer instructions
# (LDRH/STRH/LDRSB/LDRSH and the LDSB/LDSH synonyms, T34.8-T34.12 states).
# Tuple layout: (source line, [instruction size, expected encoding],
# status) where 1000 = success and negative values are error codes.
im3_test = [('lds', [4, 0xE0000000], -3404),           # T34.0.2 > T34.1.2 > T34.8.0 error: wrong memory transfer inst.
('strz', [4, 0xE0000000], -3404),                      # T34.0.2 > T34.1.1 > T34.2.6 error: wrong memory transfer inst.
('strs', [4, 0xE0000000], -3408),                      # > T34.1.1 > T34.2.5 + override : 's' not allowed for store inst.
('strh', [4, 0xE00000B0], -3403),                      # > T34.1.1 > T34.2.4 > T34.9.0 error: missing space after inst.
('ldrs', [4, 0xE0000000], -3404),                      # > T34.1.1 > T34.2.5 > T34.10.0 : wrong memory transfer inst.
('ldrh ', [4, 0xE01000B0], -3405),                     # > T34.2.4 > T34.9.1 > T34.11.0 : missing destination reg
('ldrsb', [4, 0xE01000D0], -3403),                     # > T34.2.5 > T34.10.1 > T34.9.0 : missing space after inst.
('ldrsh', [4, 0xE01000F0], -3403),                     # > T34.2.5 > T34.10.1 > T34.9.0 : missing space after inst.
('ldrsi', [4, 0xE0000000], -3404),                     # > T34.2.5 > T34.10.2 : missing space after inst.
('ldrsb ', [4, 0xE01000D0], -3405),                    # > T34.10.1 > T34.9.1 > T34.11.0: missing destination reg
('ldrsb e', [4, 0xE01000D0], -1302),                   # > T34.11.1a + override : wrong text after inst.
('ldrsbt', [4, 0xE01000D0], -3404),                    # > T34.10.1 > T34.9.2 : wrong memory transfer inst.
('ldsb', [4, 0xE01000D0], -3403),                      # > T34.8.2 > T34.9.0 : missing space after inst.
('ldsh ', [4, 0xE01000F0], -3405),                     # > T34.8.2 > T34.9.1 > T34.11.0 : missing destination reg
('ldsu ', [4, 0xE0000000], -3404),                     # T34.0.2 > T34.1.2 > T34.8.3 : wrong memory transfer inst.
('strneh', [4, 0x100000B0], -3403),                    # > T34.2.3 > T34.4.3 > T34.9.0 : missing space after inst.
('ldscc', [4, 0x30000000], -3404),                     # > T34.1.2 > T34.8.1 > T34.10.0 : wrong memory transfer inst.
('ldreqs', [4, 0x00000000], -3404),                    # > T34.2.3 > T34.4.4 > T34.10.0 : wrong memory transfer inst.
('ldrlssb', [4, 0x901000D0], -3403),                   # > T34.4.4 > T34.10.1 > T34.9.0 : missing space after inst.
('ldshsb r2', [4, 0x201000D0], -3406),                 # > T34.9.1 > T34.11.1a error: missing ',' after destination reg
('ldrhsh r2,', [4, 0x201020B0], -3407),                # > T34.11.1b > T34.12.0 : missing info after dest. reg
('strleh r10, r12', [4, 0xD000A0B0], -2502),           # T34.11.1b > T34.12.1 + override : missing '['
('strlsh r10, [12', [4, 0x9000A0B0], -2503),           # T34.11.1b > T34.12.1 + override : missing reg after '['
('strloh r8, [r12', [4, 0x300080B0], -2504),           # T34.11.1b > T34.12.1 + override : missing closure
('streqh r9, [r1,', [4, 0x000090B0], -2505),           # T34.11.1b > T34.12.1 + override : missing displacement
('ldsccb r1,[r2]!', [4, 0x301010D0], -2510),           # T34.11.1b > T34.12.1 + override: unexpected text after ']'
('strh r2, [r3, 3', [4, 0xE00020B0], -2506),           # + override : wrong displacement
('ldsvch r4, [r5, r1a', [4, 0x701040F0], -1304),       # + override : wrong reg number
('ldrvssb r5, [r7, -r6', [4, 0x601050D0], -2504),      # + override : check negative reg displ.
('strplh r9, [r5, r10, ', [4, 0x500090B0], -2513),     # + override : scaled reg. displ. not allowed
('ldsmib r10, [r9, #0x134]', [4, 0x4010A0D0], -2511),  # + override : too long immediate displacement
('ldrgtsb r11 , [ r6, #+0]', [4, 0xC1D6B0D0], 1000),   # > T34.11.1b > T34.12.1 success: base + imm. displ.
('strh r12, [r6 ,#195]', [4, 0xE1C6CCB3], 1000),       # : base + imm. displ.
('ldrlsh r3, [r10, #-180]', [4, 0x915A3BB4], 1000),    # : base + negative imm. displ.
('ldsgeh r13, [r8, # -80]', [4, 0xA158D5F0], 1000),    # : base + negative imm. displ.
('ldshsb r14,[r9, #-255 ]', [4, 0x2159EFDF], 1000),    # : minimum negative imm. displ.
('strhih pc, [r10]', [4, 0x81CAF0B0], 1000),           # : success base only
(' ldrgtsh lr, [ pc ]', [4, 0xC1DFE0F0], 1000),        # : idem with leading space
('ldsvsb r10,[r9,r1]', [4, 0x6199A0D1], 1000),         # : success base + reg. displacement
('ldrlssh r0, [ sp , lr ]', [4, 0x919D00FE], 1000),    # : idem with extra spaces
('strleh r1, [r6, -r7]', [4, 0xD10610B7], 1000),       # : check negative reg displ.
('ldsb r9, [r5, r15]', [4, 0xE01090D0], -2512)         # + override : PC not allowed as Rm
]
# Parser tests for multiple memory-transfer instructions (LDM/STM with
# addressing suffixes ia/ib/da/db/fd/fa/ed/ea, optional '!' write-back
# and register lists, T34.13-T34.18 states). Tuple layout: (source line,
# [instruction size, expected encoding], status).
imm_test = [('ldm', [4, 0xE0000000], -3404),           # T34.0.2 > T34.1.3 > T34.13.0 error: wrong memory transfer inst.
('stmz', [4, 0xE0000000], -3404),                      # T34.0.2 > T34.1.3 > T34.13.3 error: wrong memory transfer inst.
('ldmia', [4, 0xE8900000], -3403),                     # > T34.13.2 > T34.15.0 : missing space after inst.
('stmdb ', [4, 0xE9000000], -3405),                    # > T34.15.1 > T34.16.0 : missing destination reg
('ldmibe', [4, 0xE9900000], -3404),                    # > T34.13.2 > T34.15.2 : wrong memory transfer inst.
('ldmib e', [4, 0xE9900000], -1302),                   # > T34.16.1a + override : wrong register
('stmne', [4, 0x10000000], -3404),                     # > T34.13.1 > T34.14.0 : wrong memory transfer inst.
('ldmccda', [4, 0x38100000], -3403),                   # > T34.14.1 > T34.15.0 : missing space after inst.
('ldmccde', [4, 0x30000000], -3404),                   # > T34.14.2 error: missing space after inst.
('ldmeqia r', [4, 0x08900000], -1303),                 # > T34.16.1a + override : missing reg number
('ldmhsfd r2', [4, 0x28900000], -3406),                # > T34.16.1a error: missing ',' after destination reg
('ldmhsfa r2,', [4, 0x28120000], -3407),               # > T34.16.1b > T34.18.0 : missing info after dest. reg
('stmhiea r2!', [4, 0x89020000], -3406),               # > T34.16.1c > T34.17.0 : missing ',' after destination reg
('stmhiea r2!,', [4, 0x89220000], -3407),              # > T34.17.2 > T34.18.0 : missing info after dest. reg
('stmea r2!d', [4, 0xE9020000], -3404),                # > T34.17.3 error: wrong text after '!'
('stmccib r3,1', [4, 0x39830000], -1502),              # > T34.18.1 + override : missing '{'
('ldmmied r4!, {', [4, 0x49B40000], -1503),            # + override : missing registers
('ldmplia r5, {1', [4, 0x58950000], -1302),            # + override : unknown register identifier
('stmneda r6! , {r', [4, 0x18260000], -1303),          # > T34.17.1 + override : missing register number
('stmia r7,{ra', [4, 0xE8870000], -1304),              # + override : wrong reg number
('ldmfd r8, {r0', [4, 0xE8980000], -1503),             # + override : unclosed single register
('stmed r9, {r14,}', [4, 0xE9890000], -1504),          # + override : missing register after ','
('ldmfd r13!, {r4-}', [4, 0xE8BD0000], -1403),         # + override : missing second reg in range list
('ldmfd r13!, {r14, }', [4, 0xE8BD0000], -1504),       # + override : missing register after ', '
('ldmeqda r10!, {r0}', [4, 0x083A0001], 1000),         # > T34.18.1 success: single register
('ldmalib r11 , {r0-r5}', [4, 0xE99B003F], 1000),      # : single range
('stmccdb r12!, {pc, r1-r2, sp-r12, r5}', [4, 0x392CB026], 1000), # : several ranges, with spaces
('stmea r13!, {r14,r8}', [4, 0xE92D4100], 1000),       # : no space after ','
('ldmfd r13!, { r9 , r13 }', [4, 0xE8BD2200], 1000)    # : extra spaces
]
# Parser tests for the 'ldr rX, =value' immediate-literal pseudo-load
# (only 'ldr' may use '='; T34.7 states). On success the expected list
# carries a third element: the literal word placed in the pool that the
# generated PC-relative LDR addresses.
iil_test = [('str r0, =', [4, 0xE4000000], -3409),     # > T34.6.2 + override : 'str' cannot use '=' loading
('ldrb r0,=', [4, 0xE4500000], -3409),                 # > T34.6.2 + override : neither 'ldrb'
('ldrh r0,=', [4, 0xE01000B0], -2502),                 # > T34.12.1 + override error: nor 'ldrh'
('ldr r0, =', [4, 0xE4100000], -3410),                 # > T34.6.2 > T34.7.0 error: missing number for immediate load
('ldr r0, = ', [4, 0xE4100000], -3410),                # > T34.7.1 > T34.7.0 : idem with trailing space
('ldr r0, =t', [4, 0xE4100000], -3410),                # > T34.7.1 > T34.7.3 : idem with trailing rubbish
('ldr r1, =0b00130', [4, 0xE4101000], -1002),          # > T34.7.2 + override: invalid binary digit
('ldr r2, =00180', [4, 0xE4102000], -1003),            # + override: invalid octal digit
('ldr r3, = -18a', [4, 0xE4103000], -1004),            # + override: invalid decimal digit
('ldr r4, =0x10GA', [4, 0xE4104000], -1005),           # + override: invalid hexa digit
('ldr r5, =0x100000000', [4, 0xE4105000], -1006),      # + override: too big number
('ldr r6, =+0', [4, 0xE59F6FF8, 0], 1000),             # > T34.7.2 success: set a relative pc loading
('ldrhi r7, = 00317652', [4, 0x859F7FF8, 0x19FAA], 1000), # : octal number
('ldrlt lr, =-1000', [4, 0xB59FEFF8, -1000], 1000),    # : negative number
('ldr pc, = 0x8000', [4, 0xE59FFFF8, 0x8000], 1000)    # : hexa number (load PC)
]
# Parser tests for miscellanea instructions (push/pop with register
# lists and clz, optionally conditional; T35 state machine). Tuple
# layout: (source line, [instruction size, expected encoding], status).
imi_test = [('', [], -3501),                           # T35.0.0 error: missing miscellanea instruction
(' ', [], -3501),                                      # T35.0.1 > T35.0.0 : idem with space
('ldr', [], -3503),                                    # T35.0.4 error: unrecognizable instruction
('push', [], -3502),                                   # T35.0.2a error: missing operands
(' clz', [], -3502),                                   # T35.0.1 > T35.0.3a error: idem with leading space
('pop ', [4, 0xE8BD0000], -3502),                      # > T35.0.2b > T35.2.0 : idem with a trailing space
('clz ', [4, 0xE1600010], -3502),                      # > T35.0.3b > T35.4.0 : idem for 'clz'
('clz 2', [4, 0xE1600010], -1302),                     # > T35.4.1a + override : unrecognizable register
('clz r', [4, 0xE1600010], -1303),                     # > T35.4.1a + override : missing register number
('clz r16', [4, 0xE1600010], -1304),                   # > T35.4.1a + override : too high reg number
('push 1', [4, 0xE92D0000], -1502),                    # > T35.2.1 + override : missing '{'
('pop {', [4, 0xE8BD0000], -1503),                     # + override : missing registers
('pushne {1', [4, 0x192D0000], -1302),                 # + override : unknown register identifier
('pophs {r', [4, 0x28BD0000], -1303),                  # + override : missing register number
('pushhi {ra', [4, 0x892D0000], -1304),                # + override : wrong reg number
('poplo {r0', [4, 0x38BD0000], -1503),                 # + override : unclosed single register
('pushge {r14,}', [4, 0xA92D0000], -1504),             # + override : missing register after ','
('popcc {r4-}', [4, 0x38BD0000], -1403),               # + override : missing second reg in range list
('pushvs {r14, }', [4, 0x692D0000], -1504),            # + override : missing register after ', '
('pusheq', [4, 0xE92D0000], -3502),                    # T35.0.2c > T35.1.1a error: missing operands
('popcce', [4, 0x38BD0000], -3504),                    # T35.0.2c > T35.1.1c error: wrong text after inst.
('popce', [4, 0xE8BD0000], -3504),                     # T35.0.2c > T35.1.2 error: wrong text after inst.
('pushle ', [4, 0xD92D0000], -3502),                   # > T35.1.1b > T35.2.0 error: missing operands
('clzh', [4, 0xE1600010], -3504),                      # T35.0.3c > T35.3.2 error: wrong text after inst.
('clzhi', [4, 0xE1600010], -3502),                     # T35.0.3c > T35.3.1a error: missing operands
('clzhi ', [4, 0x81600010], -3502),                    # > T35.3.1b > T35.4.0 err: missing operands
('clzhii', [4, 0x81600010], -3504),                    # T35.0.3c > T35.3.1c error: wrong text after inst.
('clzhs r15,', [4, 0x2160F010], -3502),                # > T35.4.1b > T35.5.0 : missing operands
('clzhs r15 z,', [4, 0x21600010], -1304),              # > T35.4.1a + override : wrong reg
('clzhs r15, ', [4, 0x2160F010], -3505),               # > T35.4.1c > T35.5.2 : wrong info after Rd
('clzls r15,r6', [4, 0x9160F016], 1000),               # > T35.4.1b > T35.5.1 : success 'clz' + cond
('pushls {r14}', [4, 0x992D4000], 1000),               # > T35.1.1b > T35.2.1 : success 'push' + cond
('pop {r0, r4-r10, r14}', [4, 0xE8BD47F1], 1000)       # > T35.2.1 : success 'pop'
]
# Full-line tests for data directives (T40 state machine): a hex address
# or '>' (continue at the auto-incremented, alignment-adjusted address)
# followed by .byte/.hword/.word/.ascii/.asciz and value lists. Tuple
# layout: (source line, [resolved address, [element size, values...]],
# status) where 1000 = success and negative values are error codes.
data_arm = [('', [], -4001),                           # T40.0.0 error: missing initial hex address
('2', [], -4002),                                      # T40.0.4 error: wrong initial address
('>', [], -4003),                                      # T40.0.2a error: missing space after '>'
('>a', [], -4003),                                     # T40.0.2c error: unexpected char after '>'
(' ', [], -4001),                                      # T40.0.1 > T40.0.0 error: white leading space
('0x', [], -2002),                                     # T40.0.3 + override : leading '0x', missing hex digits
(' 0x8001', [], -2003),                                # T40.0.1 > T40.0.3 + override : missing space after address
(' 0x8001 ', [0x8001], -4004),                         # T40.0.1 > T40.0.3 > T40.1.0 error: right address, missing info
('0x10002EF00 .byte 2', [], -2004),                    # T40.0.3 + override : long hex address (> 2^32)
('0x8000.f', [], -2003),                               # T40.0.3 + override : missing space after address
('0x8000 .f', [0x8000], -2104),                        # T40.0.3 > T40.1.1 + override : unknown data dir
('0x8024 .byte', [0x8024], -2102),                     # T40.0.3 > T40.1.1 + override : address & directive, missing val
('0x8000 .byte ', [0x8000], -2102),                    # T40.0.3 > T40.1.1 + override : missing data values
('0x8000 .byte2', [0x8000], -2103),                    # T40.0.3 > T40.1.1 + override : missing space after directive
('0x8024 .byte 23', [0x8024, [1, 23]], 1000),          # T40.0.3 > T40.1.1 success: capture one byte
('> ', [0x8025], -4004),                               # T40.0.2b > T40.2.0 error: missing info after '>'
('> .byte 2', [0x8025, [1, 2]], 1000),                 # T40.0.2b > T40.2.1 success: .byte directive after '>'
('> .byte 3', [0x8026, [1, 3]], 1000),                 # T40.0.2b > T40.2.1 success: '>' after '>'
('> .byte 230', [0x8027, [1, 230]], 1000),             # T40.0.2b > T40.2.1 success : '>' after .byte (1 value)
('0x802F .byte 23, 0xCB', [0x802F, [1, 23, 0xCB]], 1000), # T40.0.3 > T40.1.1 success: capture two bytes
('0x802F .byte \'e\' c', [0x802F], -2105),             # T40.0.3 > T40.1.1 + override : wrong delimiter
('0x802F .byte \'e\', c', [0x802F], -2106),            # T40.0.3 > T40.1.1 + override : unrecognizeable info
('0x802F .byte 2000', [0x802F], -2107),                # T40.0.3 > T40.1.1 + override : data >= 2**8
('0x901B .hword 2300, 0xCB0', [0x901B, [2, 2300, 0xCB0]], 1000), # T40.0.2b > T40.1.1 / misaligned h
(' > .hword 230', [0x9020, [2, 230]], 1000),           # T40.0.2b > T40.2.1 '>' after .hword (2 values)
('0x901A .hword 2300, 0xCB0', [0x901A, [2, 2300, 0xCB0]], 1000), # T40.0.3 > T40.1.1 / aligned h
(' > .hword 320', [0x901E, [2, 320]], 1000),           # T40.0.2b > T40.2.1 '>' after .hword (h aligned)
('0xCbf8 .word 230000, 0xCB000', [0xCBF8, [4, 230000, 0xCB000]], 1000), # T40.0.3 > T40.1.1 / aligned w
('0xCbf9 .word 230000, 0xCB000', [0xCBF9, [4, 230000, 0xCB000]], 1000), # / misaligned w (1)
('0xCbfa .word 230000, 0xCB000', [0xCBFA, [4, 230000, 0xCB000]], 1000), # / misaligned w (2)
('0xCbfb .word 230000, 0xCB000', [0xCBFB, [4, 230000, 0xCB000]], 1000), # / misaligned w (3)
('> .word 010', [0xCC04, [4, 8]], 1000),               # T40.0.2b > T40.2.1 '>' after .word (2 values)
('0xa03c .ascii \'2\'', [0xA03C, [1, 50]], 1000),      # T40.0.3 > T40.1.1 success: .ascii directive
('> .word 0x010', [0xA040, [4, 16]], 1000),            # T40.0.2b > T40.2.1 '>' after .ascii (1 value)
('0xa03b .asciz \'2\', \"0xCB\"', [0xA03B, [1, 50, 0, 48, 120, 67, 66, 0]], 1000), # / two strings
('> .word 0b010', [0xA044, [4, 2]], 1000),             # T40.0.2b > T40.2.1 '>' after .asciz (7 values)
('0xa03c .ascii \' ', [0xA03C], -1104),                # T40.0.3 > T40.1.1 + override : unclosed char
('0xa03c .ascii \" ', [0xA03C], -1204),                # : unclosed string
('0xa03c .asciz \' ', [0xA03C], -1104),                # : unclosed char
('0xa03c .asciz \" ', [0xA03C], -1204),                # : unclosed string
('0xa03c .ascii \'\'', [0xA03C], -1102),               # : empty char
('0xa03c .ascii \"\"', [0xA03C], -1202),               # : empty string
('0xa03c .asciz \'\'', [0xA03C], -1102),               # : empty char
('0xa03c .asciz \"\"', [0xA03C], -1202),               # : empty string
('0xc30a .ascii \'\t\'', [0xC30A], -1103),             # : illegal character ''
('0xc30a .asciz \'\t\'', [0xC30A], -1103),             # : idem after .ascii
('0xc30a .ascii \"\t\"', [0xC30A], -1203),             # : illegal character ""
('0xc30a .asciz \" \t\"', [0xC30A], -1203),            # : idem after valid char
('0x3000 .ascii \' t\'', [0x3000], -1105),             # : more than one character
('0x3000 .asciz \' t\'', [0x3000], -1105),             # : idem after .ascii
('0x1000 .byte 0b012', [0x1000], -1002),               # : unexpected binary digit
('0x2000 .hword 0408', [0x2000], -1003),               # : unexpected octal digit
('0x2000 .hword 4oo8', [0x2000], -1004),               # : unexpected decimal digit
('0x2000 .hword 408000', [0x2000], -2107),             # : out of range dec. number
('0x2000 .hword -48000', [0x2000], -2107),             # : out of range neg. number
('0x4000 .word 0x40x', [0x4000], -1005),               # : unexpected hexa digit
('0x4000 .word 0x400000000', [0x4000], -1006),         # : too long num. (>2^32 bits)
('0x4000 .word 0x4, 0x', [0x4000], -1005),             # : unexpected hexa digit
('0xfffffffc .ascii \'0\'', [0xFFFFFFFC, [1, 48]], 1000), # almost in the address space limit
('> .word 0b1', [0x100000000, [4, 1]], -4006),         # T40.0.2b > T40.2.1 error: auto-address past the 2^32 limit
]
# Full-line tests for data-processing instructions preceded by an
# address or '>' (T40.1.2 / T40.2.2 paths). Tuple layout: (source line,
# [resolved address, [instruction size, encoding]], status); error
# entries omit the encoding. Also checks the '>' auto-address word
# alignment after .byte/.hword directives.
idat_arm = [('0x8000 2', [0x8000], -4005),             # T40.0.3 > T40.1.7 error: unrecognizable instruction
('0x8004 and', [0x8004], -3102),                       # T40.0.3 > T40.1.2 + override : missing operands after instr.
('0x8008 eor ', [0x8008], -3102),                      # T40.0.3 > T40.1.2 + override : missing operands after instr.
('0x800C sub 20,', [0x800C], -1302),                   # : unrecognizable operand with ','
('0x8010 rsb r', [0x8010], -1303),                     # : missing register number
('0x8014 add r65', [0x8014], -1304),                   # : too high reg number
('0x8018 adc r12', [0x8018], -2302),                   # : good dest reg, missing other ops
('0x801C sbc ', [0x801C], -2303),                      # : missing dest reg
('0x8020 rsc r1,', [0x8020], -2304),                   # : missing source operands
('0x8024 orr r2, ', [0x8024], -2306),                  # : missing source operands
('0x8028 bic r3, gu', [0x8028], -2306),                # : wrong source op 1
('0x802C and r12, r3, e3', [0x802C], -2308),           # : wrong op 2
('0x8030 eor r3, #', [0x8030], -1603),                 # : missing value after '#'
('0x8034 sub r4, # ', [0x8034], -1604),                # : unexpected space after '#'
('0x8038 rsb r5, #f', [0x8038], -1605),                # : unrecognizable info after '#'
('0x803C add r10, #0x1002', [0x803C], -1606),          # : impossible fixup for odd rotations
('0x8040 adc r11, #\'c\' 5', [0x8040], -1607),         # : unexpected text after imm val.
('0x8044 sbc r10, r1,', [0x8044], -2204),              # : missing shift register
('0x8048 rsc r7, r2, lsl', [0x8048], -2205),           # : missing space after shift mode
('0x804C orr r9, r4, asr x', [0x804C], -2207),         # : wrong info after shift mode
('0x8050 bic r0, r8, ror #', [0x8050], -1703),         # : missing value after '#'
('0x8054 and r1, r9, lsl # ', [0x8054], -1704),        # : unexpected space after '#'
('0x8058 eor r2, r10, lsr #f3', [0x8058], -1705),      # : unrecognizable info after '#'
('0x805C sub r4, r12, ror #-2', [0x805C], -1706),      # : negative number of shifts
('0x8060 orrs', [0x8060], -3102),                      # : missing data instruction operands
('0x8064 teqslo', [0x8064], -3105),                    # : wrong text after instruction
('0x8068 cmnlyy', [0x8068], -3104),                    # : unknown instruction condition
('0x8068 cmnls r0, #90', [0x8068, [4, 0x9370005A]], 1000), # T40.0.3 > T40.1.2 success: 1 reg, 1 imm.
('> rsbals r6, r11, #256', [0x806C, [4, 0xE27B6C01]], 1000), # T40.0.2b > T40.2.2 success: 2 regs, 1 imm.
('> addgt r12, r12, lsl r12', [0x8070, [4, 0xC08CCC1C]], 1000), # T40.0.2b > T40.2.2 : LSL reg
('0x8080 adcs r1, r2, lsr r0 ', [0x8080, [4, 0xE0B11032]], 1000), # T40.0.3 > T40.1.2 : LSR reg with space
('> rscles pc, lr, lsr #0x1F ', [0x8084, [4, 0xD0FFFFAE]], 1000), # 40.0.2b > T40.2.2 : LSR imm with space
('0x8088 bicmis r10, r11, r12, lsl r12', [0x8088, [4, 0x41DBAC1C]], 1000), # : three regs, shift reg
('0x8088 bicmis r0, r1, r2, lsl #0', [0x8088, [4, 0x41D10002]], 1000), # : three regs, LSL #0
('0x8088 bicmis r0, r1, r2, ror #0', [0x8088, [4, 0x41D10062]], 1000), # : three regs, ROR #0 -> RRX
('> tst r7,r1, #01010', [0x808C], -2310),              # > T40.2.2 + override : 3 ops with 'tst'
('> movvc r1,r9, #0xC000', [0x808C], -2311),           # > T40.2.2 + override : 3 ops with 'mov'
('> tst r7, #01010', [0x808C, [4, 0xE3170F82]], 1000), # T40.0.2b > T40.2.2 : 'tst' + reg + imm
('> teqlts r7,r8,lsl #12', [0x8090, [4, 0xB1370608]], 1000), # T40.0.2b > T40.2.2 : 'teq'+reg+shifted reg
('> mov r2, #-100', [0x8094, [4, 0xE3E02063]], 1000),  # T40.0.2b > T40.2.2 : 'mov' + reg + NOT imm
('> and r4, #-250', [0x8098, [4, 0xE3C440F9]], 1000),  # T40.0.2b > T40.2.2 : 'and' + reg + NOT imm
('> add r6, #-3120', [0x809C, [4, 0xE2466EC3]], 1000), # T40.0.2b > T40.2.2 : 'add' + reg + NOT imm
('0xA0008 cmp r8, #-1004', [0xA0008, [4, 0xE3780FFB]], 1000), # T40.0.3 > T40.1.2 : 'cmp' + reg + NOT imm
('> .byte -1', [0xA000C, [1, 255]], 1000),             # T40.0.2b > T40.2.1 : automatic inc. +1
('> bics r5, #-255', [0xA0010, [4, 0xE21550FE]], 1000), # T40.0.2b > T40.2.2 : adjust adr. 3 bytes
('> .hword -2', [0xA0014, [2, 65534]], 1000),          # T40.0.2b > T40.2.1 : automatic inc. +2
('> movvss r9,#0xC0000', [0xA0018, [4, 0x63B09703]], 1000), # T40.0.2b > T40.2.2 : adjust adr. 2 bytes
(' > .byte -1, -2, -3', [0xA001C, [1, 255, 254, 253]], 1000), # T40.0.2b > T40.2.1 : automatic inc. +3
(' > cmnne r5, #-256', [0xA0020, [4, 0x13550C01]], 1000), # T40.0.2b > T40.2.2 : adjust adr. 1 byte
('> r5, #-256', [0xA0024], -4005),                     # T40.0.2b > T40.2.7 : unrecognized inst.
('0xA0025 cmp r9, #1004', [0xA0025, [4, 0xE3590FFB]], 1000), # warning : address misaligned 1 byte
('0xA0026 cmp r10, #1008', [0xA0026, [4, 0xE35A0E3F]], 1000), # warning : address misaligned 1 byte
(' 0xA0027 cmp r11, #1012', [0xA0027, [4, 0xE35B0FFD]], 1000), # warning : address misaligned 1 byte
('0x8068 .word -4', [0x8068, [4, 4294967292]], 1000)   # final test: set auto-address as before the first
                                                       # test in this series that makes use of '>'
]
# Full-line tests for multiply instructions (mul/mla/umull/smull/
# umlal/smlal) preceded by an address or '>'. Tuple layout: (source
# line, [resolved address, [instruction size, encoding]], status); the
# first entry rewinds the auto-address so that the '>' tests start at
# 0x8000.
imul_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
('> ', [0x8000], -4005),                               # T40.0.2b > T40.1.7 error: unrecognizable instruction
('> 2', [0x8000], -4005),                              # T40.0.2b > T40.1.7 error: unrecognizable instruction
('> mul', [0x8000], -3202),                            # T40.0.2b > T40.2.3 + override : missing operands after instr.
('> mla ', [0x8000], -3202),                           # T40.0.2b > T40.2.3 + override : missing operands after instr.
('> umull 2', [0x8000], -1302),                        # : wrong register
('> umull 2,', [0x8000], -1302),                       # : wrong register with ','
('> umull r', [0x8000], -1303),                        # : missing register number
('> smull r65', [0x8000], -1304),                      # : too high reg number
('> umlal r12', [0x8000], -3202),                      # : missing other regs
('> mul ', [0x8000], -1301),                           # : missing other regs
('0x90FC mul r1,', [0x90FC], -3202),                   # : missing source operands
('> mla r2, ', [0x8000], -1301),                       # : missing source operands
('> smlal r3, gu', [0x8000], -1302),                   # : wrong reg2
('> umlal r12, r3, e3', [0x8000], -1302),              # : wrong reg3
('> mul r3, r4, r5, r6', [0x8000], -3207),             # : four registers with 'mul'
('> smlal r3, r4, r5, ', [0x8000], -1301),             # : missing reg4
('> mla r3, r4, r5', [0x8000], -3202),                 # : three regs with 'mla'
('> mul r1, r10, r8', [0x8000, [4, 0xE001089A]], 1000), # success: three regs with 'mul'
('0xA000 mla r13, r14, r0, r0', [0xA000, [4, 0xE02D009E]], 1000), # success: four regs with 'mla'
('> umull sp, lr, r12, r13', [0xA004, [4, 0xE08EDD9C]], 1000), # success: four regs with 'umull'
('> mul r10, pc, r7', [0xA008], -3208),                # + override: use of PC as Rm
('> smulllex r10, r11, lr, r10', [0xA008], -3205),     # + override: error after cond
('> mulz', [0xA008], -3204)                            # + override: wrong text after
]
# Test vectors for ARM branch instructions (b/bl/bx/blx with condition codes):
# (assembler input line, expected [address, [size, encoding]] result,
# expected status: 1000 on success, negative error code otherwise).
ijmp_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
            ('> blo', [0x8000], -3302), # T40.0.2b > T40.2.4 + override: missing offset
            ('0x9004 bleq ', [0x9004], -3302), # T40.0.3 > T40.1.4 + override : missing offset
            ('> blox', [0x8000], -4005), # T40.0.2b > T40.2.4 + override: unexpected text after inst
            ('0xA0000 bx', [0xA0000], -3304), # T40.0.3 > T40.1.4 + override : missing reg after instr.
            ('> blxo', [0x8000], -4005), # T40.0.2b > T40.2.4 + override: unexpected text after inst
            ('0x10 blt f', [0x10], -3305), # T40.0.3 > T40.1.4 + override : wrong offset
            ('> bls 0b12', [0x8000], -1002), # T40.0.3 > T40.1.4 + override : unexpected binary digit
            ('> blls 0192', [0x8000], -1003), # : unexpected octal digit
            ('> bllo -192a', [0x8000], -1004), # : unexpected decimal digit
            ('> blvc 0xA3G0', [0x8000], -1005), # : unexpected hexa digit
            ('> bvc 0xA30000000', [0x8000], -1006), # : too long hex address
            ('> bxvc 0xA300', [0x8000], -1302), # : unrecognized reg
            ('> blxcc r', [0x8000], -1303), # : missing reg number
            ('> bxcc rf', [0x8000], -1304), # : wrong reg number
            ('> bxmi r16', [0x8000], -1304), # : wrong reg number
            ('> blgt 0x1302', [0x8000], -3307), # : misaligned address
            ('> bllt 0x73000000', [0x8000], -3308), # : out of range offset
            ('> blal -73000000', [0x8000], -3308), # : out of range neg. offset
            ('> bal -7300001', [0x8000], -3307), # : misaligned negative address
            ('> bx r6 ', [0x8000, [4, 0xE12FFF16]], 1000), # T40.0.2b > T40.2.4 success: 'bx' jump
            ('> blxpl r6', [0x8004, [4, 0x512FFF36]], 1000), # : 'blx' jump
            ('0x7A0C blxlt r15', [0x7A0C, [4, 0xB12FFF3F]], 1000), # > T40.1.4 warning: use of pc (r15)
            ('> b 0xA300', [0x7A10, [4, 0xEA000A3A]], 1000), # > T40.2.4 success: 'b' jump
            ('0xFFF8 bl 1300', [0xFFF8, [4, 0xEBFFC145]], 1000), # > T40.1.4 success: 'bl' negative jump
            ('> blt 073000000', [0xFFFC, [4, 0xBA3ABFFF]], 1000), # > T40.2.4 success: 'blt' octal jump
            ('> bleq 0x730000', [0x10000, [4, 0x0B1C7FFE]], 1000), # > T40.2.4 success: 'bleq' hexa jump
            ('0x7FF8 bhi 0xA30000', [0x7FF8, [4, 0x8A28A000]], 1000), # > T40.1.4 success: 'bhi' jump
            ('> bge 0x2008000', [0x7FFC, [4, 0xAA7FFFFF]], 1000), # : forward jump limit
            ('0x2000000 blhs 0x8', [0x2000000, [4, 0x2B800000]], 1000), # : backward jump limit
            ('0x400000 blhs 0xC', [0x400000, [4, 0x2BF00001]], 1000), # : another backward jump
            ('0x4000 blhi 0x4000', [0x4000, [4, 0x8BFFFFFE]], 1000), # : jump onto same address
            ('0x4000 blhi 0x4008', [0x4000, [4, 0x8B000000]], 1000), # : jump onto advanced pc
            ('0x4001 blhi 0x4008', [0x4001, [4, 0x8BFFFFFF]], 1000) # : jump from misaligned adr.
            ]
# Test vectors for ARM memory-transfer instructions (ldr/str and byte/halfword/
# signed variants, ldm/stm multiple-register forms, and 'ldr rX, =value' literal
# loads): (assembler input line, expected result — [address, [size, encoding]],
# with two address/word pairs for the '=' literal-pool loads —,
# expected status: 1000 on success, negative error code otherwise).
imem_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
            ('> ld', [0x8000], -4005), # T40.0.2b > T40.2.5 + override: missing inst. continuation
            ('> st ', [0x8000], -4005), # + override: missing inst. continuation
            ('> str', [0x8000], -3403), # + override: missing space after inst.
            ('> ldr ', [0x8000], -3405), # + override: missing destination register
            ('> sts', [0x8000], -3408), # + override: 's' not allowed for store inst.
            ('> ldx', [0x8000], -4005), # + override: unrecognized mem. transfer inst.
            ('> ldrby', [0x8000], -3404), # + override: wrong text after inst.
            ('> ldrb e', [0x8000], -1302), # + override: unknown reg
            ('> str r', [0x8000], -1303), # + override: missing reg number
            ('> ldr rb', [0x8000], -1304), # + override: wrong reg number
            ('> ldrb r1', [0x8000], -3406), # + override: missing ',' after dest. reg
            ('> strb r2,', [0x8000], -3407), # + override: missing info after dest. reg
            ('> ldrhsb r2, 2', [0x8000], -2402), # + override: missing '['
            ('> strvcb r3, [', [0x8000], -2403), # + override: missing info after '['
            ('> ldrge r4, [2', [0x8000], -2403), # + override: unrecognizable register
            ('> strltb r5,[r', [0x8000], -1303), # + override: missing register number
            ('> ldrvc r6, [r16', [0x8000], -1304), # + override: too high reg number
            ('> ldr lr, [r12', [0x8000], -2404), # + override: good base reg, missing closure
            ('> ldrb r15, [r1,', [0x8000], -2405), # + override: missing displacement
            ('> strb pc, [r2]!', [0x8000], -2410), # + override: unexpected text after ']'
            ('> ldrvsb r4,[r3, 3', [0x8000], -2406), # + override: wrong displacement
            ('> ldrltb r6, [r5, r10, ', [0x8000], -2407), # + override: missing shift mode
            ('> strlsb r7, [r7, r2, lsl', [0x8000], -2408), # + override: missing space after shift
            ('> ldr r11, [r10, r5, ror r', [0x8000], -1702), # + override: missing info after shift mode
            ('> ldrb r12, [r1, r9, lsl # ', [0x8000], -1704), # + override: unexpected space after '#'
            ('> strb r13,[r9,#0xC0000034]', [0x8000], -2411), # + override: too long immediate displacement
            ('> ldr r0, [r12, #0b1002000]', [0x8000], -1002), # + override: invalid binary digit
            ('> strhi r1, [r13, #018000005]', [0x8000], -1003), # + override: invalid octal digit
            ('> strlob r2, [r14, #5d4]', [0x8000], -1004), # + override: invalid decimal digit
            ('> ldrplb r3, [r15, #0x4r]', [0x8000], -1005), # + override: invalid hexa digit
            ('> ldrb r3, [r15, #0x400000000]', [0x8000], -1006), # + override: too big number
            ('> ldrcsb r4, [ r6, #+0]', [0x8000, [4, 0x25D64000]], 1000), # success: base + imm. displ.
            ('> ldr r5, [r6, #20]', [0x8004, [4, 0xE5965014]], 1000), # success: base + imm. displ.
            ('> str r6,[r7, #+4095]', [0x8008, [4, 0xE5876FFF]], 1000), # success: maximum positive imm. displ.
            ('> ldreqb r7, [r8, #-20]', [0x800C, [4, 0x05587014]], 1000), # success: base + negative imm. displ.
            ('> strccb r8, [r9, #-4095] ', [0x8010, [4, 0x35498FFF]], 1000), # : minimum negative imm. displ.
            ('> ldr r9, [r10]', [0x8014, [4, 0xE59A9000]], 1000), # : base only
            ('> str r10,[r9,+r1]', [0x8018, [4, 0xE789A001]], 1000), # : base + reg. displacement
            ('> str r10, [r5, r15]', [0x801C], -2412), # + override: PC not allowed as Rm
            ('> strb r11, [r0, r8, ror #]', [0x801C], -1703), # + override: missing value after '#'
            ('> ldrle r12, [r2, r10, lsr #f]', [0x801C], -1705), # + override: unrecogn. info after '#'
            ('> strmib r13, [r4, r12, ror #-20]', [0x801C], -1706), # + override: negative number of shifts
            ('> ldrplb r14, [r5, r13, lsl #040]', [0x801C], -1706), # + override: too high number of shifts
            ('> ldrvs r15,[r6, lr, lsr #0x1C] ', [0x801C, [4, 0x6796FE2E]], 1000), # success: with trailing space
            ('> str r0, [r5, r13, lsl #00]', [0x8020, [4, 0xE785000D]], 1000), # success: true LSL #0
            ('0x904A ldr r1, [r6, sp, lsr #0x0 ]', [0x904A, [4, 0xE796100D]], 1000), # : converting LSR #0 into LSL #0
            ('> str r2, [r7,-r1,asr #0b10101]', [0x9050, [4, 0xE7072AC1]], 1000), # : ASR bin imm, no space after ','
            ('0x8090 ldr r3 ,[r7,+r1,asr #0b0]', [0x8090, [4, 0xE7973001]], 1000), # : converting ASR #0 into LSL #0
            ('> ldrb r4,[r9, r12, ror #0x1F]', [0x8094, [4, 0xE7D94FEC]], 1000), # : success ROR with 31 shifts
            ('> strb r5, [r9, r12, ror #0x0]', [0x8098, [4, 0xE7C9506C]], 1000), # : coding ROR #0 as RRX
            ('> lds', [0x809C], -3404), # + override: wrong memory transfer inst.
            ('> strz', [0x809C], -3404), # + override: wrong memory transfer inst.
            ('> strs', [0x809C], -3408), # + override: 's' not allowed for store inst.
            ('> ldrsb e', [0x809C], -1302), # + override: wrong text after inst.
            ('> strleh r10, r12', [0x809C], -2502), # + override: missing '['
            ('> strlsh r10, [12', [0x809C], -2503), # + override: missing reg after '['
            ('> strloh r8, [r12', [0x809C], -2504), # + override: missing closure
            ('> streqh r9, [r1,', [0x809C], -2505), # + override: missing displacement
            ('> ldsccb r1,[r2]!', [0x809C], -2510), # + override: unexpected text after ']'
            ('> strh r2, [r3, 3', [0x809C], -2506), # + override: wrong displacement
            ('> strplh r9, [r5, r10, ', [0x809C], -2513), # + override: scaled reg. displ. not allowed
            ('> ldsmib r10, [r9, #0x134]', [0x809C], -2511), # + override: too long immediate displacement
            ('> ldsb r9, [r5, r15]', [0x809C], -2512), # + override: PC not allowed as Rm
            ('> ldrgtsb r11 , [ r6, #+0]', [0x809C, [4, 0xC1D6B0D0]], 1000), # success: base + imm. displ.
            ('0x20030 strh r12, [r6 ,#195]', [0x20030, [4, 0xE1C6CCB3]], 1000), # success: base + imm. displ.
            ('0x2000 ldrlsh r3, [r10, #-180]', [0x2000, [4, 0x915A3BB4]], 1000), # : base + negative imm. displ.
            ('> stmz', [0x2004], -3404), # + override: wrong memory transfer inst.
            ('> ldmia', [0x2004], -3403), # + override: missing space after inst.
            ('> stmdb ', [0x2004], -3405), # + override: missing destination reg
            ('> ldmhsfd r2', [0x2004], -3406), # + override: missing ',' after destination reg
            ('> ldmhsfa r2,', [0x2004], -3407), # + override: missing info after dest. reg
            ('> stmccib r3,1', [0x2004], -1502), # + override: missing '{'
            ('> ldmmied r4!, {', [0x2004], -1503), # + override: missing registers
            ('> stmed r9, {r14,}', [0x2004], -1504), # + override: missing register after ','
            ('> ldmfd r13!, {r4-}', [0x2004], -1403), # + override: missing second reg in range list
            ('0x70FC ldmalib r11 , {r0-r5}', [0x70FC, [4, 0xE99B003F]], 1000), # success: single range
            ('> stmccdb r12!, {pc, r1-r2, sp-r12, r5}', [0x7100, [4, 0x392CB026]], 1000), # : several ranges, with spcs
            ('> str r0, =', [0x7104], -3409), # + override: 'str' cannot use '=' loading
            ('> ldrh r0,=', [0x7104], -2502), # + override: nor 'ldrh'
            ('> ldr r0, =t', [0x7104], -3410), # + override: idem with tranling rubbish
            ('> ldr r5, =0x100000000', [0x7104], -1006), # + override: too big number
            ('> ldr r6, =+0', [0x8104, [4, 0], 0x7104, [4, 0xE59F6FF8]], 1000), # success: set a relative pc loading
            ('> ldrhi r7, = 00317652', [0x8108, [4, 0x19FAA], 0x7108, [4, 0x859F7FF8]], 1000), # : octal number
            ('0x801C ldrlt lr, =-1000', [0x901C, [4, 0xFFFFFC18], 0x801C, [4, 0xB59FEFF8]], 1000), # : negative number
            ('> ldr pc, = 0x8000', [0x9020, [4, 0x8000], 0x8020, [4, 0xE59FFFF8]], 1000), # : hexa num. (load PC)
            ('0x801A ldrgt lr, =0x1FF80', [0x901A, [4, 0x1FF80], 0x801A, [4, 0xC59FEFF8]], 1000), # : explicit misalign
            ('> ldr sp , =0x80000', [0x9020, [4, 0x80000], 0x8020, [4, 0xE59FDFF8]], 1000), # : implicit misalign
            ('0xfffffffc .ascii \'1\'', [0xFFFFFFFC, [1, 49]], 1000), # almost in the address space limit
            ('> ldr r0, =8', [0x100001000, [4, 8], 0x100000000, [4, 0xE59F0FF8]], -4006), # crossing addr. space limit
            ('0xffffeffc .ascii \'2\'', [0xFFFFEFFC, [1, 50]], 1000), # almost in the address space limit
            ('> ldr r2,=-8', [0x100000000, [4, 0xFFFFFFF8], 0xFFFFF000, [4, 0xE59F2FF8]], -4006) # crossing addr. limit
            ]
# Test vectors for ARM miscellanea instructions (push/pop/clz):
# (assembler input line, expected [address, [size, encoding]] result,
# expected status: 1000 on success, negative error code otherwise).
imsc_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
            ('> push', [0x8000], -3502), # T40.0.2b > T40.2.6 + override : missing operands
            ('0x8000 clz 2', [0x8000], -1302), # T40.0.3 > T40.1.6 + override : unrecognizable register
            ('> clz r', [0x8000], -1303), # + override : missing register number
            ('> clz r16', [0x8000], -1304), # + override : too high reg number
            ('> push 1', [0x8000], -1502), # + override : missing '{'
            ('> pop {', [0x8000], -1503), # + override : missing registers
            ('> pushge {r14,}', [0x8000], -1504), # + override : missing register after ','
            ('> popcc {r4-}', [0x8000], -1403), # + override : missing second reg in range list
            ('0x9004 popcce', [0x9004], -3504), # + override : wrong text after inst.
            ('> clzhs r15, ', [0x8000], -3505), # + override : wrong info after Rd
            ('> clzls r15,r6', [0x8000, [4, 0x9160F016]], 1000), # success : 'clz' + cond
            ('0xA00 pushls {r14}', [0xA00, [4, 0x992D4000]], 1000), # success : 'push' + cond
            ('> pop {r0, r4-r10, r14}', [0xA04, [4, 0xE8BD47F1]], 1000) # success : 'pop'
            ]
# Master table of (analyzer instance, test-vector list, human-readable label)
# triples, pairing each analyzer with the vectors that exercise it.
# NOTE(review): several vector lists referenced here (op2_test, opd_test,
# idt_test, ...) are defined elsewhere in this file.
test_groups = [(number_analyzer, hex_test, 'hexadecimal numbers'),
               (number_analyzer, dec_test, 'decimal numbers'),
               (number_analyzer, oct_test, 'octal numbers'),
               (number_analyzer, bin_test, 'binary numbers'),
               (char_analyzer, chr_test, 'single quoted chars'),
               (string_analyzer, str_test, 'double quoted strings'),
               (data_analyzer, dat_test, 'data directives'),
               (address_analyzer, adr_test, 'hex addresses'),
               (register_analyzer, reg_test, 'register identifiers'),
               (regbit_analyzer, rbt_test, 'registers bit mask'),
               (reglst_analyzer, rlt_test, 'registers list mask'),
               (immediate_op_analyzer, imo_test, 'immediate operand'),
               (immediate_sr_analyzer, ims_test, 'immediate shift register'),
               (op2_analyzer, op2_test, 'second operand'),
               (opdat_analyzer, opd_test, 'data instruction operands'),
               (instdat_analyzer, idt_test, 'data instructions'),
               (instmul_analyzer, iml_test, 'multiplication instructions'),
               (instjmp_analyzer, ibr_test, 'branch instructions'),
               (opldst2_analyzer, am2_test, 'addressing mode 2'),
               (opldst3_analyzer, am3_test, 'addressing mode 3'),
               (instmem_analyzer, im2_test, 'memory transfer instructions, addressing mode 2'),
               (instmem_analyzer, im3_test, 'memory transfer instructions, addressing mode 3'),
               (instmem_analyzer, imm_test, 'memory transfer instructions, multiple registers'),
               (instmem_analyzer, iil_test, 'memory transfer instructions, immediate load'),
               (instmsc_analyzer, imi_test, 'miscellanea instructions'),
               (arm_analyzer, data_arm, 'arm data directives'),
               (arm_analyzer, idat_arm, 'arm data instructions'),
               (arm_analyzer, imul_arm, 'arm multiplication instructions'),
               (arm_analyzer, ijmp_arm, 'arm branch instructions'),
               (arm_analyzer, imem_arm, 'arm memory transfer instructions'),
               (arm_analyzer, imsc_arm, 'arm miscellanea instructions')
               ]
| 92.899927
| 120
| 0.498529
|
from num_analyzer import NumberAnalyzer
from string_analyzer import CharAnalyzer
from string_analyzer import StringAnalyzer
from data_analyzer import DataAnalyzer
from adr_analyzer import AddressAnalyzer
from reg_analyzer import RegisterAnalyzer
from reg_analyzer import RegisterBitsAnalyzer
from reg_analyzer import RegisterListAnalyzer
from imm_analyzer import ImmediateOpAnalyzer
from imm_analyzer import ImmediateRSAnalyzer
from op2_analyzer import Op2Analyzer
from opdat_analyzer import OpdatAnalyzer
from instdat_analyzer import InstdatAnalyzer
from instmul_analyzer import InstmulAnalyzer
from instjmp_analyzer import InstjmpAnalyzer
from opldst_analyzer import Opldst2Analyzer
from opldst_analyzer import Opldst3Analyzer
from instmem_analyzer import InstmemAnalyzer
from instmsc_analyzer import InstmscAnalyzer
from arm_analyzer import ArmAnalyzer
# One shared instance of each analyzer under test; these are the objects
# referenced by the test_groups table.
number_analyzer = NumberAnalyzer()
char_analyzer = CharAnalyzer()
string_analyzer = StringAnalyzer()
data_analyzer = DataAnalyzer()
address_analyzer = AddressAnalyzer()
register_analyzer = RegisterAnalyzer()
regbit_analyzer = RegisterBitsAnalyzer()
reglst_analyzer = RegisterListAnalyzer()
immediate_op_analyzer = ImmediateOpAnalyzer()
immediate_sr_analyzer = ImmediateRSAnalyzer()
op2_analyzer = Op2Analyzer()
opdat_analyzer = OpdatAnalyzer()
instdat_analyzer = InstdatAnalyzer()
instmul_analyzer = InstmulAnalyzer()
instjmp_analyzer = InstjmpAnalyzer()
opldst2_analyzer = Opldst2Analyzer()
opldst3_analyzer = Opldst3Analyzer()
instmem_analyzer = InstmemAnalyzer()
instmsc_analyzer = InstmscAnalyzer()
arm_analyzer = ArmAnalyzer()
# Test vectors for the number analyzer, hexadecimal inputs:
# (input text, expected value list — empty on failure —,
# expected status: 1000 on success, negative error code otherwise).
hex_test = [('', [], -1001),
            (' ', [], -1001),
            ('0x', [], -1005),
            (' 0x', [], -1005),
            ('0x1', [1], 1000),
            (' 0x1', [1], 1000),
            (' 0xA', [10], 1000),
            ('0x01', [1], 1000),
            (' 0x001', [1], 1000),
            ('0x10', [16], 1000),
            ('0x2864', [10340], 1000),
            ('0xF3AE', [62382], 1000),
            ('0xb14a', [45386], 1000),
            ('0xb14A', [45386], 1000),
            ('0xR124', [], -1005),
            ('0x51V4', [], -1005),
            ('0x514W', [], -1005),
            ('0x10002EF0', [268447472], 1000),
            ('0x10002EF00', [], -1006)
            ]
# Test vectors for the number analyzer, decimal inputs (signed):
# (input text, expected value list, expected status code).
dec_test = [('0', [0], 1000),
            (' 0', [0], 1000),
            ('1', [1], 1000),
            (' 1', [1], 1000),
            ('-1', [-1], 1000),
            (' -1', [-1], 1000),
            ('10', [10], 1000),
            ('2864', [2864], 1000),
            ('-2864', [-2864], 1000),
            ('+2864', [2864], 1000),
            ('r12', [], -1001),
            ('5V6', [], -1004),
            ('514W', [], -1004),
            ('-', [], -1004),
            ('+', [], -1004),
            ('-r12', [], -1004),
            ('+r12', [], -1004),
            ('-5V6', [], -1004),
            ('4684474720', [], -1006),
            ('-2147483649', [], -1006)
            ]
# Test vectors for the number analyzer, octal inputs (leading '0'):
# (input text, expected value list, expected status code).
oct_test = [('000', [0], 1000),
            (' 00', [0], 1000),
            ('01', [1], 1000),
            (' 01', [1], 1000),
            ('001', [1], 1000),
            ('010', [8], 1000),
            ('02764', [1524], 1000),
            ('02864', [], -1003),
            ('0r12', [], -1003),
            ('05V6', [], -1003),
            ('0514W', [], -1003),
            ('00r12', [], -1003),
            ('063710000000', [], -1006)
            ]
# Test vectors for the number analyzer, binary inputs (leading '0b'):
# (input text, expected value list, expected status code).
bin_test = [('0b', [], -1002),
            (' 0b', [], -1002),
            ('0b1', [1], 1000),
            (' 0b1', [1], 1000),
            (' 0b0', [0], 1000),
            ('0b01', [1], 1000),
            (' 0b001', [1], 1000),
            ('0b10', [2], 1000),
            ('0b0110', [6], 1000),
            ('0bR101', [], -1002),
            ('0b01V4', [], -1002),
            ('0b110W', [], -1002),
            ('0b0140', [], -1002),
            ('0b10000000000000001000000000000000', [2147516416], 1000),
            ('0b100000000000000010000000000000001', [], -1006)
            ]
# Test vectors for the single-quoted character analyzer:
# (input text, expected character-code list, expected status code).
chr_test = [('', [], -1101),
            ("'", [], -1101), # T11.0.2 > T11.1.0 error: open single quote, missing char
            (' n\'', [], -1101),
            ("''", [], -1102),
            ("' ", [32], -1104), # T11.0.2 > T11.1.2 > T11.2.0 error: unclosed single quoted char
            ("' 0", [32], -1105),
            ("' '", [32], 1000),
            (" ' '", [32], 1000),
            ('" "', [], -1101),
            ('\'\"\'', [34], 1000), # T11.0.2 > T11.1.2 > T11.2.1 capture double quote as single char
            ('\'\n\'', [], -1103) # T11.0.2 > T11.1.3 illegal character in single quotes
            ]
# Test vectors for the double-quoted string analyzer:
# (input text, expected character-code list, expected status code;
# 1000 = success, negative = error).
str_test = [('', [], -1201),  # T12.0.0 error: no double quote
            ("'", [], -1201),  # T12.0.3 error: unexpected single quote
            ('"', [], -1201),  # T12.0.2 > T12.1.0 error: open double quote, missing string
            (' n\"', [], -1201),  # T12.0.1 > T12.0.3 error: missing quote before characters
            ('""', [], -1202),  # T12.0.2 > T12.1.1 error: empty double quotes
            ('" ', [32], -1204),  # T12.0.2 > T12.1.2 > T12.2.0 error: unclosed double quotes
            ('" 0', [32, 48], -1204),  # T12.0.2 > T12.1.2 > T12.2.2 > T12.2.0 / idem with two chars
            ('" "', [32], 1000),  # T12.0.2 > T12.1.2 > T12.2.1 successful single-char string
            (' " "', [32], 1000),  # T12.0.1 > T12.0.2 > T12.1.2 > T12.2.1 / idem with leading space
            ('"0123456789"', [48, 49, 50, 51, 52, 53, 54, 55, 56, 57], 1000),  # decimal digits
            ('"abcdefghijklmnopqrstuvwxyz"', [97, 98, 99, 100, 101, 102, 103,
                                              104, 105, 106, 107, 108, 109, 110, 111, 112,
                                              113, 114, 115, 116, 117, 118, 119, 120, 121,
                                              122], 1000),  # lower case letters
            ('"ABCDEFGHIJKLMNOPQRSTUVWXYZ"', [65, 66, 67, 68, 69, 70, 71, 72,
                                              73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
                                              84, 85, 86, 87, 88, 89, 90], 1000),  # upper case letters
            # NOTE(review): the entry below was truncated at the '#' inside its string
            # literal (comment-stripping artifact left only '("!').  Reconstructed from
            # the surviving expected codes (40-47, 58-64, 91-96, 123-126) plus the
            # leading codes 33, 35-38 implied by the '"!' prefix — confirm against the
            # original source.
            ('"!#$%&()*+,-./:;<=>?@[\\]^_`{|}~"', [33, 35, 36, 37, 38,
                                                   40, 41, 42, 43, 44, 45, 46, 47, 58, 59, 60,
                                                   61, 62, 63, 64, 91, 92, 93, 94, 95, 96, 123,
                                                   124, 125, 126], 1000),  # punctuation letters
            ('\"\'\"', [39], 1000),  # T12.0.2 > T12.1.2 > T12.2.1 capture single quote as a string
            ('\"\n\"', [], -1203),  # T12.0.2 > T12.1.3 illegal character after double quote
            ('\" \n\"', [32], -1203)  # T12.0.2 > T12.1.2 > T12.2.2 > T12.2.3 idem after a valid char
            ]
# Test vectors for the data-directive analyzer (.byte/.hword/.word/.ascii/.asciz):
# (input text, expected [element size, value...] list, expected status code;
# 1000 = success, negative = error).
dat_test = [('', [], -2101), # T21.0.0 error: missing data directive
            (' ', [], -2101), # T21.0.1 > T21.0.0 idem with leading space
            ('.', [], -2101), # T21.0.2 > T21.1.0 error: missing directive after '.'
            ('f', [], -2101), # T21.0.3 error: missing '.'
            ('.f', [], -2104), # T21.0.2 > T21.1.6 error: unknown data directive
            ('.byte', [], -2102), # T21.0.2 > T21.1.1a error: missing data values
            ('.byte ', [1], -2102), # T21.0.2 > T21.1.1b > T21.2.0 error: missing data values
            ('.byte2', [], -2103), # T21.0.2 > T21.1.1c error: missing space after directive
            ('.byte 2', [1, 2], 1000), # T21.0.2 > T21.1.1b > T21.2.1a success: get one byte
            ('.byte 20', [1, 20], 1000), # T21.0.2 > T21.1.1b > T21.2.1a idem with two digits
            ('.byte -20', [1, 236], 1000), # T21.0.2 > T21.1.1b > T21.2.1a idem with negative number
            ('.byte 2000', [1], -2107), # T21.0.2 > T21.1.1b > T21.2.1a + override data >= 2**8
            ('.byte -200', [1], -2107), # T21.0.2 > T21.1.1b > T21.2.1a + override data < -2**7
            ('.byte 45r', [1], -1004), # T21.0.2 > T21.1.1b > T21.2.1a + override unexpected decimal digit
            ('.byte 45,', [1, 45], -2102), # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.0 error: missing data
            ('.byte 45, ', [1, 45], -2106), # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.3 unrecognizeable info
            ('.byte 200, 0xF4', [1, 200, 244], 1000), # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.1a get two bytes
            ('.byte \'2\'', [1, 50], 1000), # T21.0.2 > T21.1.1b > T21.2.2a success: get one char
            ('.byte \'2\', \'F\'', [1, 50, 70], 1000), # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.2a get two chars
            ('.byte \'2\', 0123', [1, 50, 83], 1000), # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.1a one char + one num.
            ('.byte \'2\' , 0123', [1, 50, 83], 1000), # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.1a with extra space
            ('.byte \'2\', 0123 ', [1, 50, 83], 1000), # T21.0.2 > T21.1.1b > T21.2.2b > T21.2.1a with trailing space
            ('.byte 0b110, \'e\'', [1, 6, 101], 1000), # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.2a one num. + one char
            ('.byte 0b110 , \'e\'', [1, 6, 101], 1000), # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.2a with extra space
            ('.byte 0b110, \'e\' ', [1, 6, 101], 1000),
            # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.2a with trailing space
            ('.byte \'e\' c', [1], -2105), # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.1c wrong delimiter
            ('.byte \'e\', c', [1, 101], -2106), # T21.0.2 > T21.1.1b > T21.2.1b > T21.2.3 unrecognizeable info
            ('.byte c', [1], -2106), # T21.0.2 > T21.1.1b > T21.2.3 unrecognizeable info
            ('.hword', [], -2102), # T21.0.2 > T21.1.2a error: missing data values
            ('.hword ', [2], -2102), # T21.0.2 > T21.1.2b > T21.3.0 error missing halfwords
            ('.hword2', [], -2103), # T21.0.2 > T21.1.2c error: missing space after directive
            ('.hword 2000', [2, 2000], 1000), # T21.0.2 > T21.1.2b > T21.3.1a success: capture a halfword
            ('.hword 2000, 0b0010', [2, 2000, 2], 1000), # T21.0.2 > T21.1.2b > T21.3.1b > T21.3.1a two halfwords
            ('.hword 02000, -1, 0xF00A', [2, 1024, 65535, 61450], 1000), # success: three halfwords
            ('.hword \'e\'', [2], -2106), # T21.0.2 > T21.1.2b > T21.3.2 unrecognizeable info
            ('.hword 045r', [2], -1003), # T21.0.2 > T21.1.2b > T21.3.1a + override unexpected hexa digit
            ('.hword 45,', [2, 45], -2102), # T21.0.2 > T21.1.2b > T21.3.1b > T21.3.0 error: missing data
            ('.hword 2 , -0123 ', [2, 2, 0xFF85], 1000), # T21.0.2 > T21.1.2b > T21.3.1b > T21.3.1a extra space
            ('.hword -45000', [2], -2107), # T21.0.2 > T21.1.2b > T21.3.1a + overrride error: data < -2**15
            ('.word', [], -2102), # T21.0.2 > T21.1.3a error: missing data values
            ('.word ', [4], -2102), # T21.0.2 > T21.1.3b > T21.4.0 error missing words
            ('.wordh', [], -2103), # T21.0.2 > T21.1.3c error: missing space after directive
            ('.word 2000', [4, 2000], 1000), # T21.0.2 > T21.1.3b > T21.4.1a success: capture a word
            ('.word -2147483648, 0b0010', [4, 2147483648, 0b0010], 1000), # T21.0.2 > T21.1.3b > T21.4.1b > T21.4.1a
            ('.word 020000000, -1, 0x1F00A', [4, 0o20000000, 4294967295, 0x1F00A], 1000), # three words
            ('.word r45', [4], -2106), # T21.0.2 > T21.1.3b > T21.4.2 unrecognizeable info
            ('.word 0b45', [4], -1002), # T21.0.2 > T21.1.3b > T21.4.1a + override unexpected binary digit
            ('.word 0x4X5', [4], -1005), # T21.0.2 > T21.1.3b > T21.4.1a + override unexpected hexa digit
            ('.word 0x400000000', [4], -1006), # T21.0.2 > T21.1.3b > T21.4.1a + override too long value (>2^32)
            ('.word 45,', [4, 45], -2102), # T21.0.2 > T21.1.3b > T21.4.1b > T21.4.0 error: missing data
            ('.word 2 , -0123 ', [4, 2, 4294967173], 1000), # T21.0.2 > T21.1.3b > T21.4.1b > T21.4.1a
            ('.word 4294967295', [4, 4294967295], 1000), # T21.0.2 > T21.1.3b > T21.4.1a success: maximum int
            ('.ascii', [], -2102), # T21.0.2 > T21.1.4a error: missing string
            ('.asciz', [], -2102), # T21.0.2 > T21.1.5a error: missing string
            ('.ascii ', [1], -2102), # T21.0.2 > T21.1.4b > T21.5.0 : missing string
            ('.asciz ', [1], -2102), # T21.0.2 > T21.1.5b > T21.6.0 : missing string
            ('.ascii5', [], -2103), # T21.0.2 > T21.1.4c error: missing space after directive
            ('.asciz8', [], -2103), # T21.0.2 > T21.1.5c error: missing space after directive
            ('.ascii \' \'', [1, 32], 1000), # T21.0.2 > T21.1.4b > T21.5.1a success: get one char
            ('.asciz \' \'', [1, 32, 0], 1000), # T21.0.2 > T21.1.5b > T21.6.1a success: get one char + '\0'
            ('.ascii \'a\', \'b\' ,\'c\' , \'d\' ', [1, 97, 98, 99, 100], 1000), # > T21.5.1b > T21.5.1a
            ('.asciz \'a\', \'b\' ,\'c\' , \'d\' ', [1, 97, 0, 98, 0, 99, 0, 100, 0], 1000), # > T21.6.1b > T21.6.1a
            ('.ascii "0123456789"', [1, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57], 1000), # T21.0.2 > T21.1.4b > T21.5.2a
            ('.asciz "abcdef"', [1, 97, 98, 99, 100, 101, 102, 0], 1000), # T21.0.2 > T21.1.5b > T21.6.2a
            ('.ascii \"b\", \"a\"', [1, 98, 97], 1000), # T21.0.2 > T21.1.4b > T21.5.2b > T21.5.2a
            ('.asciz \"a\", \"b\"', [1, 97, 0, 98, 0], 1000), # T21.0.2 > T21.1.5b > T21.6.2b > T21.6.2a
            ('.ascii \"b\", \'a\'', [1, 98, 97], 1000), # T21.0.2 > T21.1.4b > T21.5.2b > T21.5.1a
            ('.asciz \'a\', \"b\"', [1, 97, 0, 98, 0], 1000), # T21.0.2 > T21.1.5b > T21.6.1b > T21.6.2a
            ('.ascii \' ', [1], -1104), # T21.0.2 > T21.1.4b > T21.5.1a + override unclosed char
            ('.ascii \" ', [1], -1204), # T21.0.2 > T21.1.4b > T21.5.2a + override unclosed string
            ('.asciz \' ', [1], -1104), # T21.0.2 > T21.1.5b > T21.6.1a + override unclosed char
            ('.asciz \" ', [1], -1204), # T21.0.2 > T21.1.5b > T21.6.2a + override unclosed string
            ('.ascii \'\'', [1], -1102), # T21.0.2 > T21.1.4b > T21.5.1a + override empty char
            ('.ascii \"\"', [1], -1202), # T21.0.2 > T21.1.4b > T21.5.2a + override empty string
            ('.asciz \'\'', [1], -1102), # T21.0.2 > T21.1.5b > T21.6.1a + override empty char
            ('.asciz \"\"', [1], -1202), # T21.0.2 > T21.1.5b > T21.6.2a + override empty string
            ('.ascii \' 0\'', [1], -1105), # T21.0.2 > T21.1.4b > T21.5.2a + override more than one character
            ('.asciz \' 0\'', [1], -1105), # T21.0.2 > T21.1.5b > T21.6.2a + override idem after .ascii
            ('.ascii \'a\', \"bc , \'d\"', [1, 97, 98, 99, 32, 44, 32, 39, 100], 1000), # > T21.5.1b > T21.5.2a
            ('.asciz \',\', \",,\"', [1, 44, 0, 44, 44, 0], 1000), # T21.0.2 > T21.1.5b > T21.6.1a success capture ','
            ('.ascii \'\t\'', [1], -1103), # T21.0.2 > T21.1.4b > T21.5.1c + override illegal character ''
            ('.asciz \'\t\'', [1], -1103), # T21.0.2 > T21.1.5b > T21.6.1c + override idem after .ascii
            ('.ascii \"\t\"', [1], -1203), # T21.0.2 > T21.1.4b > T21.5.2c + override illegal character ""
            ('.asciz \" \t\"', [1], -1203), # T21.0.2 > T21.1.5b > T21.6.2c + override idem after valid char
            ('.ascii \'"\'a', [1], -2105), # T21.0.2 > T21.1.4b > T21.5.1c unexpected separator
            ('.ascii \"\'a\"b', [1], -2105), # T21.0.2 > T21.1.4b > T21.5.2c unexpected separator
            ('.asciz \'"\'a', [1], -2105), # T21.0.2 > T21.1.5b > T21.6.1c unexpected separator
            ('.asciz \"\'a\"b', [1], -2105), # T21.0.2 > T21.1.5b > T21.6.2c unexpected separator
            ('.ascii \' a\'', [1], -1105), # T21.0.2 > T21.1.4b > T21.5.2a + override more than one character
            ('.asciz \' a\'', [1], -1105), # T21.0.2 > T21.1.5b > T21.6.2a + override idem after .ascii
            ('.ascii a\'', [1], -2106), # T21.0.2 > T21.1.4b > T21.5.3 non recognizable info
            ('.asciz a\'', [1], -2106), # T21.0.2 > T21.1.5b > T21.6.3 non recognizable info
            (' .asciz \'a\'', [1, 97, 0], 1000) # T21.0.1 > T21.0.2 > T21.1.5b > T21.6.1a success with leading space
            ]
# Test vectors for the hexadecimal-address analyzer:
# (input text, expected address list, expected status code).
adr_test = [('', [], -2001), # T20.0.0 error: missing address
            (' ', [], -2001), # T20.0.1 > T20.0.0 idem white leading space
            ('0x', [], -2002), # T20.0.2 > T20.1.0 error: '0x' but missing hex digits
            ('x0', [], -2001), # T20.0.3 error: missing address start
            (' 0x8001', [], -2003), # T20.0.1 > T20.0.2 > T20.1.1a address but missing trailing space
            ('0xF3AE ', [0xF3AE], 1000), # T20.0.2 > T20.0.2 > T20.1.1b success address with trailing space
            ('0xR124', [], -2003), # T20.0.2 > T20.1.2 illegal address (first digit)
            ('0x51V4', [], -2003), # T20.0.2 > T20.1.1c illegal address (in-the-middle)
            ('0x514W', [], -2003), # T20.0.2 > T20.1.1c illegal address (last one)
            ('0xF0002E00 ', [0xF0002E00], 1000), # T20.0.2 > T20.1.1b big hex address: eight digits
            ('0x10002EF00 ', [], -2004) # T20.0.2 > T20.1.1b + override long hex address (> 2^32)
            ]
# Test vectors for the register-identifier analyzer (r0-r15, sp, lr, pc):
# (input text, expected register-number list, expected status code).
reg_test = [('', [], -1301), # T13.0.0 error: missing register
            (' ', [], -1301), # T13.0.1 > T13.0.0 / idem with leading space
            ('1', [], -1302), # T13.0.4 error: unknown register identifier
            ('r', [], -1303), # T13.0.2 > T13.1.0 error: missing register number
            ('ra', [], -1304), # T13.0.2 > T13.1.2 error: wrong reg number
            ('r1a', [], -1304), # T13.0.2 > T13.1.2 error: wrong reg number
            ('r-1', [], -1304), # T13.0.2 > T13.1.1 + override : negative reg number
            ('r16', [], -1304), # T13.0.2 > T13.1.1 + override : too high reg number
            ('r12', [12], 1000), # T13.0.2 > T13.1.1 success: two digit reg number
            ('r0', [0], 1000), # T13.0.2 > T13.1.1 success: one digit reg number
            ('sp', [13], 1000), # T13.0.3 success: stack pointer
            ('lr', [14], 1000), # T13.0.3 success: link register
            ('pc', [15], 1000) # T13.0.3 success: program counter
            ]
# Test vectors for the register bit-mask analyzer (single reg or 'rA-rB' range
# encoded as a 16-bit mask): (input text, expected mask list, expected status).
rbt_test = [('', [], -1401), # T14.0.0 error: missing register
            (' ', [], -1401), # T14.0.1 > T14.0.0 / idem with leading space
            ('1', [], -1302), # T14.0.2c + override unknown register identifier
            ('r', [], -1303), # T14.0.2a + override missing register number
            ('ra', [], -1304), # T14.0.2a + override wrong reg number
            ('r1a', [], -1304), # T14.0.2c + override wrong reg number
            ('r-1', [], -1303), # T14.0.2b + override negative reg number
            ('r16', [], -1304), # T14.0.2a + override too high reg number
            ('r0', [0x1], 1000), # T14.0.2a success: single register
            ('r15', [0x8000], 1000), # T14.0.2a : maximum single reg value
            ('r0-r5', [0x3F], 1000), # T14.0.2b > T14.1.1 success: reg range (min, max)
            ('r12-r2', [0x1FFC], 1000), # T14.0.2b > T14.1.1 : (max, min)
            ('lr-pc', [0xC000], 1000), # T14.0.2b > T14.1.1 : (symbolic)
            ('sp-r12', [0x3000], 1000), # T14.0.2b > T14.1.1 : (symbolic & numeric, two bits)
            ('sp-r13', [0x2000], 1000), # T14.0.2b > T14.1.1 : (symbolic & numeric, one bit)
            ('r4-', [0x10], -1403), # T14.0.2b > T14.1.0 error: missing second reg in range list
            ('r8-1', [0x100], -1302), # T14.0.2a > T14.1.1 + override wrong second reg
            ('r9-r16', [0x200], -1304) # T14.0.2a > T14.1.1 + override too high second reg number
            ]
# Test vectors for the register-list analyzer ('{...}' lists of regs/ranges
# encoded as a 16-bit mask): (input text, expected mask list, expected status).
rlt_test = [('', [], -1501), # T15.0.0 error: missing register list
            (' ', [], -1501), # T15.0.1 > T15.0.0 : idem with leading space
            ('1', [], -1502), # T15.0.3 error: missing '{'
            ('{', [], -1503), # T15.0.2 > T15.1.0 error: missing registers
            ('{1', [], -1302), # T15.0.2 > T15.1.1a + override : unknown register identifier
            ('{r', [], -1303), # T15.0.2 > T15.1.1a + override : missing register number
            ('{ra', [], -1304), # T15.0.2 > T15.1.1a + override : wrong reg number
            ('{r1a', [], -1304), # T15.0.2 > T15.1.1a + override : wrong reg number
            ('{r-1', [], -1303), # T15.0.2 > T15.1.1a + override : negative reg number
            ('{r16', [], -1304), # T15.0.2 > T15.1.1a + override : too high reg number
            ('{r0', [], -1503), # T15.0.2 > T15.1.1a error: unclosed single register
            ('{r0}', [0x1], 1000), # T15.0.2 > T15.1.1c success: single register
            ('{r0-r5}', [0x3F], 1000), # T15.0.2 > T15.1.1c success: single range
            ('{r0-r5 }', [0x3F], 1000), # : idem with trailing space
            ('{r12-r2, lr', [0x1FFC], -1503), # > T15.1.1b > T15.1.1a error: missing '}' after list
            ('{r12 - r2, lr}', [0x5FFC], 1000), # > T15.1.1b > T15.1.1c success: range + single register
            ('{ pc, r1 -r2, sp- r12, r5}', [0xB026], 1000), # : several ranges, with spaces
            ('{r4-}', [], -1403), # > T15.1.1a + override : missing second reg in range list
            ('{r14, r8-1', [0x4000], -1302), # > T15.1.1a + override : wrong second reg
            ('{r9-r16, r13}', [], -1304), # > T15.1.1a + override : too high second reg number
            ('{r14,r8}', [0x4100], 1000), # success: no space after ','
            ('{ r9 , r13 }', [0x2200], 1000), # success: extra spaces
            ('{r14,}', [0x4000], -1504), # > T15.1.1b > T15.1.2 error: missing register after ','
            ('{r14, }', [0x4000], -1504), # > T15.1.1b > T15.1.2 : missing register after ', '
            ('{r9-r15, sp13}', [0xFE00], -1402) # > T15.1.1b + override : unrecognized register id
            ]
# Test vectors for the immediate-operand analyzer ('#value', encoded as ARM
# 8-bit-immediate + rotation fixup): (input text, expected encoding list,
# expected status code).
imo_test = [('', [], -1601), # T16.0.0 error: missing immediate value
            (' ', [], -1601), # T16.0.1 > T16.0.0 idem with leading space
            ('2', [], -1602), # T16.0.3 error: missing '#'
            ('#', [], -1603), # T16.0.2 > T16.1.0 error: missing value after '#'
            ('# ', [], -1604), # T16.0.2 > T16.1.1 error: unexpected space after '#'
            ('#f', [], -1605), # T16.0.2 > T16.1.4 error: unrecognizable info after '#'
            ('#20', [20], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success: simple byte value
            ('#\'f\'', [102], 1000), # T16.0.2 > T16.1.3 > T16.2.0 success: simple char value
            ('#-20', [], -1606), # T16.0.2 > T16.1.2 + override : impossible fixup for negative number
            ('#2000', [0xE7D], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success fixup: in-the-middle bits
            ('#0xC0000034', [0x1D3], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success fixup: split bits
            ('#0xFF000000', [0x4FF], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success fixup: maximum rotation
            ('#0xFF0000FF', [], -1606), # T16.0.2 > T16.1.2 + override : impossible fixup for 16 bits
            ('#0x102', [], -1606), # T16.0.2 > T16.1.2 + override : impossible fixup for odd rotations
            ('#0x104', [0xF41], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success fixup: odd immediate mask
            ('#0x108', [0xF42], 1000), # T16.0.2 > T16.1.2 > T16.2.0 : even immediate mask
            ('#45r', [], -1004), # T16.0.2 > T16.1.2 + override : unexpected decimal digit
            ('#\'e\' c', [101], -1607), # T16.0.2 > T16.1.3 > T16.2.1 error: unexpected text after imm val.
            ('#0b111111100000000000', [0xBFE], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success fixup: binary
            ('#0b1002000', [], -1002), # T16.0.2 > T16.1.2 + override : invalid binary digit
            ('#012000000005', [0x255], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success fixup: octal
            ('#012000900005', [], -1003), # T16.0.2 > T16.1.2 + override : invalid octal digit
            ('#45d', [], -1004), # T16.0.2 > T16.1.2 + override : invalid decimal digit
            ('#0x4X5', [], -1005), # T16.0.2 > T16.1.2 + override : invalid hexa digit
            ('#0x400000000', [], -1006), # T16.0.2 > T16.1.2 + override : too long value (>2^32)
            ('#0x08000002', [0x382], 1000), # T16.0.2 > T16.1.2 > T16.2.0 success fixup: MSB = 1 at IM
            ('#\'', [], -1605), # T16.0.2 > T16.1.4 error: unclosed char
            ('#\' ', [], -1104), # T16.0.2 > T16.1.3 + override : unclosed char
            ('#\'\'', [], -1102), # T16.0.2 > T16.1.3 + override : empty char
            ('#\' 0\'', [], -1105), # T16.0.2 > T16.1.3 + override : more than one character
            ('#\'\t\'', [], -1103), # T16.0.2 > T16.1.3 + override : illegal character ''
            ('#\"t\"', [], -1605), # T16.0.2 > T16.1.4 error: illegal character '"'
            (' #\'a\'', [97], 1000) # success: with leading space
            ]
# Test vectors for the shift-amount immediate parser.  Each entry is
# (input text, expected value list, status code); status 1000 means
# success, negative values are parser error codes (codes -1002..-1005
# are the shared numeric-literal errors documented in imo_test above).
ims_test = [('', [], -1701),      # error: empty input (missing immediate)
            (' ', [], -1701),     # idem with only a space
            ('2', [], -1702),     # error: missing '#' prefix
            ('#', [], -1703),     # error: missing value after '#'
            ('# ', [], -1704),    # error: unexpected space after '#'
            ('#f', [], -1705),    # error: unrecognizable info after '#'
            ('#2', [2], 1000),    # success: simple decimal value
            ('#-20', [], -1706),  # error: negative number of shifts
            ('#040', [], -1706),  # error: too high number of shifts (octal 040 = 32)
            ('#0x1C', [28], 1000),      # success: hexadecimal value
            ('#0b10101', [21], 1000),   # success: binary value
            ('#0b10020', [], -1002),    # invalid binary digit
            ('#019', [], -1003),        # invalid octal digit
            ('#4d', [], -1004),         # invalid decimal digit
            ('#0xX', [], -1005),        # invalid hexa digit
            (' #0x1F', [31], 1000)      # success: leading space accepted
            ]
# Test vectors for the flexible second-operand (op2) parser: immediate
# ("#<value>"), plain register, or register with optional shift.  Each entry
# is (input text, expected partial encoding list, status code); status 1000
# means success, negative values are parser error codes.  The first group
# mirrors the immediate cases of imo_test; the trailing comments trace the
# parser states (T22.x.y) exercised by each case.
# NOTE(review): the "#' 0'" entry previously read "(' ('#\' 0\'', ...)";
# the stray "' ('" string turned the rest of the line into a comment and
# left an unclosed parenthesis, so the module failed at import time.
op2_test = [('', [], -2201),                  # error: missing operand
            (' ', [], -2203),                 # error: blank operand
            ('2', [], -2203),                 # error: unrecognizable operand
            ('#', [], -1603),                 # error: missing value after '#'
            ('# ', [], -1604),                # error: unexpected space after '#'
            ('#f', [], -1605),                # error: unrecognizable info after '#'
            ('#20', [0x02000014], 1000),      # success: simple byte value
            ('#\'f\'', [0x02000066], 1000),   # success: simple char value
            ('#-20', [], -1606),              # impossible fixup for negative number
            ('#0xC0000034', [0x020001D3], 1000),  # success fixup: split bits
            ('#0x102', [], -1606),            # impossible fixup for odd rotations
            ('#\'e\' c', [], -1607),          # error: unexpected text after imm val.
            ('#0b1002000', [], -1002),        # invalid binary digit
            ('#012000900005', [], -1003),     # invalid octal digit
            ('#45d', [], -1004),              # invalid decimal digit
            ('#0x4X5', [], -1005),            # invalid hexa digit
            ('#0x400000000', [], -1006),      # too long value (>2^32)
            ('#\'', [], -1605),               # T22.0.1 + override : unclosed char
            ('#\' 0\'', [], -1105),           # T22.0.1 + override : more than one character (typo fixed)
            ('#\'\t\'', [], -1103),           # illegal character ''
            ('#\"t\"', [], -1605),            # error: illegal character '"'
            (' #\'a\'', [0x02000061], 1000),  # T22.0.1 success with leading space
            ('r', [], -1303),                 # T22.0.2a + override : missing register number
            ('ra', [], -1304),                # T22.0.2a + override : wrong reg number
            ('r1a', [], -1304),               # T22.0.2a + override : wrong reg number
            ('r-1', [], -1304),               # T22.0.2a + override : negative reg number
            ('r16', [], -1304),               # T22.0.2a + override : too high reg number
            ('r12', [12], 1000),              # T22.0.2a success: single reg
            ('r0 ', [0], 1000),               # T22.0.2a success: single reg with trailing space
            (' sp', [13], 1000),              # T22.0.2a success: single reg with leading space
            ('r1,', [1], -2204),              # T22.0.2b > T22.1.0 error: missing shift mode
            ('r2, ', [2], -2204),             # T22.0.2b > T22.1.1 > T22.1.0 : idem with trailing space
            ('r3, lslx', [3], -2206),         # T22.0.2b > T22.1.1 > T22.1.2c : missing space after shift mode
            ('r3, r0', [3], -2206),           # T22.0.2b > T22.1.1 > T22.1.2c : missing space after shift mode
            ('r3, #0', [3], -2206),           # T22.0.2b > T22.1.1 > T22.1.2c : missing space after shift mode
            ('r4, xl', [4], -2206),           # T22.0.2b > T22.1.1 > T22.1.3 : unrecognized shift mode
            ('r5, lsl', [5], -2205),          # T22.0.2b > T22.1.1 > T22.1.2a : missing space after shift mode
            ('r6, lsr ', [6], -2205),         # > T22.1.2b > T22.2.0 : missing info after shift mode
            ('r7, asr x', [7], -2207),        # > T22.1.2b > T22.2.3 : wrong info after shift mode
            ('r8, ror r', [8], -1303),        # > T22.1.2b > T22.2.1 + override: missing register number
            ('r9, lsl ra', [9], -1304),       # > T22.1.2b > T22.2.1 + override: wrong reg number
            ('r10, lsr r1a', [10], -1304),    # > T22.1.2b > T22.2.1 + override: wrong reg number
            ('r11, asr r-1', [11], -1304),    # > T22.1.2b > T22.2.1 + override: negative reg number
            ('r12, ror r16', [12], -1304),    # > T22.1.2b > T22.2.1 + override: too high reg number
            ('r13, lsl r12', [0xC1D], 1000),  # > T22.1.2b > T22.2.1 success: LSL reg
            ('sp, lsr r0 ', [0x3D], 1000),    # > T22.1.2b > T22.2.1 : LSR reg with trailing space
            ('r1,asr lr', [0xE51], 1000),     # > T22.1.2b > T22.2.1 : ASR reg no space after ','
            ('r8, ror #', [8], -1703),        # > T22.1.2b > T22.2.2 + override: missing value after '#'
            ('r9, lsl # ', [9], -1704),       # > T22.1.2b > T22.2.2 + override: unexpected space after '#'
            ('r10, lsr #f', [10], -1705),     # > T22.1.2b > T22.2.2 + override: unrecognizable info after '#'
            ('r11, asr #2', [0x14B], 1000),   # > T22.1.2b > T22.2.2 success: valid number of shifts
            ('r12, ror #-20', [12], -1706),   # > T22.1.2b > T22.2.2 + override: negative number of shifts
            ('r13, lsl #040', [13], -1706),   # > T22.1.2b > T22.2.2 + override: too high number of shifts
            ('pc, lsr #0x1C ', [0xE2F], 1000),     # > T22.1.2b > T22.2.2 success LSR imm with trailing space
            ('r1,asr #0b10101', [0xAC1], 1000),    # > T22.1.2b > T22.2.2 : ASR bin imm, no space after ','
            ('r8, ror #0b10020', [8], -1002),      # > T22.1.2b > T22.2.2 + override: invalid binary digit
            ('r9, lsl #019', [9], -1003),          # > T22.1.2b > T22.2.2 + override: invalid octal digit
            ('r10, lsr #4d', [10], -1004),         # > T22.1.2b > T22.2.2 + override: invalid decimal digit
            ('r11, asr #0xX', [11], -1005),        # > T22.1.2b > T22.2.2 + override: invalid hexa digit
            (' r12 , ror #0x1F ', [0xFEC], 1000),  # > T22.1.2b > T22.2.2 success with lead/trail spaces
            ('r13, lsl r12 a', [13], -1304),       # > T22.1.2b > T22.2.1 + override: unexpected text after parse
            ('r12, ror #0x1F b', [12], -1005)      # > T22.1.2b > T22.2.2 + override: idem for immediate parsing
            ]
# Test vectors for the data-processing operand-list parser (destination
# register, then one or two source operands, optionally shifted).  Each
# entry is (input text, expected partial encoding list, status code);
# status 1000 means success, negative values are parser error codes.
# The trailing comments trace the parser states (T23.x.y) exercised.
opd_test = [('', [], -2301),                     # T23.0.0 error: missing operands
            (' ', [], -2303),                    # T23.0.2 error: idem with leading space
            ('2', [], -1302),                    # T23.0.1a + override : unrecognizable register
            ('2,', [], -1302),                   # T23.0.1b + override : unrecognizable operand with ','
            ('r', [], -1303),                    # T23.0.1a + override : missing register number
            ('ra', [], -1304),                   # T23.0.1a + override : wrong reg number
            ('r16', [], -1304),                  # T23.0.1a + override : too high reg number
            ('r12', [], -2302),                  # T23.0.1a error: good dest reg, missing other ops
            ('r0 ', [], -2302),                  # T23.0.1a error: missing ',' after dest reg
            ('r1,', [0x1000], -2304),            # T23.0.1b > T23.1.0 error: missing source operands
            ('r2, ', [0x2000], -2306),           # T23.0.1b > T23.1.3 error: missing source operands
            ('r3, 3', [0x3000], -2306),          # T23.0.1b > T23.1.3 error: wrong source op 1
            ('r4, ra', [0x4000], -1304),         # T23.0.1b > T23.1.1 > T23.1.2a + override : wrong reg number
            ('r5, r1a', [0x5000], -1304),        # T23.0.1b > T23.1.1 > T23.1.2a + override : wrong reg number
            ('r6, r-1', [0x6000], -1304),        # T23.0.1b > T23.1.1 > T23.1.2a + override : negative reg number
            ('r7, r16', [0x7000], -1304),        # T23.0.1b > T23.1.1 > T23.1.2a + override : too high reg number
            ('r8, r12', [0x8800C], 1000),        # T23.0.1b > T23.1.1 success: two registers
            ('r9,r1 ', [0x99001], 1000),         # T23.0.1b > T23.1.1 success: idem with no space after ','
            (' sp , lr ', [0xDD00E], 1000),      # T23.0.1b > T23.1.1 success: idem with extra spaces
            ('r10, r1,', [0x0A000], -2204),      # T23.0.1b > T23.1.1 + override : missing shift register
            ('r11, r2, ', [0x0B000], -2204),     # T23.0.1b > T23.1.1 + override : idem with space
            ('r12, r3, 3', [0x3C000], -2308),    # T23.0.1b > T23.1.2b > T23.2.2 error: wrong op 2
            ('r13, r4, ra', [0x4D000], -1304),   # T23.0.1b > T23.1.2b > T23.2.1 + override : wrong reg number
            ('r14, r5, r1a', [0x5E000], -1304),  # T23.0.1b > T23.1.2b > T23.2.1 + override : wrong reg number
            ('r15, r6, r-1', [0x6F000], -1304),  # T23.0.1b > T23.1.2b > T23.2.1 + override : negative reg number
            ('r0, r7, r16', [0x70000], -1304),   # T23.0.1b > T23.1.2b > T23.2.1 + override : too high reg number
            ('r1, r8, r12', [0x8100C], 1000),    # T23.0.1b > T23.1.2b > T23.2.1 success: three registers
            ('r2,r9,r1 ', [0x92001], 1000),      # T23.0.1b > T23.1.2a : idem with no space after ','
            ('r3, #', [0x03000], -1603),         # T23.0.1b > T23.1.1 + override : missing value after '#'
            ('r4, # ', [0x04000], -1604),        # T23.0.1b > T23.1.1 + override : unexpected space after '#'
            ('r5, #f', [0x05000], -1605),        # T23.0.1b > T23.1.1 + override : unrecognizable info after '#'
            ('r6, #20', [0x02066014], 1000),     # T23.0.1b > T23.1.1 success: dest reg + immediate value
            ('r7, #\'f\'', [0x02077066], 1000),  # T23.0.1b > T23.1.1 success: dest reg + immediate char
            ('r8, #-20', [0x08000], -1606),      # T23.0.1b > T23.1.1 + override : impossible fixup for negative num.
            ('r9,#0xC0000034', [0x020991D3], 1000),  # T23.0.1b > T23.1.1 success fixup: split bits
            ('r10, #0x102', [0x0A000], -1606),   # T23.0.1b > T23.1.1 + override : impossible fixup for odd rotations
            ('r11, #\'e\' c', [0xB000], -1607),  # T23.0.1b > T23.1.1 + override : unexpected text after imm val.
            ('r12, #0b1002000', [0x0C000], -1002),   # T23.0.1b > T23.1.1 + override : invalid binary digit
            ('r13, #012000900005', [0x0D000], -1003),  # > T23.1.1 + override : invalid octal digit
            ('r14, #45d', [0x0E000], -1004),     # T23.0.1b > T23.1.1 + override : invalid decimal digit
            ('r15, #0x4X5', [0x0F000], -1005),   # T23.0.1b > T23.1.1 + override : invalid hexa digit
            ('r0, #\'', [0x0], -1605),           # T23.0.1b > T23.1.1 + override : unclosed char
            ('r1, #\' ', [0x01000], -1104),      # T23.0.1b > T23.1.1 + override : unclosed char
            ('r2, #\'\'', [0x02000], -1102),     # T23.0.1b > T23.1.1 + override : empty char
            ('r3, #\' 0\'', [0x03000], -1105),   # T23.0.1b > T23.1.1 + override : more than one character
            ('r4, #\'\t\'', [0x04000], -1103),   # T23.0.1b > T23.1.1 + override : illegal character ''
            ('r5, lslx', [0x05000], -2306),      # T23.0.1b > T23.1.3 error: unrecognized source operand
            ('r5, r10, lslx', [0xA5000], -2308), # T23.0.1b > T23.1.2b > T23.2.2 error: wrong second operand
            ('r5, r10, r1', [0xA5001], 1000),    # T23.0.1b > T23.1.2b > T23.2.1 success: three registers
            ('r5, r10, #2', [0x20A5002], 1000),  # T23.0.1b > T23.1.2b > T23.2.1 success: two regs, one immediate
            ('r6, r1, xl', [0x16000], -2308),    # T23.0.1b > T23.1.2b > T23.2.2 error: wrong second operand
            ('r7, r2, lsl', [0x07000], -2205),   # T23.0.1b > T23.1.1 + override : missing space after shift mode
            ('r8, r3, lsr ', [0x08000], -2205),  # T23.0.1b > T23.1.1 + override : missing info after shift mode
            ('r9, r4, asr x', [0x09000], -2207), # T23.0.1b > T23.1.1 + override : wrong info after shift mode
            ('r10, r5, ror r', [0x0A000], -1303),   # T23.0.1b > T23.1.1 + override : missing register number
            ('r11, r6, lsl ra', [0x0B000], -1304),  # T23.0.1b > T23.1.1 + override : wrong reg number
            ('r12, r7, ror r16', [0x0C000], -1304), # T23.0.1b > T23.1.1 + override : too high reg number
            ('r13, r8, lsl r12', [0xDDC18], 1000),  # T23.0.1b > T23.1.1 success: LSL reg
            ('r14, sp, lsr r0 ', [0xEE03D], 1000),  # T23.0.1b > T23.1.1 : LSR reg with trailing space
            ('r15, r1,asr lr', [0xFFE51], 1000),    # T23.0.1b > T23.1.1 : ASR reg no space after ','
            ('r0, r8, ror #', [0], -1703),          # T23.0.1b > T23.1.1 + override : missing value after '#'
            ('r1, r9, lsl # ', [0x01000], -1704),   # T23.0.1b > T23.1.1 + override : unexpected space after '#'
            ('r2, r10, lsr #f', [0x02000], -1705),  # T23.0.1b > T23.1.1 + override : unrecognizable info after '#'
            ('r3, r11, asr #2', [0x3314B], 1000),   # T23.0.1b > T23.1.1 success: valid number of shifts
            ('r4, r12, ror #-20', [0x04000], -1706),   # > T23.1.1 + override : negative number of shifts
            ('r5, r13, lsl #040', [0x05000], -1706),   # > T23.1.1 + override : too high number of shifts
            ('r5, r13, lsl #00', [0x05500D], 1000),    # > T23.1.1 success: true LSL #0
            ('r6, pc, lsr #0x1C ', [0x66E2F], 1000),   # > T23.1.1 success LSR imm with trailing space
            ('r6, pc, lsr #0x0 ', [0x6600F], 1000),    # > T23.1.1 converting LSR #0 into LSL #0
            ('r7,r1,asr #0b10101', [0x77AC1], 1000),   # > T23.1.1 : ASR bin imm, no space after ','
            ('r7,r1,asr #0b0', [0x77001], 1000),       # > T23.1.1 converting ASR #0 into LSL #0
            ('r8, r13, lsl r12 a', [0x08000], -1304),  # > T23.1.1 + override : unexpected text after parse
            ('r9, r12, ror #0x1F b', [0x09000], -1005),  # > T23.1.1 + override : idem for immediate parsing
            ('r9, r12, ror #0x1F', [0x99FEC], 1000),   # > T23.1.1 success ROR with 31 shifts
            ('r9, r12, ror #0x0', [0x9906C], 1000),    # > T23.1.1 coding ROR #0 as RRX
            ('r13, r7, r8, lsl r12 ', [0x7DC18], 1000),    # > T23.1.2 > T23.2.1 success: three regs, last shift reg
            ('r14 , r8 , sp , lsr r10', [0x8EA3D], 1000),  # > T23.1.2 > T23.2.1 : idem with trailing spaces
            ('r15,r9,r1,asr lr', [0x9FE51], 1000),     # > T23.1.2 > T23.2.1 : idem with space after ','
            ('r13, r7, r8, lsl #12 ', [0x7D608], 1000),    # > T23.1.2 > T23.2.1 success: three regs, last shift imm
            ('r14 , r8 , sp , lsr #10', [0x8E52D], 1000),  # > T23.1.2 > T23.2.1 : idem with trailing spaces
            ('r15,r9,r1,asr #31', [0x9FFC1], 1000),    # > T23.1.2 > T23.2.1 : idem with space after ','
            ('r15,r9,r1,asr r32', [0x9F000], -1304),   # > T23.1.2 > T23.2.1 + override : wrong range reg number
            ('r15,r9,r1,asr #32', [0x9F000], -1706),   # > T23.1.2 > T23.2.1 + override : invalid number of shifts
            ('r15,r9,r1,asr r', [0x9F000], -1303),     # > T23.1.2 > T23.2.1 + override : missing reg number
            ('r15,r9,r1,asr ', [0x9F000], -2205)       # > T23.1.2 > T23.2.1 + override : missing info after shift
            ]
# Test vectors for complete data-processing instructions (mnemonic,
# optional condition suffix, optional 's' flag, operand list).  Each entry
# is (source line, expected partial result list, status code); status 1000
# means success, negative values are parser error codes.  On progress the
# result list holds two values, e.g. [4, 0xE0200000] — presumably
# [byte length, 32-bit encoding]; TODO confirm against the parser.
# The trailing comments trace the parser states (T31.x.y) exercised.
idt_test = [('', [], -3101),                       # T31.0.0 error: missing data instruction
            (' ', [], -3101),                      # T31.0.1 > T31.0.0 error: idem with leading space
            ('2', [], -3103),                      # T31.0.3 error: unrecognizable instruction
            ('and', [], -3102),                    # T31.0.2a error: missing operands after instr.
            ('eor ', [4, 0xE0200000], -3102),      # T31.0.2b > T31.3.0 error: missing operands after instr.
            ('sub 2,', [4, 0xE0400000], -1302),    # T31.0.2b > T31.3.1 + override : unrecognizable operand with ','
            ('rsb r', [4, 0xE0600000], -1303),     # T31.0.2b > T31.3.1 + override : missing register number
            ('add r16', [4, 0xE0800000], -1304),   # T31.0.2b > T31.3.1 + override : too high reg number
            ('adc r12', [4, 0xE0A00000], -2302),   # T31.0.2b > T31.3.1 + override : good dest reg, missing other ops
            ('sbc ', [4, 0xE0C00000], -2303),      # T31.0.2b > T31.3.1 + override : missing dest reg
            ('rsc r1,', [4, 0xE0E00000], -2304),   # T31.0.2b > T31.3.1 + override : missing source operands
            ('orr r2, ', [4, 0xE1800000], -2306),  # T31.0.2b > T31.3.1 + override : missing source operands
            ('bic r3, 3', [4, 0xE1C00000], -2306), # T31.0.2b > T31.3.1 + override : wrong source op 1
            ('and r12, r3, 3', [4, 0xE0000000], -2308),   # > T31.3.1 + override : wrong op 2
            ('eor r3, #', [4, 0xE0200000], -1603),        # > T31.3.1 + override : missing value after '#'
            ('sub r4, # ', [4, 0xE0400000], -1604),       # > T31.3.1 + override : unexpected space after '#'
            ('rsb r5, #f', [4, 0xE0600000], -1605),       # > T31.3.1 + override : unrecognizable info after '#'
            ('add r10, #0x102', [4, 0xE0800000], -1606),  # > T31.3.1 + override : impossible fixup for odd rotations
            ('adc r11, #\'e\' c', [4, 0xE0A00000], -1607),   # > T31.3.1 + override : unexpected text after imm val.
            ('sbc r10, r1,', [4, 0xE0C00000], -2204),     # > T31.3.1 + override : missing shift register
            ('rsc r7, r2, lsl', [4, 0xE0E00000], -2205),  # > T31.3.1 + override : missing space after shift mode
            ('orr r9, r4, asr x', [4, 0xE1800000], -2207),   # > T31.3.1 + override : wrong info after shift mode
            ('bic r0, r8, ror #', [4, 0xE1C00000], -1703),   # > T31.3.1 + override : missing value after '#'
            ('and r1, r9, lsl # ', [4, 0xE0000000], -1704),  # > T31.3.1 + override : unexpected space after '#'
            ('eor r2, r10, lsr #f', [4, 0xE0200000], -1705), # > T31.3.1 + override : unrecognizable info after '#'
            ('sub r4, r12, ror #-20', [4, 0xE0400000], -1706),   # > T31.3.1 + override : negative number of shifts
            ('rsb r12, #0b1002000', [4, 0xE0600000], -1002),     # > T31.3.1 + override : invalid binary digit
            ('add r13, #012000900005', [4, 0xE0800000], -1003),  # > T31.3.1 + override : invalid octal digit
            ('adc r14, #45d', [4, 0xE0A00000], -1004),    # > T31.3.1 + override : invalid decimal digit
            ('sbc r15, #0x4X5', [4, 0xE0C00000], -1005),  # > T31.3.1 + override : invalid hexa digit
            ('rsc r2, #\'\'', [4, 0xE0E00000], -1102),    # > T31.3.1 + override : empty char
            ('orr r4, #\'\t\'', [4, 0xE1800000], -1103),  # > T31.3.1 + override : illegal character ''
            ('bic r1, #\' ', [4, 0xE1C00000], -1104),     # > T31.3.1 + override : unclosed char
            ('and r3, #\' 0\'', [4, 0xE0000000], -1105),  # > T31.3.1 + override : more than one character
            ('eors', [4, 0xE0200000], -3102),       # T31.0.2c > T31.1.2a error: data operands
            ('eoral', [4, 0xE0200000], -3102),      # T31.0.2c > T31.1.1a error: data operands
            ('tsts', [4, 0xE1100000], -3102),       # T31.0.2c > T31.1.2a : missing operands
            ('tsts ', [4, 0xE1100000], -3102),      # T31.0.2c > T31.1.2b > T31.3.0 : missing operands
            ('teqst', [4, 0xE1300000], -3105),      # T31.0.2c > T31.1.2c error: wrong text after instruction
            ('cmpxx', [4, 0xE1500000], -3104),      # T31.0.2c > T31.1.3 error: unknown instruction condition
            ('cmneq', [4, 0xE1700000], -3102),      # T31.0.2c > T31.1.1a error: missing ops after pred.inst.
            ('movne ', [4, 0x11A00000], -3102),     # T31.0.2c > T31.1.1b > T31.3.0 : idem after space
            ('mvncss', [4, 0x21E00000], -3102),     # T31.0.2c > T31.1.1c > T31.2.1a : idem after set flag
            ('mvncsx', [4, 0x21E00000], -3105),     # T31.0.2c > T31.1.1c > T31.2.2 : wrong text after pred.inst
            ('mvncssx', [4, 0x21E00000], -3105),    # T31.0.2c > T31.1.1c > T31.2.1c : wrong text after pred.inst + flag
            ('andhss', [4, 0x20000000], -3102),     # T31.0.2c > T31.1.1c > T31.2.1a : missing operands after set flag
            ('andhss ', [4, 0x20100000], -3102),    # T31.0.2c > T31.1.1c > T31.2.1b > T31.3.0 : after set flag + space
            ('eorccx', [4, 0x30200000], -3105),     # T31.0.2c > T31.1.1c > T31.2.2 : wrong text after pred.inst
            ('sublosx', [4, 0x30400000], -3105),    # T31.0.2c > T31.1.1c > T31.2.1c : wrong text after pred.inst + flag
            ('cmp', [], -3102),                     # T31.0.2a error: missing operands after instr.
            ('cmn ', [4, 0xE1700000], -3102),       # T31.0.2b > T31.3.0 error: missing operands after instr.
            ('mov 2,', [4, 0xE1A00000], -1302),     # T31.0.2b > T31.3.1 + override : unrecognizable operand with ','
            ('mvn r', [4, 0xE1E00000], -1303),      # T31.0.2b > T31.3.1 + override : missing register number
            ('tst r16', [4, 0xE1100000], -1304),    # T31.0.2b > T31.3.1 + override : too high reg number
            ('teq r12', [4, 0xE1300000], -2302),    # T31.0.2b > T31.3.1 + override : good dest reg, missing other ops
            ('cmp ', [4, 0xE1500000], -2303),       # T31.0.2b > T31.3.1 + override : missing source 1 reg
            ('cmn r1,', [4, 0xE1700000], -2304),    # T31.0.2b > T31.3.1 + override : missing source operands
            ('mov r2, ', [4, 0xE1A00000], -2306),   # T31.0.2b > T31.3.1 + override : missing source operands
            ('mvn r3, 3', [4, 0xE1E00000], -2306),  # T31.0.2b > T31.3.1 + override : wrong source op 1
            ('tst r3, #', [4, 0xE1100000], -1603),  # > T31.3.1 + override : missing value after '#'
            ('teq r4, # ', [4, 0xE1300000], -1604), # > T31.3.1 + override : unexpected space after '#'
            ('cmp r5, #f', [4, 0xE1500000], -1605), # > T31.3.1 + override : unrecognizable info after '#'
            ('mov r10, #0x102', [4, 0xE1A00000], -1606),     # > T31.3.1 + override : impossible fixup for odd rotations
            ('mvn r11, #\'e\' c', [4, 0xE1E00000], -1607),   # > T31.3.1 + override : unexpected text after imm val.
            ('tst r7, r2, lsl', [4, 0xE1100000], -2205),     # > T31.3.1 + override : missing space after shift mode
            ('teq r9, r4, asr x', [4, 0xE1300000], -2207),   # > T31.3.1 + override : wrong info after shift mode
            ('cmp r0, r8, ror #', [4, 0xE1500000], -1703),   # > T31.3.1 + override : missing value after '#'
            ('cmn r1, r9, lsl # ', [4, 0xE1700000], -1704),  # > T31.3.1 + override : unexpected space after '#'
            ('mov r2, r10, lsr #f', [4, 0xE1A00000], -1705), # > T31.3.1 + override : unrecognizable info after '#'
            ('mvn r4, r12, ror #-20', [4, 0xE1E00000], -1706),   # > T31.3.1 + override : negative number of shifts
            ('tst r12, #0b1002000', [4, 0xE1100000], -1002),     # > T31.3.1 + override : invalid binary digit
            ('teq r13, #012000900005', [4, 0xE1300000], -1003),  # > T31.3.1 + override : invalid octal digit
            ('cmp r14, #45d', [4, 0xE1500000], -1004),    # > T31.3.1 + override : invalid decimal digit
            ('cmn r15, #0x4X5', [4, 0xE1700000], -1005),  # > T31.3.1 + override : invalid hexa digit
            ('mov r2, #\'\'', [4, 0xE1A00000], -1102),    # > T31.3.1 + override : empty char
            ('mvn r4, #\'\t\'', [4, 0xE1E00000], -1103),  # > T31.3.1 + override : illegal character ''
            ('tst r1, #\' ', [4, 0xE1100000], -1104),     # > T31.3.1 + override : unclosed char
            ('teq r3, #\' 0\'', [4, 0xE1300000], -1105),  # > T31.3.1 + override : more than one character
            ('eorsx', [4, 0xE0200000], -3105),      # T31.0.2c > T31.1.2c error: wrong text after 's'
            ('eorx', [4, 0xE0200000], -3104),       # T31.0.2c > T31.1.3 error: wrong text after inst.
            ('rsb r5, r10, #2', [4, 0xE26A5002], 1000),      # T31.0.2b > T31.3.1 success: two regs, one immediate
            ('add r13, r8, lsl r12', [4, 0xE08DDC18], 1000), # T31.0.2b > T31.3.1 : LSL reg
            ('adc r14, sp, lsr r0 ', [4, 0xE0AEE03D], 1000), # T31.0.2b > T31.3.1 : LSR reg with trailing space
            ('sbc r15, r1,asr lr', [4, 0xE0CFFE51], 1000),   # T31.0.2b > T31.3.1 : ASR reg no space after ','
            ('rsc r6, pc, lsr #0x1C ', [4, 0xE0E66E2F], 1000),   # T31.0.2b > T31.3.1 : LSR imm with trailing space
            ('rsc r6, pc, lsr #0x0 ', [4, 0xE0E6600F], 1000),    # : LSR #0 -> LSL #0
            ('orrs r7,r1,asr #0b10101', [4, 0xE1977AC1], 1000),  # > T31.1.2b > T31.3.1:ASR bin imm, no space after ','
            ('orrs r7,r1,asr #0b0', [4, 0xE1977001], 1000),      # : ASR #0 -> LSL #0
            ('bicmi r13, r7, r8, lsl r12 ', [4, 0x41C7DC18], 1000),      # > T31.1.1b > T31.3.1 : three regs, shift reg
            ('andpls r14 , r8 , sp , lsr r10', [4, 0x5018EA3D], 1000),   # > T31.1.1c > T31.2.1b > T31.3.1 : cond. + 's'
            ('eorvss r15,r9,#\'f\'', [4, 0x6239F066], 1000),     # > T31.1.1c > T31.2.1b > T31.3.1 : cond.+'s'+ imm.
            ('subvc r9,#0xC0000034', [4, 0x724991D3], 1000),     # T31.0.2c > T31.1.1b > T31.3.1 : one reg + one imm.
            ('rsbhis r8 , sp , lsr #10', [4, 0x8078852D], 1000), # > T31.1.1c > T31.2.1b > T31.3.1: reg + shifted reg
            ('addls r9,r1,asr r15', [4, 0x90899F51], 1000),      # > T31.1.1b > T31.3.1 : idem with no 's'
            ('tst r7,r1, #0b10101', [4, 0xE1100000], -2310),     # T31.0.2b > T31.3.1 + override : 3 ops with 'tst'
            ('teq r13,r7,r8,lsl r12', [4, 0xE1300000], -2310),   # T31.0.2b > T31.3.1 + override : 3 ops with 'teq'
            ('cmppl r14,r8,sp,lsr r10', [4, 0x51500000], -2310), # T31.0.2b > T31.3.1 + override : 3 ops with 'cmp'
            ('cmnvss r15,r9,#\'f\'', [4, 0x61700000], -2310),    # T31.0.2b > T31.3.1 + override : 3 ops with 'cmn'
            ('movvc r1,r9, #0xC000', [4, 0x71A00000], -2311),    # T31.0.2b > T31.3.1 + override : 3 ops with 'mov'
            ('mvnhis r8, lr, sp, lsr pc', [4, 0x81F00000], -2311),   # > T31.3.1 + override : 3 os with 'mvn'
            ('tst r7, #0b10101', [4, 0xE3170015], 1000),     # T31.0.2b > T31.3.1 : 'tst' + reg + imm
            ('teqlss r7,r8,lsl r12', [4, 0x91370C18], 1000), # > T31.1.1c > T31.2.1b > T31.3.1: 'teq'+reg+shifted reg
            ('cmpge r14, r8', [4, 0xA15E0008], 1000),        # > T31.1.1c > T31.3.1 : 'cmp' + reg + reg
            ('cmnlt r15, #\'f\'', [4, 0xB37F0066], 1000),    # > T31.1.1c > T31.3.1 : 'cmn' + reg + char
            ('movgts r1, #0xC000', [4, 0xC3B01903], 1000),   # > T31.1.1c > T31.2.1b > T31.3.1: 'mov' + reg + imm
            ('mvnle lr, sp, lsr #15', [4, 0xD1E0E7AD], 1000),    # > T31.1.1c > T31.3.1 : 'mvn'+reg+shifted reg
            ('mov r2, #-1', [4, 0xE3E02000], 1000),          # T31.0.2b > T31.3.1 : 'mov' + reg + NOT imm
            ('mvn r3, #0xFFF00FFF', [4, 0xE3A03AFF], 1000),  # T31.0.2b > T31.3.1 : 'mvn' + reg + NOT imm
            ('and r4, #-200', [4, 0xE3C440C7], 1000),        # T31.0.2b > T31.3.1 : 'and' + reg + NOT imm
            ('bic r5, #0xFFC03FFF', [4, 0xE20559FF], 1000),  # T31.0.2b > T31.3.1 : 'bic' + reg + NOT imm
            ('add r6, #-300', [4, 0xE2466F4B], 1000),        # T31.0.2b > T31.3.1 : 'add' + reg + NOT imm
            ('sub r7, #0xFF100000', [4, 0xE287760F], 1000),  # T31.0.2b > T31.3.1 : 'mvn' + reg + NOT imm
            ('cmp r8, #-1000', [4, 0xE3780FFA], 1000),       # T31.0.2b > T31.3.1 : 'cmp' + reg + NOT imm
            ('cmn r9, #0xFFC04000', [4, 0xE35909FF], 1000)   # T31.0.2b > T31.3.1 : 'cmn' + reg + NOT imm
            ]
# Test vectors for multiplication instructions (mul/mla and the long
# multiplies umull/umlal/smull/smlal, with optional condition and 's').
# Each entry is (source line, expected partial result list, status code);
# status 1000 means success, negative values are parser error codes.
# The trailing comments trace the parser states (T32.x.y) exercised.
iml_test = [('', [], -3201),                       # T32.0.0 error: missing multiplication instr.
            (' ', [], -3201),                      # T32.0.1 > T32.0.0 error: idem with leading space
            ('2', [], -3203),                      # T32.0.3 error: unrecognizable instruction
            ('mul', [], -3202),                    # T32.0.2a error: missing operands after instr.
            ('mla ', [4, 0xE0200090], -3202),      # T32.0.2b > T32.3.0 error: missing operands after instr.
            ('umull 2,', [4, 0xE0800090], -1302),  # T32.0.2b > T32.3.1b + override : unrecognizable operand with ','
            ('smull r', [4, 0xE0C00090], -1303),   # T32.0.2b > T32.3.1b + override : missing register number
            ('umlal r16', [4, 0xE0A00090], -1304), # T32.0.2b > T32.3.1b + override : too high reg number
            ('smlal r12', [4, 0xE0E00090], -3202), # T32.0.2b > T32.3.1a error: good dest reg, missing other ops
            ('mul ', [4, 0xE0000090], -1301),      # T32.0.2b > T32.3.1a + override : missing reg1
            ('mla r1,', [4, 0xE0210090], -3202),   # T32.0.2b > T32.3.1b > T32.4.0 : missing source operands
            ('umull r2, ', [4, 0xE0802090], -1301),       # > T32.4.1b + override : missing reg2
            ('smull r3, gu', [4, 0xE0C03090], -1302),     # > T32.4.1b + override : wrong op 2
            ('umlal r12, r3, e3', [4, 0xE0A3C090], -1302),   # > T32.5.1b + override : wrong op 3
            ('smlal r3, r4, r5, ', [4, 0xE0E43095], -1301),  # > T32.6.1 + override : missing reg4
            ('mul r3, r4, r5, r6', [4, 0xE0030594], -3207),  # > T32.6.1 + override : four regs with 'mul'
            ('mla r3, r4, r5', [4, 0xE0230594], -3202),      # > T32.6.1 + override : three regs with 'mla'
            ('mul r3, r4, r5', [4, 0xE0030594], 1000),       # > T32.5.1a success: three regs with 'mul'
            ('mla r3, r4, r5, r6', [4, 0xE0236594], 1000),   # > T32.6.1 success: four regs with 'mla'
            ('umull r10, r11, r12, r13', [4, 0xE08BAD9C], 1000),   # > T32.6.1 : four regs with 'umull'
            ('umlal r1, r11, r2, r3', [4, 0xE0AB1392], 1000),      # > T32.6.1 : four regs with 'umlal'
            ('smull r10, r11, lr, r10', [4, 0xE0CBAA9E], 1000),    # > T32.6.1 : four regs with 'smull'
            ('smlal sp, lr, r0, r7', [4, 0xE0EED790], 1000),       # > T32.6.1 : four regs with 'smlal'
            ('mul pc, r0, r7', [4, 0xE0000090], -3208),      # > T32.5.1a + override : use of PC as Rd
            ('mul r0, pc, r8', [4, 0xE0000090], -3208),      # > T32.5.1a + override : use of PC as Rm
            ('mla r0, r7, pc', [4, 0xE0200097], -3208),      # > T32.5.1a + override : use of PC as Rs
            ('umlal r10, pc, r6, r9', [4, 0xE0A0A090], -3208),   # + override : use of PC as RdHi
            ('smlal pc, r9, r8, r7', [4, 0xE0E00090], -3208),    # + override : use of PC as RdLo
            ('mul r3, r3, r5', [4, 0xE0030593], 1000),       # + warning : Rd should be different from Rm
            ('mla r5, r5, r5, r1', [4, 0xE0251595], 1000),   # + warning : Rd should be different from Rm
            ('mla r3, r4, r3, r4', [4, 0xE0234394], 1000),   # success : should work
            ('mla r3, r4, r3, r3', [4, 0xE0233394], 1000),   # success : should work
            ('umull r6, r7, r7, r6', [4, 0xE0876697], 1000), # + warning : RdHi, RdLo and Rm must all be dif
            ('smull r9, r10, r9,r9', [4, 0xE0CA9999], 1000), # + warning : RdHi, RdLo and Rm must all be dif
            ('umlal r6, r6, r7, r6', [4, 0xE0A66697], 1000), # + warning : RdHi and RdLo must be different
            ('smlal r8, r9, r10,r8', [4, 0xE0E9889A], 1000), # success : should work
            ('muleq', [4, 0xE0000090], -3202),     # T32.0.2c > T32.1.1a error : cond & missing ops
            ('muls', [4, 0xE0000090], -3202),      # T32.0.2c > T32.1.2a error : 's'' & missing ops
            ('mulz', [4, 0xE0000090], -3204),      # T32.0.2c > T32.1.3 error : wrong text after
            ('muleqs', [4, 0x00000090], -3202),    # > T32.1.1c > T32.2.1a error : missing ops
            ('muleqsz', [4, 0x00000090], -3205),   # > T32.1.2b > T32.2.1c error : missing ops
            ('smull r3, r4', [4, 0xE0C03090], -3202),        # > T32.4.1a error : missing ops
            ('smull r3, r4,', [4, 0xE0C43090], -3202),       # > T32.5.0 error : missing ops
            ('smull r3, r4, r5', [4, 0xE0C43095], -3202),    # > T32.5.1a error : missing ops
            ('smull r3, r4, r5,', [4, 0xE0C43095], -3202),   # > T32.6.0 error : missing ops
            ('muleq r3, r4, r5', [4, 0x00030594], 1000),     # T32.0.2c > T32.1.1b > success : 'mul' + cond
            ('mlanes r3, r4, r5, r6', [4, 0x10336594], 1000),      # > T32.1.1c > T32.2.1b > : 'mla' + cond + 's'
            ('umulls r10, r11, r12, r13', [4, 0xE09BAD9C], 1000),  # T32.0.2c > T32.1.2b > : 'umull' + 's'
            ('umlalle r1, r11, r2, r3', [4, 0xD0AB1392], 1000),    # T32.0.2c > T32.1.1b > : 'umlal' + cond
            ('smulllex r10, r11, lr, r10', [4, 0xD0C00090], -3205),   # T32.0.2c > T32.1.1c > T32.2.2 : error after cond
            ('smlalsy sp, lr, r0, r7', [4, 0xE0E00090], -3205)        # T32.0.2c > T32.1.2c : error after 's'
            ]
# Test vectors for branch instructions (b/bl with an offset, bx/blx with a
# register, all with optional condition suffix).  Each entry is
# (source line, expected partial result list, status code); status 1000
# means success, negative values are parser error codes.  The trailing
# comments trace the parser states (T33.x.y) exercised by each case.
ibr_test = [('', [], -3301),                       # T33.0.0 error: missing branch instr.
            (' ', [], -3301),                      # T33.0.1 > T33.0.0 error: idem with leading space
            ('2', [], -3303),                      # T33.0.5 error: unrecognizable instruction
            ('blo', [], -3302),                    # T33.0.2a error: missing offset after instr.
            ('bleq ', [4, 0x0B000000], -3302),     # T33.0.2b > T33.3.0 : missing offset after instr.
            ('blox', [4], -3303),                  # T33.0.2c error: unexpected text after instr.
            ('bx', [], -3304),                     # T33.0.3a error: missing reg after instr.
            ('blx ', [4, 0xE12FFF30], -3304),      # T33.0.3b > T33.4.0 error: missing reg after instr.
            ('blxo', [4, 0xE12FFF30], -3303),      # T33.0.3c > T33.2.2 error: unexpected text after instr.
            ('b', [], -3302),                      # T33.0.4a error: missing offset after instr.
            ('bl ', [4, 0xEB000000], -3302),       # T33.0.4b > T33.3.0 error: missing offset after instr.
            ('bly', [4, 0xEB000000], -3303),       # T33.0.4c > T33.1.2 error: unexpected text after instr.
            ('beq', [4, 0xEA000000], -3302),       # T33.0.4c > T33.1.1a error: missing offset after instr.
            ('blne ', [4, 0x1B000000], -3302),     # T33.0.4c > T33.1.1b > T 33.3.0 : missing offset after instr.
            ('blnex', [4, 0x1B000000], -3303),     # T33.0.4c > T33.1.1c : unexpected text after instr.
            ('bxeq', [4, 0xE12FFF10], -3302),      # T33.0.3c > T33.2.1a error: missing offset after instr.
            ('blxeq ', [4, 0x012FFF30], -3304),    # T33.0.3c > T33.2.1b > T 33.4.0 : missing offset after instr.
            ('blxeqx', [4, 0x012FFF30], -3303),    # T33.0.3c > T33.2.1c : unexpected text after instr.
            ('blt f', [4, 0xBA000000], -3305),     # T33.0.2b > T33.3.2 error: wrong offset
            ('bls 0b12', [4, 0x9A000000], -1002),  # T33.0.2b > T33.3.1 + override : unexpected binary digit
            ('blls 0192', [4, 0x9B000000], -1003),       # > T33.1.1b > T33.3.1 + override: unexpected octal digit
            ('bllo -192a', [4, 0x3B000000], -1004),      # > T33.1.1b > T33.3.1 + override: unexpected decimal digit
            ('blvc 0xA3G0', [4, 0x7B000000], -1005),     # > T33.1.1b > T33.3.1 + override: unexpected hexa digit
            ('bvc 0xA30000000', [4, 0x7A000000], -1006), # > T33.3.1 + override: too long hex address
            ('bxvc 0xA300', [4, 0x712FFF10], -1302),     # > T33.2.1b > T33.4.1 + override: unrecognized reg
            ('blxcc r', [4, 0x312FFF30], -1303),   # > T33.2.1b > T33.4.1 + override: missing reg number
            ('bxcc rf', [4, 0x312FFF10], -1304),   # > T33.2.1b > T33.4.1 + override: wrong reg number
            ('bxmi r16', [4, 0x412FFF10], -1304),  # > T33.2.1b > T33.4.1 + override: wrong reg number
            ('bx r6', [4, 0xE12FFF16], 1000),      # T33.0.3b > T33.4.1 success: 'bx' jump
            ('blxpl r6', [4, 0x512FFF36], 1000),   # > T33.2.1b > T33.4.1 success: 'blx' jump
            ('blxlt r15', [4, 0xB12FFF3F], 1000),  # > T33.2.1b > T33.4.1 warning: use of pc (r15)
            ('b 0xA300', [4, 0xEA0028C0], 1000),   # T33.0.4b > T33.3.1 success: 'b' jump
            ('bl -1300', [4, 0xEBFFFEBB], 1000),   # T33.0.4b > T33.3.1 success: 'bl' negative jump
            ('blt 073000000', [4, 0xBA3B0000], 1000),    # > T33.3.1 success: 'blt' octal jump
            ('bleq 0x730000', [4, 0x0B1CC000], 1000),    # > T33.3.1 success: 'bleq' hexa jump
            ('bhi 0xA30000', [4, 0x8A28C000], 1000),     # > T33.3.1 success: 'b' jump
            ('blgt 0x1302', [4, 0xCB000000], -3307),     # > T33.3.1 + override : misaligned address
            ('bllt 0x73000000', [4, 0xBB000000], -3308), # > T33.3.1 + override : out of range offset
            ('blal -73000000', [4, 0xEB000000], -3308),  # > T33.3.1 + override : out of range negative offset
            ('bal -7300001', [4, 0xEA000000], -3307)     # > T33.3.1 + override : misaligned negative address
            ]
# Addressing-mode-2 (word/byte transfer operand) parser fixtures.
# Each entry is (source operand text, [expected partial encoding], status),
# where status 1000 means success and a negative value is an error code.
am2_test = [('', [], -2401),                          # T24.0.0 error: missing addressing mode
            (' ', [], -2401),                         # T24.0.1 > T24.0.0 error: idem with leading space
            ('2', [], -2402),                         # T24.0.3 error: missing '['
            ('[', [], -2403),                         # T24.0.2 > T24.1.0 error: missing info after '['
            ('[2', [], -2403),                        # T24.0.2 > T24.1.2 : unrecognizable register
            ('[r', [], -1303),                        # T24.0.2 > T24.1.1a + override : missing register number
            ('[ra', [], -1304),                       # T24.0.2 > T24.1.1a + override : wrong reg number
            ('[r16', [], -1304),                      # T24.0.2 > T24.1.1a + override : too high reg number
            ('[r12', [], -2404),                      # T24.0.2 > T24.1.1a error: good base reg, missing closure
            ('[r0 ', [], -2404),                      # T24.0.2 > T24.1.1a error: missing ',' after base reg
            ('[r1,', [0x01810000], -2405),            # T24.0.2 > T24.1.1b > T24.2.0 : missing displacement
            ('[r2]!', [0x01820000], -2410),           # T24.0.2 > T24.1.1c > T24.7.2 : unexpected text after ']'
            ('[r3, 3', [0x01830000], -2406),          # > T24.1.1b > T24.2.1 > T24.2.6 : wrong displacement
            ('[r4, ra', [0x01840000], -1304),         # > T24.2.1 > T24.2.5a + override: wrong reg number
            ('[r5, r1a', [0x01850000], -1304),        # > T24.2.1 > T24.2.5a + override: wrong reg number
            ('[r6, +r1', [0x01860000], -2404),        # > T24.2.1 > T24.2.2 > T24.3.1a : check positive reg displ.
            ('[r7, -r6', [0x01070000], -2404),        # > T24.2.1 > T24.2.3 > T24.3.1a : check negative reg displ.
            ('[r8, -', [0x01080000], -2405),          # > T24.2.3 > T24.3.0 : EOSeq after '-'
            ('[r8, -3.2', [0x01080000], -2406),       # > T24.2.3 > T24.3.2 : wrong reg after '-'
            ('[r5, r10, ', [0x0385000A], -2407),      # > T24.2.5b > T24.5.1 > T24.5.0 : missing shift mode
            ('[r7, r2, lsl', [0x03870002], -2408),    # > T24.2.5b > T24.5.1 > T24.5.2a: missing space after shift
            ('[r8, r3, lsr ', [0x03880003], -2408),   # > T24.5.2b > T24.6.0 : missing info after shift mode
            ('[r10, r5, ror r', [0x038A0005], -1702), # > T24.5.2b > T24.6.2 : idem
            ('[r1, r9, lsl # ', [0x03810009], -1704), # > T24.5.2b > T24.6.1a + override : unexpected space after '#'
            ('[r3, r11, asr #2', [0x0383000B], -2404), # > T24.5.2b > T24.6.1a : valid scaled reg, missing ']'
            ('[r8, #', [0x01880000], -2405),          # > T24.2.1 > T24.2.4 > T24.4.0 : missing displacement
            ('[r4, # ', [0x01840000], -2406),         # > T24.2.1 > T24.2.4 > T24.4.2 : unexpected space after '#'
            ('[r5, #\'f\'', [0x01850000], -2406),     # > T24.2.1 > T24.2.4 > T24.4.2 : unrecognizable info after '#'
            ('[r6, #20', [0x01860000], -2404),        # > T24.2.1 > T24.2.4 > T24.4.1a : base + imm. displ., missing ']'
            ('[r8, #-20', [0x01880000], -2404),       # > T24.2.1 > T24.2.4 > T24.4.1a : idem for negative imm. displ.
            ('[r9,#0xC0000034]', [0x1890000], -2411), # > T24.4.1b + override : too long immediate displacement
            ('[r12, #0b1002000]', [0x018C0000], -1002), # + override : invalid binary digit
            ('[r13, #012000900005]', [0x018D0000], -1003), # + override : invalid octal digit
            ('[r14, #45d]', [0x018E0000], -1004),     # + override : invalid decimal digit
            ('[r15, #0x4X5]', [0x018F0000], -1005),   # + override : invalid hexa digit
            ('[ r6, #+0]', [0x01860000], 1000),       # > T24.2.4 > T24.4.1b > T24.7.0 : success base + imm. displ.
            ('[r6, #20]', [0x01860014], 1000),        # > T24.2.4 > T24.4.1b > T24.7.0 : success base + imm. displ.
            ('[r7, #+4095]', [0x01870FFF], 1000),     # > T24.2.4 > T24.4.1b > T24.7.0 : maximum positive imm. displ.
            ('[r8, #-20]', [0x01080014], 1000),       # > T24.2.4 > T24.4.1b > T24.7.0 : base + negative imm. displ.
            ('[r9, #-4095]', [0x01090FFF], 1000),     # > T24.2.4 > T24.4.1b > T24.7.0 : minimum negative imm. displ.
            ('[r10]', [0x018A0000], 1000),            # T24.0.2 > T24.1.1c > T24.7.0 : success base only
            ('[sp ]', [0x018D0000], 1000),            # T24.0.2 > T24.1.1c > T24.7.0 : idem with trailing space
            ('[r9,r1]', [0x03890001], 1000),          # > T24.1.1b > T24.2.5c > T24.7.0: success base + reg. displacement
            ('[ sp , lr ]', [0x038D000E], 1000),      # > T24.1.1b > T24.2.5c > T24.7.0: idem with extra spaces
            ('[r1, +r6]', [0x03810006], 1000),        # > T24.2.2 > T24.3.1c > T24.7.0 : check positive reg displ.
            ('[r6, -r7]', [0x03060007], 1000),        # > T24.2.3 > T24.3.1c > T24.7.0 : check negative reg displ.
            ('[r5, r15]', [0x01850000], -2412),       # > T24.2.5b + override : PC not allowed as Rm
            ('[r5, r10, ]', [0x0385000A], -2409),     # > T24.2.5b > T24.5.1 > T24.5.3 : missing shift mode
            ('[r5, r10, lslx]', [0x0385000A], -2409), # > T24.2.5b > T24.5.1 > T24.5.3 : wrong shift mode
            ('[r7, +r2, lsl]', [0x03870002], -2409),  # > T24.3.1b > T24.5.1 > T24.5.2c : missing space after shift
            ('[r8, -r3, lsr ]', [0x03080003], -2409), # > T24.3.1b > T24.6.2 : missing info after shift mode
            ('[r9, r4, asr x]', [0x03890004], -1702), # > T24.5.2b > T24.6.2 : wrong info after shift mode
            ('[r0, r8, ror #]', [0x03800008], -1703), # > T24.5.2b > T24.6.1a + override : missing value after '#'
            ('[r2, r10, lsr #f]', [0x0382000A], -1705), # > T24.5.2b > T24.6.1a + override : unrecogn. info after '#'
            ('[r4, r12, ror #-20]', [0x0384000C], -1706), # > T24.6.1b + override : negative number of shifts
            ('[r5, r13, lsl #040]', [0x0385000D], -1706), # > T24.6.1b + override : too high number of shifts
            ('[r5, r13, lsl #0]', [0x0385000D], 1000), # > T24.6.1b > T24.7.0 : true LSL #0
            ('[r6, lr, lsr #0x1C] ', [0x03860E2E], 1000), # > T24.6.1b > T24.7.1> T24.7.0: success with trailing space
            ('[r5, r13, lsl #00]', [0x0385000D], 1000), # > T24.6.1b > T24.7.0 : true LSL #0
            ('[r6, sp, lsr #0x0 ]', [0x0386000D], 1000), # > T24.6.1b > T24.7.0 : converting LSR #0 into LSL #0
            ('[r7,-r1,asr #0b10101]', [0x03070AC1], 1000), # : ASR bin imm, no space after ','
            ('[r7,+r1,asr #0b0]', [0x03870001], 1000), # : converting ASR #0 into LSL #0
            ('[r9, r12, ror #0x1F]', [0x03890FEC], 1000), # : success ROR with 31 shifts
            ('[r9, r12, ror #0x0]', [0x0389006C], 1000) # : coding ROR #0 as RRX
            ]
# Addressing-mode-3 (halfword/signed transfer operand) parser fixtures.
# Each entry is (source operand text, [expected partial encoding], status),
# where status 1000 means success and a negative value is an error code.
am3_test = [('', [], -2501),                          # T25.0.0 error: missing addressing mode
            (' ', [], -2501),                         # T25.0.1 > T25.0.0 error: idem with leading space
            ('2', [], -2502),                         # T25.0.3 error: missing '['
            ('[', [], -2503),                         # T25.0.2 > T25.1.0 error: missing info after '['
            ('[2', [], -2503),                        # T25.0.2 > T25.1.2 : unrecognizable register
            ('[r', [], -1303),                        # T25.0.2 > T25.1.1a + override : missing register number
            ('[ra', [], -1304),                       # T25.0.2 > T25.1.1a + override : wrong reg number
            ('[r16', [], -1304),                      # T25.0.2 > T25.1.1a + override : too high reg number
            ('[r12', [], -2504),                      # T25.0.2 > T25.1.1a error: good base reg, missing closure
            ('[r0+', [], -1304),                      # T25.0.2 > T25.1.1a + override : missing ',' after base reg
            ('[r1,', [0x01C10000], -2505),            # T25.0.2 > T25.1.1b > T25.2.0 : missing displacement
            ('[r2]!', [0x01C20000], -2510),           # T25.0.2 > T25.1.1c > T25.7.2 : unexpected text after ']'
            ('[r3, 3', [0x01C30000], -2506),          # > T25.1.1b > T25.2.1 > T25.2.6 : wrong displacement
            ('[r4, ra', [0x01C40000], -1304),         # > T25.2.1 > T25.2.5a + override: wrong reg number
            ('[r5, r1a', [0x01C50000], -1304),        # > T25.2.1 > T25.2.5a + override: wrong reg number
            ('[r6, +r1', [0x01C60000], -2504),        # > T25.2.1 > T25.2.2 > T25.3.1a : check positive reg displ.
            ('[r7, -r6', [0x01470000], -2504),        # > T25.2.1 > T25.2.3 > T25.3.1a : check negative reg displ.
            ('[r8, -', [0x01480000], -2505),          # > T25.2.3 > T25.3.0 : EOSeq after '-'
            ('[r8, -3.2', [0x01480000], -2506),       # > T25.2.3 > T25.3.2 : wrong reg after '-'
            ('[r5, r10, ', [0x01C50000], -2513),      # > T25.2.5b : scaled reg. displ. not allowed
            ('[r7, r2, lsl', [0x01C70000], -2513),    # > T25.2.5b : idem
            ('[r8, #', [0x01C80000], -2505),          # > T25.2.1 > T25.2.4 > T25.4.0 : missing displacement
            ('[r4, # ', [0x01C40000], -2506),         # > T25.2.1 > T25.2.4 > T25.4.2 : unexpected space after '#'
            ('[r5, #\'f\'', [0x01C50000], -2506),     # > T25.2.1 > T25.2.4 > T25.4.2 : unrecognizable info after '#'
            ('[r6, #20', [0x01C60000], -2504),        # > T25.2.1 > T25.2.4 > T25.4.1a : base + imm. displ., missing ']'
            ('[r9, #0x134]', [0x1C90000], -2511),     # > T25.4.1b + override : too long immediate displacement
            ('[r12, #0b0001103]', [0x01CC0000], -1002), # + override : invalid binary digit
            ('[r13, #012009005]', [0x01CD0000], -1003), # + override : invalid octal digit
            ('[r14, #4+5]', [0x01CE0000], -1004),     # + override : invalid decimal digit
            ('[r15, #0xX45]', [0x01CF0000], -1005),   # + override : invalid hexa digit
            ('[ r6, #+0]', [0x01C60000], 1000),       # > T25.2.4 > T25.4.1b > T25.7.0 : success base + imm. displ.
            ('[r6 ,#195]', [0x01C60C03], 1000),       # > T25.2.4 > T25.4.1b > T25.7.0 : success base + imm. displ.
            (' [r7, #+255]', [0x01C70F0F], 1000),     # > T25.2.4 > T25.4.1b > T25.7.0 : maximum positive imm. displ.
            ('[r8, # -80]', [0x01480500], 1000),      # > T25.2.4 > T25.4.1b > T25.7.0 : base + negative imm. displ.
            ('[r9, #-255 ]', [0x01490F0F], 1000),     # > T25.2.4 > T25.4.1b > T25.7.0 : minimum negative imm. displ.
            ('[r9,# - 25]', [0x01490109], 1000),      # > T25.2.4 > T25.4.1b > T25.7.0 : negative with white spaces
            ('[r9, # + 25]', [0x01C90109], 1000),     # > T25.2.4 > T25.4.1b > T25.7.0 : positive with white spaces
            ('[r10]', [0x01CA0000], 1000),            # T25.0.2 > T25.1.1c > T25.7.0 : success base only
            ('[sp ]', [0x01CD0000], 1000),            # T25.0.2 > T25.1.1c > T25.7.0 : idem with trailing space
            ('[r9,r1]', [0x01890001], 1000),          # > T25.1.1b > T25.2.5c > T25.7.0: success base + reg. displacement
            ('[ sp , lr ]', [0x018D000E], 1000),      # > T25.1.1b > T25.2.5c > T25.7.0: idem with extra spaces
            ('[r1, +r6]', [0x01810006], 1000),        # > T25.2.2 > T25.3.1c > T25.7.0 : check positive reg displ.
            ('[r1, + r6]', [0x01810006], 1000),       # > T25.2.2 > T25.3.1c > T25.7.0 : idem with white space
            ('[r6, -r7]', [0x01060007], 1000),        # > T25.2.3 > T25.3.1c > T25.7.0 : check negative reg displ.
            ('[r6,- r7] ', [0x01060007], 1000),       # > T25.3.1c > T25.7.1 > T25.7.0 : idem with white space
            ('[r5, r15]', [0x01C50000], -2512),       # > T25.2.5b + override : PC not allowed as Rm
            ('[r5, r10+]', [0x01C50000], -1304),      # > T25.2.5b + override : wrong text after reg. number
            ('[r5, +r10,]', [0x01C50000], -2513)      # > T25.2.2 > T25.3.1b : scaled reg. displ. not allowed
            ]
# Word/byte memory-transfer instruction (ldr/str and variants) fixtures.
# Each entry is (source line, [size, expected word encoding], status),
# where status 1000 means success and a negative value is an error code.
im2_test = [('', [], -3401),                          # T34.0.0 error: missing memory transfer inst.
            (' ', [], -3401),                         # T34.0.1 > T34.0.0 error: idem with leading space
            ('2', [], -3402),                         # T34.0.3 error: missing 'ld' or 'st'
            ('ld', [4, 0xE0000000], -3402),           # T34.0.2 > T34.1.0 error: missing inst. continuation
            ('st ', [4, 0xE0000000], -3402),          # T34.0.2 > T34.1.4 error: missing inst. continuation
            ('str', [4, 0xE0000000], -3403),          # T34.0.2 > T34.1.1 > T34.2.0 : missing space after inst.
            ('ldr ', [4, 0xE4100000], -3405),         # > T34.1.1 > T34.2.1 > T34.5.0 : missing destination register
            ('sts', [4, 0xE0000000], -3408),          # T34.0.2 > T34.1.2 + override : 's' not allowed for store inst.
            ('ldx', [4, 0xE0000000], -3402),          # T34.0.2 > T34.1.4 : unrecognized mem. transfer inst.
            ('ldrb', [4, 0xE0000000], -3403),         # > T34.1.1 > T34.2.2 > T34.3.0 : missing space after inst.
            ('strb ', [4, 0xE4400000], -3405),        # > T34.2.2 > T34.3.1 > T34.5.0 : missing destination register
            ('ldrby', [4, 0xE0000000], -3404),        # > T34.2.2 > T34.3.2 : wrong text after inst.
            ('ldrb e', [4, 0xE4500000], -1302),       # > T34.3.1 > T34.5.1a + override: unknown reg
            ('str r', [4, 0xE4000000], -1303),        # > T34.2.1 > T34.5.1a + override: missing reg number
            ('ldr rb', [4, 0xE4100000], -1304),       # > T34.2.1 > T34.5.1a + override: wrong reg number
            ('ldrb r1', [4, 0xE4500000], -3406),      # > T34.2.1 > T34.5.1a error: missing ',' after dest. reg
            ('strb r2,', [4, 0xE4402000], -3407),     # > T34.5.1b > T34.6.0 error: missing info after dest. reg
            ('streq', [4, 0x00000000], -3403),        # > T34.2.3 > T34.4.0 : missing space after inst.
            ('ldrne ', [4, 0x14100000], -3405),       # > T34.2.3 > T34.4.1 > T34.5.0 : missing destination register
            ('strles', [4, 0xD0000000], -3408),       # > T34.2.3 > T34.4.4 + override : 's' not allowed for store inst.
            ('ldrlox', [4, 0x30000000], -3404),       # > T34.2.3 > T34.4.5 : unrecognized mem. transfer inst.
            ('ldrmib', [4, 0x40000000], -3403),       # > T34.2.3 > T34.4.2 > T34.3.0 : missing space after inst.
            ('strmib ', [4, 0x44400000], -3405),      # > T34.4.2 > T34.3.1 > T34.5.0 : missing destination register
            ('ldrhsbx', [4, 0x20000000], -3404),      # > T34.4.2 > T34.3.2 : wrong text after inst.
            ('ldrhsb r2, 2', [4, 0x24502000], -2402), # > T34.6.1 > T34.6.3 + override : missing '['
            ('strvcb r3, [', [4, 0x74403000], -2403), # > T34.6.3 + override : missing info after '['
            ('ldrge r4, [2', [4, 0xA4104000], -2403), # > T34.6.3 + override : unrecognizable register
            ('strltb r5,[r', [4, 0xB4405000], -1303), # > T34.6.3 + override : missing register number
            ('ldrvc r6, [r16', [4, 0x74106000], -1304), # + override : too high reg number
            ('ldr lr, [r12', [4, 0xE410E000], -2404), # + override : good base reg, missing closure
            ('str sp, [r0 ', [4, 0xE400D000], -2404), # + override : missing ',' after base reg
            ('ldrb r15, [r1,', [4, 0xE450F000], -2405), # + override : missing displacement
            ('strb pc, [r2]!', [4, 0xE440F000], -2410), # + override : unexpected text after ']'
            ('ldrvsb r4,[r3, 3', [4, 0x64504000], -2406), # + override : wrong displacement
            ('strge r5, [r5, r1a', [4, 0xA4005000], -1304), # + override : wrong reg number
            ('ldrltb r6, [r5, r10, ', [4, 0xB4506000], -2407), # + override : missing shift mode
            ('strlsb r7, [r7, r2, lsl', [4, 0x94407000], -2408), # + override : missing space after shift
            ('strgt r9, [r8, r3, lsr ', [4, 0xC4009000], -2408), # + override : missing info after shift mode
            ('ldr r11, [r10, r5, ror r', [4, 0xE410B000], -1702), # + override : idem
            ('ldrb r12, [r1, r9, lsl # ', [4, 0xE450C000], -1704), # + override : unexpected space after '#'
            ('strb r13,[r9,#0xC0000034]', [4, 0xE440D000], -2411), # + override : too long immediate displacement
            ('ldr r0, [r12, #0b1002000]', [4, 0xE4100000], -1002), # + override : invalid binary digit
            ('strhi r1, [r13, #018000005]', [4, 0x84001000], -1003), # + override : invalid octal digit
            ('strlob r2, [r14, #5d4]', [4, 0x34402000], -1004), # + override : invalid decimal digit
            ('ldrplb r3, [r15, #0x4r]', [4, 0x54503000], -1005), # + override : invalid hexa digit
            ('ldrb r3, [r15, #0x400000000]', [4, 0xE4503000], -1006), # + override : too big number
            ('ldrcsb r4, [ r6, #+0]', [4, 0x25D64000], 1000), # > T34.6.3 : success base + imm. displ.
            ('ldr r5, [r6, #20]', [4, 0xE5965014], 1000), # : success base + imm. displ.
            ('str r6,[r7, #+4095]', [4, 0xE5876FFF], 1000), # : maximum positive imm. displ.
            ('ldreqb r7, [r8, #-20]', [4, 0x05587014], 1000), # : base + negative imm. displ.
            ('strccb r8, [r9, #-4095] ', [4, 0x35498FFF], 1000), # : minimum negative imm. displ.
            ('ldr r9, [r10]', [4, 0xE59A9000], 1000), # : success base only
            ('str r10,[r9,+r1]', [4, 0xE789A001], 1000), # : success base + reg. displacement
            ('str r10, [r5, r15]', [4, 0xE400A000], -2412), # + override : PC not allowed as Rm
            ('strb r11, [r0, r8, ror #]', [4, 0xE440B000], -1703), # + override : missing value after '#'
            ('ldrle r12, [r2, r10, lsr #f]', [4, 0xD410C000], -1705), # + override : unrecogn. info after '#'
            ('strmib r13, [r4, r12, ror #-20]', [4, 0x4440D000], -1706), # override : negative number of shifts
            ('ldrplb r14, [r5, r13, lsl #040]', [4, 0x5450E000], -1706), # override : too high number of shifts
            ('ldrvs r15,[r6, lr, lsr #0x1C] ', [4, 0x6796FE2E], 1000), # : success with trailing space
            ('str r0, [r5, r13, lsl #00]', [4, 0xE785000D], 1000), # : true LSL #0
            ('ldr r1, [r6, sp, lsr #0x0 ]', [4, 0xE796100D], 1000), # : converting LSR #0 into LSL #0
            ('str r2, [r7,-r1,asr #0b10101]', [4, 0xE7072AC1], 1000), # : ASR bin imm, no space after ','
            ('ldr r3 ,[r7,+r1,asr #0b0]', [4, 0xE7973001], 1000), # : converting ASR #0 into LSL #0
            ('ldrb r4,[r9, r12, ror #0x1F]', [4, 0xE7D94FEC], 1000), # : success ROR with 31 shifts
            ('strb r5, [r9, r12, ror #0x0]', [4, 0xE7C9506C], 1000) # : coding ROR #0 as RRX
            ]
# Halfword/signed memory-transfer instruction (ldrh/strh/ldrsb/ldrsh/lds*) fixtures.
# Each entry is (source line, [size, expected word encoding], status),
# where status 1000 means success and a negative value is an error code.
im3_test = [('lds', [4, 0xE0000000], -3404),          # T34.0.2 > T34.1.2 > T34.8.0 error: wrong memory transfer inst.
            ('strz', [4, 0xE0000000], -3404),         # T34.0.2 > T34.1.1 > T34.2.6 error: wrong memory transfer inst.
            ('strs', [4, 0xE0000000], -3408),         # > T34.1.1 > T34.2.5 + override : 's' not allowed for store inst.
            ('strh', [4, 0xE00000B0], -3403),         # > T34.1.1 > T34.2.4 > T34.9.0 error: missing space after inst.
            ('ldrs', [4, 0xE0000000], -3404),         # > T34.1.1 > T34.2.5 > T34.10.0 : wrong memory transfer inst.
            ('ldrh ', [4, 0xE01000B0], -3405),        # > T34.2.4 > T34.9.1 > T34.11.0 : missing destination reg
            ('ldrsb', [4, 0xE01000D0], -3403),        # > T34.2.5 > T34.10.1 > T34.9.0 : missing space after inst.
            ('ldrsh', [4, 0xE01000F0], -3403),        # > T34.2.5 > T34.10.1 > T34.9.0 : missing space after inst.
            ('ldrsi', [4, 0xE0000000], -3404),        # > T34.2.5 > T34.10.2 : missing space after inst.
            ('ldrsb ', [4, 0xE01000D0], -3405),       # > T34.10.1 > T34.9.1 > T34.11.0: missing destination reg
            ('ldrsb e', [4, 0xE01000D0], -1302),      # > T34.11.1a + override : wrong text after inst.
            ('ldrsbt', [4, 0xE01000D0], -3404),       # > T34.10.1 > T34.9.2 : wrong memory transfer inst.
            ('ldsb', [4, 0xE01000D0], -3403),         # > T34.8.2 > T34.9.0 : missing space after inst.
            ('ldsh ', [4, 0xE01000F0], -3405),        # > T34.8.2 > T34.9.1 > T34.11.0 : missing destination reg
            ('ldsu ', [4, 0xE0000000], -3404),        # T34.0.2 > T34.1.2 > T34.8.3 : wrong memory transfer inst.
            ('strneh', [4, 0x100000B0], -3403),       # > T34.2.3 > T34.4.3 > T34.9.0 : missing space after inst.
            ('ldscc', [4, 0x30000000], -3404),        # > T34.1.2 > T34.8.1 > T34.10.0 : wrong memory transfer inst.
            ('ldreqs', [4, 0x00000000], -3404),       # > T34.2.3 > T34.4.4 > T34.10.0 : wrong memory transfer inst.
            ('ldrlssb', [4, 0x901000D0], -3403),      # > T34.4.4 > T34.10.1 > T34.9.0 : missing space after inst.
            ('ldshsb r2', [4, 0x201000D0], -3406),    # > T34.9.1 > T34.11.1a error: missing ',' after destination reg
            ('ldrhsh r2,', [4, 0x201020B0], -3407),   # > T34.11.1b > T34.12.0 : missing info after dest. reg
            ('strleh r10, r12', [4, 0xD000A0B0], -2502), # T34.11.1b > T34.12.1 + override : missing '['
            ('strlsh r10, [12', [4, 0x9000A0B0], -2503), # T34.11.1b > T34.12.1 + override : missing reg after '['
            ('strloh r8, [r12', [4, 0x300080B0], -2504), # T34.11.1b > T34.12.1 + override : missing closure
            ('streqh r9, [r1,', [4, 0x000090B0], -2505), # T34.11.1b > T34.12.1 + override : missing displacement
            ('ldsccb r1,[r2]!', [4, 0x301010D0], -2510), # T34.11.1b > T34.12.1 + override: unexpected text after ']'
            ('strh r2, [r3, 3', [4, 0xE00020B0], -2506), # + override : wrong displacement
            ('ldsvch r4, [r5, r1a', [4, 0x701040F0], -1304), # + override : wrong reg number
            ('ldrvssb r5, [r7, -r6', [4, 0x601050D0], -2504), # + override : check negative reg displ.
            ('strplh r9, [r5, r10, ', [4, 0x500090B0], -2513), # + override : scaled reg. displ. not allowed
            ('ldsmib r10, [r9, #0x134]', [4, 0x4010A0D0], -2511), # + override : too long immediate displacement
            ('ldrgtsb r11 , [ r6, #+0]', [4, 0xC1D6B0D0], 1000), # > T34.11.1b > T34.12.1 success: base + imm. displ.
            ('strh r12, [r6 ,#195]', [4, 0xE1C6CCB3], 1000), # : base + imm. displ.
            ('ldrlsh r3, [r10, #-180]', [4, 0x915A3BB4], 1000), # : base + negative imm. displ.
            ('ldsgeh r13, [r8, # -80]', [4, 0xA158D5F0], 1000), # : base + negative imm. displ.
            ('ldshsb r14,[r9, #-255 ]', [4, 0x2159EFDF], 1000), # : minimum negative imm. displ.
            ('strhih pc, [r10]', [4, 0x81CAF0B0], 1000), # : success base only
            (' ldrgtsh lr, [ pc ]', [4, 0xC1DFE0F0], 1000), # : idem with trailing space
            ('ldsvsb r10,[r9,r1]', [4, 0x6199A0D1], 1000), # : success base + reg. displacement
            ('ldrlssh r0, [ sp , lr ]', [4, 0x919D00FE], 1000), # : idem with extra spaces
            ('strleh r1, [r6, -r7]', [4, 0xD10610B7], 1000), # : check negative reg displ.
            ('ldsb r9, [r5, r15]', [4, 0xE01090D0], -2512) # + override : PC not allowed as Rm
            ]
# Multiple memory-transfer instruction (ldm/stm + addressing suffixes) fixtures.
# Each entry is (source line, [size, expected word encoding], status),
# where status 1000 means success and a negative value is an error code.
imm_test = [('ldm', [4, 0xE0000000], -3404),          # T34.0.2 > T34.1.3 > T34.13.0 error: wrong memory transfer inst.
            ('stmz', [4, 0xE0000000], -3404),         # T34.0.2 > T34.1.3 > T34.13.3 error: wrong memory transfer inst.
            ('ldmia', [4, 0xE8900000], -3403),        # > T34.13.2 > T34.15.0 : missing space after inst.
            ('stmdb ', [4, 0xE9000000], -3405),       # > T34.15.1 > T34.16.0 : missing destination reg
            ('ldmibe', [4, 0xE9900000], -3404),       # > T34.13.2 > T34.15.2 : wrong memory transfer inst.
            ('ldmib e', [4, 0xE9900000], -1302),      # > T34.16.1a + override : wrong register
            ('stmne', [4, 0x10000000], -3404),        # > T34.13.1 > T34.14.0 : wrong memory transfer inst.
            ('ldmccda', [4, 0x38100000], -3403),      # > T34.14.1 > T34.15.0 : missing space after inst.
            ('ldmccde', [4, 0x30000000], -3404),      # > T34.14.2 error: missing space after inst.
            ('ldmeqia r', [4, 0x08900000], -1303),    # > T34.16.1a + override : missing reg number
            ('ldmhsfd r2', [4, 0x28900000], -3406),   # > T34.16.1a error: missing ',' after destination reg
            ('ldmhsfa r2,', [4, 0x28120000], -3407),  # > T34.16.1b > T34.18.0 : missing info after dest. reg
            ('stmhiea r2!', [4, 0x89020000], -3406),  # > T34.16.1c > T34.17.0 : missing ',' after destination reg
            ('stmhiea r2!,', [4, 0x89220000], -3407), # > T34.17.2 > T34.18.0 : missing info after dest. reg
            ('stmea r2!d', [4, 0xE9020000], -3404),   # > T34.17.3 error: wrong text after '!'
            ('stmccib r3,1', [4, 0x39830000], -1502), # > T34.18.1 + override : missing '{'
            ('ldmmied r4!, {', [4, 0x49B40000], -1503), # + override : missing registers
            ('ldmplia r5, {1', [4, 0x58950000], -1302), # + override : unknown register identifier
            ('stmneda r6! , {r', [4, 0x18260000], -1303), # > T34.17.1 + override : missing register number
            ('stmia r7,{ra', [4, 0xE8870000], -1304), # + override : wrong reg number
            ('ldmfd r8, {r0', [4, 0xE8980000], -1503), # + override : unclosed single register
            ('stmed r9, {r14,}', [4, 0xE9890000], -1504), # + override : missing register after ','
            ('ldmfd r13!, {r4-}', [4, 0xE8BD0000], -1403), # + override : missing second reg in range list
            ('ldmfd r13!, {r14, }', [4, 0xE8BD0000], -1504), # + override : missing register after ', '
            ('ldmeqda r10!, {r0}', [4, 0x083A0001], 1000), # > T34.18.1 success: single register
            ('ldmalib r11 , {r0-r5}', [4, 0xE99B003F], 1000), # : single range
            ('stmccdb r12!, {pc, r1-r2, sp-r12, r5}', [4, 0x392CB026], 1000), # : several ranges, with spaces
            ('stmea r13!, {r14,r8}', [4, 0xE92D4100], 1000), # : no space after ','
            ('ldmfd r13!, { r9 , r13 }', [4, 0xE8BD2200], 1000) # : extra spaces
            ]
# Immediate-load pseudo-instruction ('ldr rX, =value') fixtures.
# Each entry is (source line, [size, encoding(s) incl. literal-pool value], status),
# where status 1000 means success and a negative value is an error code.
iil_test = [('str r0, =', [4, 0xE4000000], -3409),    # > T34.6.2 + override : 'str' cannot use '=' loading
            ('ldrb r0,=', [4, 0xE4500000], -3409),    # > T34.6.2 + override : neither 'ldrb'
            ('ldrh r0,=', [4, 0xE01000B0], -2502),    # > T34.12.1 + override error: nor 'ldrh'
            ('ldr r0, =', [4, 0xE4100000], -3410),    # > T34.6.2 > T34.7.0 error: missing number for immediate load
            ('ldr r0, = ', [4, 0xE4100000], -3410),   # > T34.7.1 > T34.7.0 : idem with trailing space
            ('ldr r0, =t', [4, 0xE4100000], -3410),   # > T34.7.1 > T34.7.3 : idem with trailing rubbish
            ('ldr r1, =0b00130', [4, 0xE4101000], -1002), # > T34.7.2 + override: invalid binary digit
            ('ldr r2, =00180', [4, 0xE4102000], -1003), # + override: invalid octal digit
            ('ldr r3, = -18a', [4, 0xE4103000], -1004), # + override: invalid decimal digit
            ('ldr r4, =0x10GA', [4, 0xE4104000], -1005), # + override: invalid hexa digit
            ('ldr r5, =0x100000000', [4, 0xE4105000], -1006), # + override: too big number
            ('ldr r6, =+0', [4, 0xE59F6FF8, 0], 1000), # > T34.7.2 success: set a relative pc loading
            ('ldrhi r7, = 00317652', [4, 0x859F7FF8, 0x19FAA], 1000), # : octal number
            ('ldrlt lr, =-1000', [4, 0xB59FEFF8, -1000], 1000), # : negative number
            ('ldr pc, = 0x8000', [4, 0xE59FFFF8, 0x8000], 1000) # : hexa number (load PC)
            ]
# Miscellanea instruction (push/pop/clz) fixtures.
# Each entry is (source line, [size, expected word encoding], status),
# where status 1000 means success and a negative value is an error code.
imi_test = [('', [], -3501),                          # T35.0.0 error: missing miscellanea instruction
            (' ', [], -3501),                         # T35.0.1 > T35.0.0 : idem with space
            ('ldr', [], -3503),                       # T35.0.4 error: unrecognizable instruction
            ('push', [], -3502),                      # T35.0.2a error: missing operands
            (' clz', [], -3502),                      # T35.0.1 > T35.0.3a error: idem with leading space
            ('pop ', [4, 0xE8BD0000], -3502),         # > T35.0.2b > T35.2.0 : idem with a trailing space
            ('clz ', [4, 0xE1600010], -3502),         # > T35.0.3b > T35.4.0 : idem for 'clz'
            ('clz 2', [4, 0xE1600010], -1302),        # > T35.4.1a + override : unrecognizable register
            ('clz r', [4, 0xE1600010], -1303),        # > T35.4.1a + override : missing register number
            ('clz r16', [4, 0xE1600010], -1304),      # > T35.4.1a + override : too high reg number
            ('push 1', [4, 0xE92D0000], -1502),       # > T35.2.1 + override : missing '{'
            ('pop {', [4, 0xE8BD0000], -1503),        # + override : missing registers
            ('pushne {1', [4, 0x192D0000], -1302),    # + override : unknown register identifier
            ('pophs {r', [4, 0x28BD0000], -1303),     # + override : missing register number
            ('pushhi {ra', [4, 0x892D0000], -1304),   # + override : wrong reg number
            ('poplo {r0', [4, 0x38BD0000], -1503),    # + override : unclosed single register
            ('pushge {r14,}', [4, 0xA92D0000], -1504), # + override : missing register after ','
            ('popcc {r4-}', [4, 0x38BD0000], -1403),  # + override : missing second reg in range list
            ('pushvs {r14, }', [4, 0x692D0000], -1504), # + override : missing register after ', '
            ('pusheq', [4, 0xE92D0000], -3502),       # T35.0.2c > T35.1.1a error: missing operands
            ('popcce', [4, 0x38BD0000], -3504),       # T35.0.2c > T35.1.1c error: wrong text after inst.
            ('popce', [4, 0xE8BD0000], -3504),        # T35.0.2c > T35.1.2 error: wrong text after inst.
            ('pushle ', [4, 0xD92D0000], -3502),      # > T35.1.1b > T35.2.0 error: missing operands
            ('clzh', [4, 0xE1600010], -3504),         # T35.0.3c > T35.3.2 error: wrong text after inst.
            ('clzhi', [4, 0xE1600010], -3502),        # T35.0.3c > T35.3.1a error: missing operands
            ('clzhi ', [4, 0x81600010], -3502),       # > T35.3.1b > T35.4.0 err: missing operands
            ('clzhii', [4, 0x81600010], -3504),       # T35.0.3c > T35.3.1c error: wrong text after inst.
            ('clzhs r15,', [4, 0x2160F010], -3502),   # > T35.4.1b > T35.5.0 : missing operands
            ('clzhs r15 z,', [4, 0x21600010], -1304), # > T35.4.1a + override : wrong reg
            ('clzhs r15, ', [4, 0x2160F010], -3505),  # > T35.4.1c > T35.5.2 : wrong info after Rd
            ('clzls r15,r6', [4, 0x9160F016], 1000),  # > T35.4.1b > T35.5.1 : success 'clz' + cond
            ('pushls {r14}', [4, 0x992D4000], 1000),  # > T35.1.1b > T35.2.1 : success 'push' + cond
            ('pop {r0, r4-r10, r14}', [4, 0xE8BD47F1], 1000) # > T35.2.1 : success 'pop'
            ]
# Data-directive source line (address + .byte/.hword/.word/.ascii/.asciz) fixtures.
# Each entry is (source line, [address, [item size, values...]], status),
# where status 1000 means success and a negative value is an error code;
# a leading '>' in the source means "continue from the previous address".
data_arm = [('', [], -4001),                          # T40.0.0 error: missing initial hex address
            ('2', [], -4002),                         # T40.0.4 error: wrong initial address
            ('>', [], -4003),                         # T40.0.2a error: missing space after '>'
            ('>a', [], -4003),                        # T40.0.2c error: unexpected char after '>'
            (' ', [], -4001),                         # T40.0.1 > T40.0.0 error: white leading space
            ('0x', [], -2002),                        # T40.0.3 + override : leading '0x', missing hex digits
            (' 0x8001', [], -2003),                   # T40.0.1 > T40.0.3 + override : missing space after address
            (' 0x8001 ', [0x8001], -4004),            # T40.0.1 > T40.0.3 > T40.1.0 error: right address, missing info
            ('0x10002EF00 .byte 2', [], -2004),       # T40.0.3 + override : long hex address (> 2^32)
            ('0x8000.f', [], -2003),                  # T40.0.3 + override : missing space after address
            ('0x8000 .f', [0x8000], -2104),           # T40.0.3 > T40.1.1 + override : unknown data dir
            ('0x8024 .byte', [0x8024], -2102),        # T40.0.3 > T40.1.1 + override : address & directive, missing val
            ('0x8000 .byte ', [0x8000], -2102),       # T40.0.3 > T40.1.1 + override : missing data values
            ('0x8000 .byte2', [0x8000], -2103),       # T40.0.3 > T40.1.1 + override : missing space after directive
            ('0x8024 .byte 23', [0x8024, [1, 23]], 1000), # T40.0.3 > T40.1.1 success: capture one byte
            ('> ', [0x8025], -4004),                  # T40.0.2b > T40.2.0 error: missing info after '>'
            ('> .byte 2', [0x8025, [1, 2]], 1000),    # T40.0.2b > T40.2.1 success: .byte directive after '>'
            ('> .byte 3', [0x8026, [1, 3]], 1000),    # T40.0.2b > T40.2.1 success: '>' after '>'
            ('> .byte 230', [0x8027, [1, 230]], 1000), # T40.0.2b > T40.2.1 success : '>' after .byte (1 value)
            ('0x802F .byte 23, 0xCB', [0x802F, [1, 23, 0xCB]], 1000), # T40.0.3 > T40.1.1 success: capture two bytes
            ('0x802F .byte \'e\' c', [0x802F], -2105), # T40.0.3 > T40.1.1 + override : wrong delimiter
            ('0x802F .byte \'e\', c', [0x802F], -2106), # T40.0.3 > T40.1.1 + override : unrecognizeable info
            ('0x802F .byte 2000', [0x802F], -2107),   # T40.0.3 > T40.1.1 + override : data >= 2**8
            ('0x901B .hword 2300, 0xCB0', [0x901B, [2, 2300, 0xCB0]], 1000), # T40.0.2b > T40.1.1 / misaligned h
            (' > .hword 230', [0x9020, [2, 230]], 1000), # T40.0.2b > T40.2.1 '>' after .hword (2 values)
            ('0x901A .hword 2300, 0xCB0', [0x901A, [2, 2300, 0xCB0]], 1000), # T40.0.3 > T40.1.1 / aligned h
            (' > .hword 320', [0x901E, [2, 320]], 1000), # T40.0.2b > T40.2.1 '>' after .hword (h aligned)
            ('0xCbf8 .word 230000, 0xCB000', [0xCBF8, [4, 230000, 0xCB000]], 1000), # T40.0.3 > T40.1.1 / aligned w
            ('0xCbf9 .word 230000, 0xCB000', [0xCBF9, [4, 230000, 0xCB000]], 1000), # / misaligned w (1)
            ('0xCbfa .word 230000, 0xCB000', [0xCBFA, [4, 230000, 0xCB000]], 1000), # / misaligned w (2)
            ('0xCbfb .word 230000, 0xCB000', [0xCBFB, [4, 230000, 0xCB000]], 1000), # / misaligned w (3)
            ('> .word 010', [0xCC04, [4, 8]], 1000),  # T40.0.2b > T40.2.1 '>' after .word (2 values)
            ('0xa03c .ascii \'2\'', [0xA03C, [1, 50]], 1000), # T40.0.3 > T40.1.1 success: .ascii directive
            ('> .word 0x010', [0xA040, [4, 16]], 1000), # T40.0.2b > T40.2.1 '>' after .ascii (1 value)
            ('0xa03b .asciz \'2\', \"0xCB\"', [0xA03B, [1, 50, 0, 48, 120, 67, 66, 0]], 1000), # / two strings
            ('> .word 0b010', [0xA044, [4, 2]], 1000), # T40.0.2b > T40.2.1 '>' after .asciz (7 values)
            ('0xa03c .ascii \' ', [0xA03C], -1104),   # T40.0.3 > T40.1.1 + override : unclosed char
            ('0xa03c .ascii \" ', [0xA03C], -1204),   # : unclosed string
            ('0xa03c .asciz \' ', [0xA03C], -1104),   # : unclosed char
            ('0xa03c .asciz \" ', [0xA03C], -1204),   # : unclosed string
            ('0xa03c .ascii \'\'', [0xA03C], -1102),  # : empty char
            ('0xa03c .ascii \"\"', [0xA03C], -1202),  # : empty string
            ('0xa03c .asciz \'\'', [0xA03C], -1102),  # : empty char
            ('0xa03c .asciz \"\"', [0xA03C], -1202),  # : empty string
            ('0xc30a .ascii \'\t\'', [0xC30A], -1103), # : illegal character ''
            ('0xc30a .asciz \'\t\'', [0xC30A], -1103), # : idem after .ascii
            ('0xc30a .ascii \"\t\"', [0xC30A], -1203), # : illegal character ""
            ('0xc30a .asciz \" \t\"', [0xC30A], -1203), # : idem after valid char
            ('0x3000 .ascii \' t\'', [0x3000], -1105), # : more than one character
            ('0x3000 .asciz \' t\'', [0x3000], -1105), # : idem after .ascii
            ('0x1000 .byte 0b012', [0x1000], -1002),  # : unexpected binary digit
            ('0x2000 .hword 0408', [0x2000], -1003),  # : unexpected octal digit
            ('0x2000 .hword 4oo8', [0x2000], -1004),  # : unexpected decimal digit
            ('0x2000 .hword 408000', [0x2000], -2107), # : out of range dec. number
            ('0x2000 .hword -48000', [0x2000], -2107), # : out of range neg. number
            ('0x4000 .word 0x40x', [0x4000], -1005),  # : unexpected hexa digit
            ('0x4000 .word 0x400000000', [0x4000], -1006), # : too long num. (>2^32 bits)
            ('0x4000 .word 0x4, 0x', [0x4000], -1005), # : unexpected hexa digit
            ('0xfffffffc .ascii \'0\'', [0xFFFFFFFC, [1, 48]], 1000), # almost in the address space limit
            ('> .word 0b1', [0x100000000, [4, 1]], -4006), # '>' advances past 0xFFFFFFFF: address beyond 2^32 limit
            ]
idat_arm = [('0x8000 2', [0x8000], -4005), # T40.0.3 > T40.1.7 error: unrecognizable instruction
('0x8004 and', [0x8004], -3102), # T40.0.3 > T40.1.2 + override : missing operands after instr.
('0x8008 eor ', [0x8008], -3102), # T40.0.3 > T40.1.2 + override : missing operands after instr.
('0x800C sub 20,', [0x800C], -1302), # : unrecognizable operand with ','
('0x8010 rsb r', [0x8010], -1303), # : missing register number
('0x8014 add r65', [0x8014], -1304), # : too high reg number
('0x8018 adc r12', [0x8018], -2302), # : good dest reg, missing other ops
('0x801C sbc ', [0x801C], -2303), # : missing dest reg
('0x8020 rsc r1,', [0x8020], -2304), # : missing source operands
('0x8024 orr r2, ', [0x8024], -2306), # : missing source operands
('0x8028 bic r3, gu', [0x8028], -2306), # : wrong source op 1
('0x802C and r12, r3, e3', [0x802C], -2308), # : wrong op 2
('0x8030 eor r3, #', [0x8030], -1603), # : missing value after '#'
('0x8034 sub r4, # ', [0x8034], -1604), # : unexpected space after '#'
('0x8038 rsb r5, #f', [0x8038], -1605), # : unrecognizable info after '#'
('0x803C add r10, #0x1002', [0x803C], -1606), # : impossible fixup for odd rotations
('0x8040 adc r11, #\'c\' 5', [0x8040], -1607), # : unexpected text after imm val.
('0x8044 sbc r10, r1,', [0x8044], -2204), # : missing shift register
('0x8048 rsc r7, r2, lsl', [0x8048], -2205), # : missing space after shift mode
('0x804C orr r9, r4, asr x', [0x804C], -2207), # : wrong info after shift mode
('0x8050 bic r0, r8, ror #', [0x8050], -1703), # : missing value after '#'
('0x8054 and r1, r9, lsl # ', [0x8054], -1704), # : unexpected space after '#'
('0x8058 eor r2, r10, lsr #f3', [0x8058], -1705), # : unrecognizable info after '#'
('0x805C sub r4, r12, ror #-2', [0x805C], -1706), # : negative number of shifts
('0x8060 orrs', [0x8060], -3102), # : missing data instruction operands
('0x8064 teqslo', [0x8064], -3105), # : wrong text after instruction
('0x8068 cmnlyy', [0x8068], -3104), # : unknown instruction condition
('0x8068 cmnls r0, #90', [0x8068, [4, 0x9370005A]], 1000), # T40.0.3 > T40.1.2 success: 1 reg, 1 imm.
('> rsbals r6, r11, #256', [0x806C, [4, 0xE27B6C01]], 1000), # T40.0.2b > T40.2.2 success: 2 regs, 1 imm.
('> addgt r12, r12, lsl r12', [0x8070, [4, 0xC08CCC1C]], 1000), # T40.0.2b > T40.2.2 : LSL reg
('0x8080 adcs r1, r2, lsr r0 ', [0x8080, [4, 0xE0B11032]], 1000), # T40.0.3 > T40.1.2 : LSR reg with space
('> rscles pc, lr, lsr #0x1F ', [0x8084, [4, 0xD0FFFFAE]], 1000), # 40.0.2b > T40.2.2 : LSR imm with space
('0x8088 bicmis r10, r11, r12, lsl r12', [0x8088, [4, 0x41DBAC1C]], 1000), # : three regs, shift reg
('0x8088 bicmis r0, r1, r2, lsl #0', [0x8088, [4, 0x41D10002]], 1000), # : three regs, LSL #0
('0x8088 bicmis r0, r1, r2, ror #0', [0x8088, [4, 0x41D10062]], 1000), # : three regs, ROR #0 -> RRX
('> tst r7,r1, #01010', [0x808C], -2310), # > T40.2.2 + override : 3 ops with 'tst'
('> movvc r1,r9, #0xC000', [0x808C], -2311), # > T40.2.2 + override : 3 ops with 'mov'
('> tst r7, #01010', [0x808C, [4, 0xE3170F82]], 1000), # T40.0.2b > T40.2.2 : 'tst' + reg + imm
('> teqlts r7,r8,lsl #12', [0x8090, [4, 0xB1370608]], 1000), # T40.0.2b > T40.2.2 : 'teq'+reg+shifted reg
('> mov r2, #-100', [0x8094, [4, 0xE3E02063]], 1000), # T40.0.2b > T40.2.2 : 'mov' + reg + NOT imm
('> and r4, #-250', [0x8098, [4, 0xE3C440F9]], 1000), # T40.0.2b > T40.2.2 : 'and' + reg + NOT imm
('> add r6, #-3120', [0x809C, [4, 0xE2466EC3]], 1000), # T40.0.2b > T40.2.2 : 'add' + reg + NOT imm
('0xA0008 cmp r8, #-1004', [0xA0008, [4, 0xE3780FFB]], 1000), # T40.0.3 > T40.1.2 : 'cmp' + reg + NOT imm
('> .byte -1', [0xA000C, [1, 255]], 1000), # T40.0.2b > T40.2.1 : automatic inc. +1
('> bics r5, #-255', [0xA0010, [4, 0xE21550FE]], 1000), # T40.0.2b > T40.2.2 : adjust adr. 3 bytes
('> .hword -2', [0xA0014, [2, 65534]], 1000), # T40.0.2b > T40.2.1 : automatic inc. +2
('> movvss r9,#0xC0000', [0xA0018, [4, 0x63B09703]], 1000), # T40.0.2b > T40.2.2 : adjust adr. 2 bytes
(' > .byte -1, -2, -3', [0xA001C, [1, 255, 254, 253]], 1000), # T40.0.2b > T40.2.1 : automatic inc. +3
(' > cmnne r5, #-256', [0xA0020, [4, 0x13550C01]], 1000), # T40.0.2b > T40.2.2 : adjust adr. 1 byte
('> r5, #-256', [0xA0024], -4005), # T40.0.2b > T40.2.7 : unrecognized inst.
('0xA0025 cmp r9, #1004', [0xA0025, [4, 0xE3590FFB]], 1000), # warning : address missaligned 1 byte
('0xA0026 cmp r10, #1008', [0xA0026, [4, 0xE35A0E3F]], 1000), # warning : address missaligned 1 byte
(' 0xA0027 cmp r11, #1012', [0xA0027, [4, 0xE35B0FFD]], 1000), # warning : address missaligned 1 byte
('0x8068 .word -4', [0x8068, [4, 4294967292]], 1000) # final test: set auto-address as before the first
# test in this series that makes use of '>'
]
imul_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
('> ', [0x8000], -4005), # T40.0.2b > T40.1.7 error: unrecognizable instruction
('> 2', [0x8000], -4005), # T40.0.2b > T40.1.7 error: unrecognizable instruction
('> mul', [0x8000], -3202), # T40.0.2b > T40.2.3 + override : missing operands after instr.
('> mla ', [0x8000], -3202), # T40.0.2b > T40.2.3 + override : missing operands after instr.
('> umull 2', [0x8000], -1302), # : wrong register
('> umull 2,', [0x8000], -1302), # : wrong register with ','
('> umull r', [0x8000], -1303), # : missing register number
('> smull r65', [0x8000], -1304), # : too high reg number
('> umlal r12', [0x8000], -3202), # : missing other regs
('> mul ', [0x8000], -1301), # : missing other regs
('0x90FC mul r1,', [0x90FC], -3202), # : missing source operands
('> mla r2, ', [0x8000], -1301), # : missing source operands
('> smlal r3, gu', [0x8000], -1302), # : wrong reg2
('> umlal r12, r3, e3', [0x8000], -1302), # : wrong reg3
('> mul r3, r4, r5, r6', [0x8000], -3207), # : four registers with 'mul'
('> smlal r3, r4, r5, ', [0x8000], -1301), # : missing reg4
('> mla r3, r4, r5', [0x8000], -3202), # : three regs with 'mla'
('> mul r1, r10, r8', [0x8000, [4, 0xE001089A]], 1000), # success: three regs with 'mul'
('0xA000 mla r13, r14, r0, r0', [0xA000, [4, 0xE02D009E]], 1000), # success: four regs with 'mla'
('> umull sp, lr, r12, r13', [0xA004, [4, 0xE08EDD9C]], 1000), # success: four regs with 'umull'
('> mul r10, pc, r7', [0xA008], -3208), # + override: use of PC as Rm
('> smulllex r10, r11, lr, r10', [0xA008], -3205), # + override: error after cond
('> mulz', [0xA008], -3204) # + override: wrong text after
]
ijmp_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
('> blo', [0x8000], -3302), # T40.0.2b > T40.2.4 + override: missing offset
('0x9004 bleq ', [0x9004], -3302), # T40.0.3 > T40.1.4 + override : missing offset
('> blox', [0x8000], -4005), # T40.0.2b > T40.2.4 + override: unexpected text after inst
('0xA0000 bx', [0xA0000], -3304), # T40.0.3 > T40.1.4 + override : missing reg after instr.
('> blxo', [0x8000], -4005), # T40.0.2b > T40.2.4 + override: unexpected text after inst
('0x10 blt f', [0x10], -3305), # T40.0.3 > T40.1.4 + override : wrong offset
('> bls 0b12', [0x8000], -1002), # T40.0.3 > T40.1.4 + override : unexpected binary digit
('> blls 0192', [0x8000], -1003), # : unexpected octal digit
('> bllo -192a', [0x8000], -1004), # : unexpected decimal digit
('> blvc 0xA3G0', [0x8000], -1005), # : unexpected hexa digit
('> bvc 0xA30000000', [0x8000], -1006), # : too long hex address
('> bxvc 0xA300', [0x8000], -1302), # : unrecognized reg
('> blxcc r', [0x8000], -1303), # : missing reg number
('> bxcc rf', [0x8000], -1304), # : wrong reg number
('> bxmi r16', [0x8000], -1304), # : wrong reg number
('> blgt 0x1302', [0x8000], -3307), # : misaligned address
('> bllt 0x73000000', [0x8000], -3308), # : out of range offset
('> blal -73000000', [0x8000], -3308), # : out of range neg. offset
('> bal -7300001', [0x8000], -3307), # : misaligned negative address
('> bx r6 ', [0x8000, [4, 0xE12FFF16]], 1000), # T40.0.2b > T40.2.4 success: 'bx' jump
('> blxpl r6', [0x8004, [4, 0x512FFF36]], 1000), # : 'blx' jump
('0x7A0C blxlt r15', [0x7A0C, [4, 0xB12FFF3F]], 1000), # > T40.1.4 warning: use of pc (r15)
('> b 0xA300', [0x7A10, [4, 0xEA000A3A]], 1000), # > T40.2.4 success: 'b' jump
('0xFFF8 bl 1300', [0xFFF8, [4, 0xEBFFC145]], 1000), # > T40.1.4 success: 'bl' negative jump
('> blt 073000000', [0xFFFC, [4, 0xBA3ABFFF]], 1000), # > T40.2.4 success: 'blt' octal jump
('> bleq 0x730000', [0x10000, [4, 0x0B1C7FFE]], 1000), # > T40.2.4 success: 'bleq' hexa jump
('0x7FF8 bhi 0xA30000', [0x7FF8, [4, 0x8A28A000]], 1000), # > T40.1.4 success: 'bhi' jump
('> bge 0x2008000', [0x7FFC, [4, 0xAA7FFFFF]], 1000), # : forward jump limit
('0x2000000 blhs 0x8', [0x2000000, [4, 0x2B800000]], 1000), # : backward jump limit
('0x400000 blhs 0xC', [0x400000, [4, 0x2BF00001]], 1000), # : another backward jump
('0x4000 blhi 0x4000', [0x4000, [4, 0x8BFFFFFE]], 1000), # : jump onto same address
('0x4000 blhi 0x4008', [0x4000, [4, 0x8B000000]], 1000), # : jump onto advanced pc
('0x4001 blhi 0x4008', [0x4001, [4, 0x8BFFFFFF]], 1000) # : jump from misaligned adr.
]
imem_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
('> ld', [0x8000], -4005), # T40.0.2b > T40.2.5 + override: missing inst. continuation
('> st ', [0x8000], -4005), # + override: missing inst. continuation
('> str', [0x8000], -3403), # + override: missing space after inst.
('> ldr ', [0x8000], -3405), # + override: missing destination register
('> sts', [0x8000], -3408), # + override: 's' not allowed for store inst.
('> ldx', [0x8000], -4005), # + override: unrecognized mem. transfer inst.
('> ldrby', [0x8000], -3404), # + override: wrong text after inst.
('> ldrb e', [0x8000], -1302), # + override: unknown reg
('> str r', [0x8000], -1303), # + override: missing reg number
('> ldr rb', [0x8000], -1304), # + override: wrong reg number
('> ldrb r1', [0x8000], -3406), # + override: missing ',' after dest. reg
('> strb r2,', [0x8000], -3407), # + override: missing info after dest. reg
('> ldrhsb r2, 2', [0x8000], -2402), # + override: missing '['
('> strvcb r3, [', [0x8000], -2403), # + override: missing info after '['
('> ldrge r4, [2', [0x8000], -2403), # + override: unrecognizable register
('> strltb r5,[r', [0x8000], -1303), # + override: missing register number
('> ldrvc r6, [r16', [0x8000], -1304), # + override: too high reg number
('> ldr lr, [r12', [0x8000], -2404), # + override: good base reg, missing closure
('> ldrb r15, [r1,', [0x8000], -2405), # + override: missing displacement
('> strb pc, [r2]!', [0x8000], -2410), # + override: unexpected text after ']'
('> ldrvsb r4,[r3, 3', [0x8000], -2406), # + override: wrong displacement
('> ldrltb r6, [r5, r10, ', [0x8000], -2407), # + override: missing shift mode
('> strlsb r7, [r7, r2, lsl', [0x8000], -2408), # + override: missing space after shift
('> ldr r11, [r10, r5, ror r', [0x8000], -1702), # + override: missing info after shift mode
('> ldrb r12, [r1, r9, lsl # ', [0x8000], -1704), # + override: unexpected space after '#'
('> strb r13,[r9,#0xC0000034]', [0x8000], -2411), # + override: too long immediate displacement
('> ldr r0, [r12, #0b1002000]', [0x8000], -1002), # + override: invalid binary digit
('> strhi r1, [r13, #018000005]', [0x8000], -1003), # + override: invalid octal digit
('> strlob r2, [r14, #5d4]', [0x8000], -1004), # + override: invalid decimal digit
('> ldrplb r3, [r15, #0x4r]', [0x8000], -1005), # + override: invalid hexa digit
('> ldrb r3, [r15, #0x400000000]', [0x8000], -1006), # + override: too big number
('> ldrcsb r4, [ r6, #+0]', [0x8000, [4, 0x25D64000]], 1000), # success: base + imm. displ.
('> ldr r5, [r6, #20]', [0x8004, [4, 0xE5965014]], 1000), # success: base + imm. displ.
('> str r6,[r7, #+4095]', [0x8008, [4, 0xE5876FFF]], 1000), # success: maximum positive imm. displ.
('> ldreqb r7, [r8, #-20]', [0x800C, [4, 0x05587014]], 1000), # success: base + negative imm. displ.
('> strccb r8, [r9, #-4095] ', [0x8010, [4, 0x35498FFF]], 1000), # : minimum negative imm. displ.
('> ldr r9, [r10]', [0x8014, [4, 0xE59A9000]], 1000), # : base only
('> str r10,[r9,+r1]', [0x8018, [4, 0xE789A001]], 1000), # : base + reg. displacement
('> str r10, [r5, r15]', [0x801C], -2412), # + override: PC not allowed as Rm
('> strb r11, [r0, r8, ror #]', [0x801C], -1703), # + override: missing value after '#'
('> ldrle r12, [r2, r10, lsr #f]', [0x801C], -1705), # + override: unrecogn. info after '#'
('> strmib r13, [r4, r12, ror #-20]', [0x801C], -1706), # + override: negative number of shifts
('> ldrplb r14, [r5, r13, lsl #040]', [0x801C], -1706), # + override: too high number of shifts
('> ldrvs r15,[r6, lr, lsr #0x1C] ', [0x801C, [4, 0x6796FE2E]], 1000), # success: with trailing space
('> str r0, [r5, r13, lsl #00]', [0x8020, [4, 0xE785000D]], 1000), # success: true LSL #0
('0x904A ldr r1, [r6, sp, lsr #0x0 ]', [0x904A, [4, 0xE796100D]], 1000), # : converting LSR #0 into LSL #0
('> str r2, [r7,-r1,asr #0b10101]', [0x9050, [4, 0xE7072AC1]], 1000), # : ASR bin imm, no space after ','
('0x8090 ldr r3 ,[r7,+r1,asr #0b0]', [0x8090, [4, 0xE7973001]], 1000), # : converting ASR #0 into LSL #0
('> ldrb r4,[r9, r12, ror #0x1F]', [0x8094, [4, 0xE7D94FEC]], 1000), # : success ROR with 31 shifts
('> strb r5, [r9, r12, ror #0x0]', [0x8098, [4, 0xE7C9506C]], 1000), # : coding ROR #0 as RRX
('> lds', [0x809C], -3404), # + override: wrong memory transfer inst.
('> strz', [0x809C], -3404), # + override: wrong memory transfer inst.
('> strs', [0x809C], -3408), # + override: 's' not allowed for store inst.
('> ldrsb e', [0x809C], -1302), # + override: wrong text after inst.
('> strleh r10, r12', [0x809C], -2502), # + override: missing '['
('> strlsh r10, [12', [0x809C], -2503), # + override: missing reg after '['
('> strloh r8, [r12', [0x809C], -2504), # + override: missing closure
('> streqh r9, [r1,', [0x809C], -2505), # + override: missing displacement
('> ldsccb r1,[r2]!', [0x809C], -2510), # + override: unexpected text after ']'
('> strh r2, [r3, 3', [0x809C], -2506), # + override: wrong displacement
('> strplh r9, [r5, r10, ', [0x809C], -2513), # + override: scaled reg. displ. not allowed
('> ldsmib r10, [r9, #0x134]', [0x809C], -2511), # + override: too long immediate displacement
('> ldsb r9, [r5, r15]', [0x809C], -2512), # + override: PC not allowed as Rm
('> ldrgtsb r11 , [ r6, #+0]', [0x809C, [4, 0xC1D6B0D0]], 1000), # success: base + imm. displ.
('0x20030 strh r12, [r6 ,#195]', [0x20030, [4, 0xE1C6CCB3]], 1000), # success: base + imm. displ.
('0x2000 ldrlsh r3, [r10, #-180]', [0x2000, [4, 0x915A3BB4]], 1000), # : base + negative imm. displ.
('> stmz', [0x2004], -3404), # + override: wrong memory transfer inst.
('> ldmia', [0x2004], -3403), # + override: missing space after inst.
('> stmdb ', [0x2004], -3405), # + override: missing destination reg
('> ldmhsfd r2', [0x2004], -3406), # + override: missing ',' after destination reg
('> ldmhsfa r2,', [0x2004], -3407), # + override: missing info after dest. reg
('> stmccib r3,1', [0x2004], -1502), # + override: missing '{'
('> ldmmied r4!, {', [0x2004], -1503), # + override: missing registers
('> stmed r9, {r14,}', [0x2004], -1504), # + override: missing register after ','
('> ldmfd r13!, {r4-}', [0x2004], -1403), # + override: missing second reg in range list
('0x70FC ldmalib r11 , {r0-r5}', [0x70FC, [4, 0xE99B003F]], 1000), # success: single range
('> stmccdb r12!, {pc, r1-r2, sp-r12, r5}', [0x7100, [4, 0x392CB026]], 1000), # : several ranges, with spcs
('> str r0, =', [0x7104], -3409), # + override: 'str' cannot use '=' loading
('> ldrh r0,=', [0x7104], -2502), # + override: nor 'ldrh'
('> ldr r0, =t', [0x7104], -3410), # + override: idem with tranling rubbish
('> ldr r5, =0x100000000', [0x7104], -1006), # + override: too big number
('> ldr r6, =+0', [0x8104, [4, 0], 0x7104, [4, 0xE59F6FF8]], 1000), # success: set a relative pc loading
('> ldrhi r7, = 00317652', [0x8108, [4, 0x19FAA], 0x7108, [4, 0x859F7FF8]], 1000), # : octal number
('0x801C ldrlt lr, =-1000', [0x901C, [4, 0xFFFFFC18], 0x801C, [4, 0xB59FEFF8]], 1000), # : negative number
('> ldr pc, = 0x8000', [0x9020, [4, 0x8000], 0x8020, [4, 0xE59FFFF8]], 1000), # : hexa num. (load PC)
('0x801A ldrgt lr, =0x1FF80', [0x901A, [4, 0x1FF80], 0x801A, [4, 0xC59FEFF8]], 1000), # : explicit misalign
('> ldr sp , =0x80000', [0x9020, [4, 0x80000], 0x8020, [4, 0xE59FDFF8]], 1000), # : implicit misalign
('0xfffffffc .ascii \'1\'', [0xFFFFFFFC, [1, 49]], 1000), # almost in the address space limit
('> ldr r0, =8', [0x100001000, [4, 8], 0x100000000, [4, 0xE59F0FF8]], -4006), # crossing addr. space limit
('0xffffeffc .ascii \'2\'', [0xFFFFEFFC, [1, 50]], 1000), # almost in the address space limit
('> ldr r2,=-8', [0x100000000, [4, 0xFFFFFFF8], 0xFFFFF000, [4, 0xE59F2FF8]], -4006) # crossing addr. limit
]
imsc_arm = [('0x7FFC .word -4', [0x7FFC, [4, 4294967292]], 1000), # set auto-address as before the first use of '>'
('> push', [0x8000], -3502), # T40.0.2b > T40.2.6 + override : missing operands
('0x8000 clz 2', [0x8000], -1302), # T40.0.3 > T40.1.6 + override : unrecognizable register
('> clz r', [0x8000], -1303), # + override : missing register number
('> clz r16', [0x8000], -1304), # + override : too high reg number
('> push 1', [0x8000], -1502), # + override : missing '{'
('> pop {', [0x8000], -1503), # + override : missing registers
('> pushge {r14,}', [0x8000], -1504), # + override : missing register after ','
('> popcc {r4-}', [0x8000], -1403), # + override : missing second reg in range list
('0x9004 popcce', [0x9004], -3504), # + override : wrong text after inst.
('> clzhs r15, ', [0x8000], -3505), # + override : wrong info after Rd
('> clzls r15,r6', [0x8000, [4, 0x9160F016]], 1000), # success : 'clz' + cond
('0xA00 pushls {r14}', [0xA00, [4, 0x992D4000]], 1000), # success : 'push' + cond
('> pop {r0, r4-r10, r14}', [0xA04, [4, 0xE8BD47F1]], 1000) # success : 'pop'
]
test_groups = [(number_analyzer, hex_test, 'hexadecimal numbers'),
(number_analyzer, dec_test, 'decimal numbers'),
(number_analyzer, oct_test, 'octal numbers'),
(number_analyzer, bin_test, 'binary numbers'),
(char_analyzer, chr_test, 'single quoted chars'),
(string_analyzer, str_test, 'double quoted strings'),
(data_analyzer, dat_test, 'data directives'),
(address_analyzer, adr_test, 'hex addresses'),
(register_analyzer, reg_test, 'register identifiers'),
(regbit_analyzer, rbt_test, 'registers bit mask'),
(reglst_analyzer, rlt_test, 'registers list mask'),
(immediate_op_analyzer, imo_test, 'immediate operand'),
(immediate_sr_analyzer, ims_test, 'immediate shift register'),
(op2_analyzer, op2_test, 'second operand'),
(opdat_analyzer, opd_test, 'data instruction operands'),
(instdat_analyzer, idt_test, 'data instructions'),
(instmul_analyzer, iml_test, 'multiplication instructions'),
(instjmp_analyzer, ibr_test, 'branch instructions'),
(opldst2_analyzer, am2_test, 'addressing mode 2'),
(opldst3_analyzer, am3_test, 'addressing mode 3'),
(instmem_analyzer, im2_test, 'memory transfer instructions, addressing mode 2'),
(instmem_analyzer, im3_test, 'memory transfer instructions, addressing mode 3'),
(instmem_analyzer, imm_test, 'memory transfer instructions, multiple registers'),
(instmem_analyzer, iil_test, 'memory transfer instructions, immediate load'),
(instmsc_analyzer, imi_test, 'miscellanea instructions'),
(arm_analyzer, data_arm, 'arm data directives'),
(arm_analyzer, idat_arm, 'arm data instructions'),
(arm_analyzer, imul_arm, 'arm multiplication instructions'),
(arm_analyzer, ijmp_arm, 'arm branch instructions'),
(arm_analyzer, imem_arm, 'arm memory transfer instructions'),
(arm_analyzer, imsc_arm, 'arm miscellanea instructions')
]
| true
| true
|
1c4a4a032bbb9e6042445ab08cd24531a80ce7bd
| 2,275
|
py
|
Python
|
docs/source/conf.py
|
Gemicai/Gemicai
|
1ce3be768979acc7251b4108a59292cba99624d1
|
[
"MIT"
] | 5
|
2020-11-16T11:06:51.000Z
|
2021-02-23T04:54:30.000Z
|
docs/source/conf.py
|
Gemicai/Gemicai
|
1ce3be768979acc7251b4108a59292cba99624d1
|
[
"MIT"
] | 1
|
2021-08-24T16:21:30.000Z
|
2021-08-24T16:21:30.000Z
|
docs/source/conf.py
|
Gemicai/Gemicai
|
1ce3be768979acc7251b4108a59292cba99624d1
|
[
"MIT"
] | 1
|
2021-02-23T04:54:31.000Z
|
2021-02-23T04:54:31.000Z
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
# -- Project information -----------------------------------------------------
project = 'Gemicai'
copyright = '2020, Kevin Alberts, Niek Heinen, Mateusz Jaworski, Sieta de Jong'
author = 'Kevin Alberts, Niek Heinen, Mateusz Jaworski, Sieta de Jong'
# The full version, including alpha/beta/rc tags
release = '0.5.0'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
import sphinx_glpi_theme
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'glpi'
html_theme_path = sphinx_glpi_theme.get_html_themes_path()
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
| 37.295082
| 88
| 0.665495
|
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
project = 'Gemicai'
copyright = '2020, Kevin Alberts, Niek Heinen, Mateusz Jaworski, Sieta de Jong'
author = 'Kevin Alberts, Niek Heinen, Mateusz Jaworski, Sieta de Jong'
release = '0.5.0'
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
]
templates_path = ['_templates']
exclude_patterns = []
import sphinx_glpi_theme
html_theme = 'glpi'
html_theme_path = sphinx_glpi_theme.get_html_themes_path()
html_static_path = ['_static']
| true
| true
|
1c4a4bffc43ac8052868c75e692429c7af2a1d60
| 3,508
|
py
|
Python
|
testproject/testproject/settings.py
|
io-ma/django-groups-manager
|
5ab1e098ac44b319b166b529e7a46c6a83e5ddac
|
[
"MIT"
] | 1
|
2020-08-20T00:25:26.000Z
|
2020-08-20T00:25:26.000Z
|
testproject/testproject/settings.py
|
dpineiden/django-groups-manager
|
d02361e6f2825c174410db676ec3fb28c54e0256
|
[
"MIT"
] | null | null | null |
testproject/testproject/settings.py
|
dpineiden/django-groups-manager
|
d02361e6f2825c174410db676ec3fb28c54e0256
|
[
"MIT"
] | null | null | null |
"""
Django settings for testproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import django
try:
import guardian
has_guardian = True
except ImportError:
has_guardian = False
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9vg3q-kbo(p^zpom4!*o8*%tfu-14o=3++txo+sxwto)2@=qd='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
# Uncomment for testing templates, and after a `pip install django-bootstrap3`
# 'bootstrap3',
# App test
'groups_manager',
'testproject',
)
if has_guardian:
INSTALLED_APPS += ('guardian',)
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
MIDDLEWARE_CLASSES = MIDDLEWARE
# django-guardian required settings
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'guardian.backends.ObjectPermissionBackend',
)
ANONYMOUS_USER_ID = -1
ROOT_URLCONF = 'testproject.urls'
WSGI_APPLICATION = 'testproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
SESSION_COOKIE_NAME = "testproject"
LOGIN_URL = '/admin/login/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
]
},
},
]
# Uncomment for testing application settings
"""
GROUPS_MANAGER = {
'AUTH_MODELS_SYNC': True,
'AUTH_MODELS_GET_OR_CREATE': False,
'GROUP_NAME_PREFIX': '',
'GROUP_NAME_SUFFIX': '',
'USER_USERNAME_PREFIX': '',
'USER_USERNAME_SUFFIX': '',
'PERMISSIONS': {
'owner': ['view', 'change', 'delete'],
'group': ['view', 'change'],
'groups_upstream': ['view'],
'groups_downstream': [],
'groups_siblings': ['view'],
},
}
"""
| 23.386667
| 82
| 0.686431
|
import os
import django
try:
import guardian
has_guardian = True
except ImportError:
has_guardian = False
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = '9vg3q-kbo(p^zpom4!*o8*%tfu-14o=3++txo+sxwto)2@=qd='
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
# Uncomment for testing templates, and after a `pip install django-bootstrap3`
# 'bootstrap3',
# App test
'groups_manager',
'testproject',
)
if has_guardian:
INSTALLED_APPS += ('guardian',)
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
MIDDLEWARE_CLASSES = MIDDLEWARE
# django-guardian required settings
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'guardian.backends.ObjectPermissionBackend',
)
ANONYMOUS_USER_ID = -1
ROOT_URLCONF = 'testproject.urls'
WSGI_APPLICATION = 'testproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
SESSION_COOKIE_NAME = "testproject"
LOGIN_URL = '/admin/login/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
]
},
},
]
# Uncomment for testing application settings
| true
| true
|
1c4a4c5663dcdc3e1fe9bb9a01798918c54fc1ba
| 1,609
|
py
|
Python
|
dataset/dataset_inspect.py
|
Lsplastic/Tensorflow_ssd
|
f2935079fb8d2cd2288ef5f7a415749243f34542
|
[
"Apache-2.0"
] | null | null | null |
dataset/dataset_inspect.py
|
Lsplastic/Tensorflow_ssd
|
f2935079fb8d2cd2288ef5f7a415749243f34542
|
[
"Apache-2.0"
] | null | null | null |
dataset/dataset_inspect.py
|
Lsplastic/Tensorflow_ssd
|
f2935079fb8d2cd2288ef5f7a415749243f34542
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Changan Wang
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
def count_split_examples(split_path, file_prefix='.tfrecord'):
# Count the total number of examples in all of these shard
num_samples = 0
tfrecords_to_count = tf.gfile.Glob(os.path.join(split_path, file_prefix))
opts = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.ZLIB)
for tfrecord_file in tfrecords_to_count:
for record in tf.python_io.tf_record_iterator(tfrecord_file):#, options = opts):
num_samples += 1
return num_samples
if __name__ == '__main__':
print('train:', count_split_examples('/media/rs/7A0EE8880EE83EAF/Detections/SSD/dataset/tfrecords', 'train-?????-of-?????'))
print('val:', count_split_examples('/media/rs/7A0EE8880EE83EAF/Detections/SSD/dataset/tfrecords', 'val-?????-of-?????'))
| 44.694444
| 129
| 0.698571
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
def count_split_examples(split_path, file_prefix='.tfrecord'):
num_samples = 0
tfrecords_to_count = tf.gfile.Glob(os.path.join(split_path, file_prefix))
opts = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.ZLIB)
for tfrecord_file in tfrecords_to_count:
for record in tf.python_io.tf_record_iterator(tfrecord_file):
num_samples += 1
return num_samples
if __name__ == '__main__':
print('train:', count_split_examples('/media/rs/7A0EE8880EE83EAF/Detections/SSD/dataset/tfrecords', 'train-?????-of-?????'))
print('val:', count_split_examples('/media/rs/7A0EE8880EE83EAF/Detections/SSD/dataset/tfrecords', 'val-?????-of-?????'))
| true
| true
|
1c4a4c7893d539bc2653916b6c930594e9f82081
| 466
|
py
|
Python
|
data/scripts/templates/object/tangible/ship/attachment/booster/shared_ywing_booster_s01.py
|
obi-two/GameServer
|
7d37024e2291a97d49522610cd8f1dbe5666afc2
|
[
"MIT"
] | 20
|
2015-02-23T15:11:56.000Z
|
2022-03-18T20:56:48.000Z
|
data/scripts/templates/object/tangible/ship/attachment/booster/shared_ywing_booster_s01.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | null | null | null |
data/scripts/templates/object/tangible/ship/attachment/booster/shared_ywing_booster_s01.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | 20
|
2015-04-04T16:35:59.000Z
|
2022-03-24T14:54:37.000Z
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/attachment/booster/shared_ywing_booster_s01.iff"
result.attribute_template_id = 8
result.stfName("item_n","ship_attachment")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
| 27.411765
| 89
| 0.736052
| true
| true
|
|
1c4a4ca689afea9e81b98745e6c04f99db6d9b09
| 148
|
py
|
Python
|
app/rockband/apps.py
|
solattila/rock-band-api
|
1521b2913b75c53310ba1b71d77d599966237483
|
[
"MIT"
] | null | null | null |
app/rockband/apps.py
|
solattila/rock-band-api
|
1521b2913b75c53310ba1b71d77d599966237483
|
[
"MIT"
] | null | null | null |
app/rockband/apps.py
|
solattila/rock-band-api
|
1521b2913b75c53310ba1b71d77d599966237483
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class RockbandConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'rockband'
| 21.142857
| 56
| 0.763514
|
from django.apps import AppConfig
class RockbandConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'rockband'
| true
| true
|
1c4a4df4c0837afb763b5667c2f4dc1f6cf6ab1e
| 1,129
|
py
|
Python
|
python/meas_smag.py
|
dkkim1005/Neural_Network_Quantum_State
|
7e94929c5ef65ce87f63bf20c81acaa524adca82
|
[
"Unlicense"
] | null | null | null |
python/meas_smag.py
|
dkkim1005/Neural_Network_Quantum_State
|
7e94929c5ef65ce87f63bf20c81acaa524adca82
|
[
"Unlicense"
] | null | null | null |
python/meas_smag.py
|
dkkim1005/Neural_Network_Quantum_State
|
7e94929c5ef65ce87f63bf20c81acaa524adca82
|
[
"Unlicense"
] | 1
|
2022-01-26T05:13:38.000Z
|
2022-01-26T05:13:38.000Z
|
#!/usr/bin/env python3
import numpy as np
from pynqs import sampler
floatType = 'float32'
symmType = 'tr'
# hyper parameter sets of rbm and MCMC sampler
kwargs = {
'nInputs' : 16,
'nHiddens' : 4,
'nChains' : 1000,
'seedNumber' : 0,
'seedDistance' : 123456789,
'init_mcmc_steps' : 300
}
# transverse-field strengthes
hfield = '-1.1'
# functor to locate a path of the file
filepath = './temp/build/RBMTrSymmCH-N%dA%dH%sV1'\
%(kwargs['nInputs'], kwargs['nHiddens'], hfield)
kwargs['path_to_load'] = filepath
# total number of measurements
nmeas = 1000
# number of Monte-Carlo steps
nms = 20
# range of the error bar (95% confidence)
Z = 2
rbm = sampler.RBM(floatType = floatType, symmType = symmType)
rbm.init(**kwargs)
mag = np.zeros([nmeas], dtype = floatType)
for i in range(nmeas):
print ('# of measurements: %d'%i, end = '\r')
rbm.do_mcmc_steps(nms)
spinStates = rbm.get_spinStates()
mag[i] = np.mean(np.abs(np.mean(spinStates, axis = 1)))
mag_mean = np.mean(mag)
mag_err = Z*np.sqrt(np.sum((mag - mag_mean)**2)/(nmeas*(nmeas-1)))
print ('<|m|> : %.5E'%mag_mean, ' +/- %.3E'%mag_err)
| 27.536585
| 66
| 0.662533
|
import numpy as np
from pynqs import sampler
floatType = 'float32'
symmType = 'tr'
kwargs = {
'nInputs' : 16,
'nHiddens' : 4,
'nChains' : 1000,
'seedNumber' : 0,
'seedDistance' : 123456789,
'init_mcmc_steps' : 300
}
hfield = '-1.1'
filepath = './temp/build/RBMTrSymmCH-N%dA%dH%sV1'\
%(kwargs['nInputs'], kwargs['nHiddens'], hfield)
kwargs['path_to_load'] = filepath
nmeas = 1000
nms = 20
Z = 2
rbm = sampler.RBM(floatType = floatType, symmType = symmType)
rbm.init(**kwargs)
mag = np.zeros([nmeas], dtype = floatType)
for i in range(nmeas):
print ('# of measurements: %d'%i, end = '\r')
rbm.do_mcmc_steps(nms)
spinStates = rbm.get_spinStates()
mag[i] = np.mean(np.abs(np.mean(spinStates, axis = 1)))
mag_mean = np.mean(mag)
mag_err = Z*np.sqrt(np.sum((mag - mag_mean)**2)/(nmeas*(nmeas-1)))
print ('<|m|> : %.5E'%mag_mean, ' +/- %.3E'%mag_err)
| true
| true
|
1c4a4f10c49f9126358b074d166c4bcbaae00b6b
| 6,641
|
py
|
Python
|
pysph/sph/tests/test_linalg.py
|
nauaneed/pysph
|
9cb9a859934939307c65a25cbf73e4ecc83fea4a
|
[
"BSD-3-Clause"
] | 293
|
2017-05-26T14:41:15.000Z
|
2022-03-28T09:56:16.000Z
|
pysph/sph/tests/test_linalg.py
|
nauaneed/pysph
|
9cb9a859934939307c65a25cbf73e4ecc83fea4a
|
[
"BSD-3-Clause"
] | 217
|
2017-05-29T15:48:14.000Z
|
2022-03-24T16:16:55.000Z
|
pysph/sph/tests/test_linalg.py
|
nauaneed/pysph
|
9cb9a859934939307c65a25cbf73e4ecc83fea4a
|
[
"BSD-3-Clause"
] | 126
|
2017-05-25T19:17:32.000Z
|
2022-03-25T11:23:24.000Z
|
from pysph.sph.wc.linalg import (
augmented_matrix, gj_solve, mat_mult, mat_vec_mult
)
import numpy as np
import unittest
def gj_solve_helper(a, b, n):
m = np.zeros((n, n+1)).ravel().tolist()
augmented_matrix(a, b, n, 1, n, m)
result = [0.0]*n
is_singular = gj_solve(m, n, 1, result)
return is_singular, result
class TestLinalg(unittest.TestCase):
def _to_array(self, x, shape=None):
x = np.asarray(x)
if shape:
x.shape = shape
return x
def test_augmented_matrix(self):
# Given
a = np.random.random((3, 3))
b = np.random.random((3, 2))
res = np.zeros((3, 5)).ravel().tolist()
expect = np.zeros((3, 5))
expect[:, :3] = a
expect[:, 3:] = b
# When
augmented_matrix(a.ravel(), b.ravel(), 3, 2, 3, res)
res = self._to_array(res, (3, 5))
# Then
np.testing.assert_array_almost_equal(res, expect)
def test_augmented_matrix_with_lower_dimension(self):
# Given
a = np.random.random((3, 3))
b = np.random.random((3, 2))
res = np.zeros((3, 5)).ravel().tolist()
expect = np.zeros((2, 4))
expect[:, :2] = a[:2, :2]
expect[:, 2:] = b[:2, :]
expect.resize((3, 5), refcheck=False)
# When
augmented_matrix(a.ravel(), b.ravel(), 2, 2, 3, res)
res = self._to_array(res, (3, 5))
# Then
np.testing.assert_array_almost_equal(res, expect)
def test_augmented_matrix_with_gjsolve_with_lower_dimension(self):
# Given
nmax = 3
mat = np.array([[7., 4., 2.], [8., 9., 4.], [1., 4., 10.]])
b = np.array([5., 4., 2.])
expect = np.linalg.solve(mat[:2, :2], b[:2])
augmat = np.zeros((3, 4)).ravel().tolist()
res = np.zeros(2).ravel().tolist()
# When
augmented_matrix(mat.ravel(), b.ravel(), 2, 1, nmax, augmat)
gj_solve(augmat, 2, 1, res)
# Then
np.testing.assert_array_almost_equal(res, expect)
def test_general_matrix(self):
# Test Gauss Jordan solve.
"""
This is a general matrix which needs partial pivoting to be
solved.
References
----------
http://web.mit.edu/10.001/Web/Course_Notes/GaussElimPivoting.html
"""
n = 4
mat = [[0.02, 0.01, 0., 0.], [1., 2., 1., 0.], [0., 1., 2., 1.],
[0., 0., 100., 200.]]
b = [0.02, 1., 4., 800.]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_band_matrix(self):
n = 3
mat = [[1., -2., 0.], [1., -1., 3.], [2., 5., 0.]]
b = [-3., 1., 0.5]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_dense_matrix(self):
n = 3
mat = [[0.96, 4.6, -3.7], [2.7, 4.3, -0.67], [0.9, 0., -5.]]
b = [2.4, 3.6, -5.8]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_tridiagonal_matrix(self):
n = 4
mat = [[-2., 1., 0., 0.], [1., -2., 1., 0.], [0., 1., -2., 0.],
[0., 0., 1., -2.]]
b = [-1., 0., 0., -5.]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_symmetric_matrix(self):
n = 3
mat = [[0.96, 4.6, -3.7], [4.6, 4.3, -0.67], [-3.7, -0.67, -5.]]
b = [2.4, 3.6, -5.8]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_symmetric_positivedefinite_Matrix(self):
n = 4
mat = [[1., 1., 4., -1.], [1., 5., 0., -1.], [4., 0., 21., -4.],
[-1., -1., -4., 10.]]
b = [2.4, 3.6, -5.8, 0.5]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_inverse(self):
# Given
n = 3
mat = [[1.0, 2.0, 2.5], [2.5, 1.0, 0.0], [0.0, 0.0, 1.0]]
b = np.identity(3).ravel().tolist()
A = np.zeros((3, 6)).ravel().tolist()
augmented_matrix(np.ravel(mat), b, 3, 3, 3, A)
result = np.zeros((3, 3)).ravel().tolist()
# When
sing = gj_solve(A, n, n, result)
# Then
mat = np.asarray(mat)
res = np.asarray(result)
res.shape = 3, 3
np.testing.assert_allclose(res, np.linalg.inv(mat))
self.assertAlmostEqual(sing, 0.0)
def test_matmult(self):
# Given
n = 3
a = np.random.random((3, 3))
b = np.random.random((3, 3))
result = [0.0]*9
# When
mat_mult(a.ravel(), b.ravel(), n, result)
# Then.
expect = np.dot(a, b)
result = np.asarray(result)
result.shape = 3, 3
np.testing.assert_allclose(result, expect)
def test_mat_vec_mult(self):
# Given
n = 3
a = np.random.random((3, 3))
b = np.random.random((3,))
result = [0.0]*3
# When
mat_vec_mult(a.ravel(), b, n, result)
# Then.
expect = np.dot(a, b)
result = np.asarray(result)
np.testing.assert_allclose(result, expect)
def test_singular_matrix(self):
# Given
n = 3
mat = [[1., 1., 0.], [1., 1., 0.], [1., 1., 1.]]
b = [1.0, 1.0, 1.0]
#
sing, result = gj_solve_helper(np.ravel(mat), b, n)
self.assertAlmostEqual(sing, 1.0)
if __name__ == '__main__':
unittest.main()
| 32.237864
| 73
| 0.512573
|
from pysph.sph.wc.linalg import (
augmented_matrix, gj_solve, mat_mult, mat_vec_mult
)
import numpy as np
import unittest
def gj_solve_helper(a, b, n):
m = np.zeros((n, n+1)).ravel().tolist()
augmented_matrix(a, b, n, 1, n, m)
result = [0.0]*n
is_singular = gj_solve(m, n, 1, result)
return is_singular, result
class TestLinalg(unittest.TestCase):
def _to_array(self, x, shape=None):
x = np.asarray(x)
if shape:
x.shape = shape
return x
def test_augmented_matrix(self):
a = np.random.random((3, 3))
b = np.random.random((3, 2))
res = np.zeros((3, 5)).ravel().tolist()
expect = np.zeros((3, 5))
expect[:, :3] = a
expect[:, 3:] = b
augmented_matrix(a.ravel(), b.ravel(), 3, 2, 3, res)
res = self._to_array(res, (3, 5))
np.testing.assert_array_almost_equal(res, expect)
def test_augmented_matrix_with_lower_dimension(self):
a = np.random.random((3, 3))
b = np.random.random((3, 2))
res = np.zeros((3, 5)).ravel().tolist()
expect = np.zeros((2, 4))
expect[:, :2] = a[:2, :2]
expect[:, 2:] = b[:2, :]
expect.resize((3, 5), refcheck=False)
augmented_matrix(a.ravel(), b.ravel(), 2, 2, 3, res)
res = self._to_array(res, (3, 5))
np.testing.assert_array_almost_equal(res, expect)
def test_augmented_matrix_with_gjsolve_with_lower_dimension(self):
nmax = 3
mat = np.array([[7., 4., 2.], [8., 9., 4.], [1., 4., 10.]])
b = np.array([5., 4., 2.])
expect = np.linalg.solve(mat[:2, :2], b[:2])
augmat = np.zeros((3, 4)).ravel().tolist()
res = np.zeros(2).ravel().tolist()
augmented_matrix(mat.ravel(), b.ravel(), 2, 1, nmax, augmat)
gj_solve(augmat, 2, 1, res)
np.testing.assert_array_almost_equal(res, expect)
def test_general_matrix(self):
n = 4
mat = [[0.02, 0.01, 0., 0.], [1., 2., 1., 0.], [0., 1., 2., 1.],
[0., 0., 100., 200.]]
b = [0.02, 1., 4., 800.]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_band_matrix(self):
n = 3
mat = [[1., -2., 0.], [1., -1., 3.], [2., 5., 0.]]
b = [-3., 1., 0.5]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_dense_matrix(self):
n = 3
mat = [[0.96, 4.6, -3.7], [2.7, 4.3, -0.67], [0.9, 0., -5.]]
b = [2.4, 3.6, -5.8]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_tridiagonal_matrix(self):
n = 4
mat = [[-2., 1., 0., 0.], [1., -2., 1., 0.], [0., 1., -2., 0.],
[0., 0., 1., -2.]]
b = [-1., 0., 0., -5.]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_symmetric_matrix(self):
n = 3
mat = [[0.96, 4.6, -3.7], [4.6, 4.3, -0.67], [-3.7, -0.67, -5.]]
b = [2.4, 3.6, -5.8]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_symmetric_positivedefinite_Matrix(self):
n = 4
mat = [[1., 1., 4., -1.], [1., 5., 0., -1.], [4., 0., 21., -4.],
[-1., -1., -4., 10.]]
b = [2.4, 3.6, -5.8, 0.5]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
mat = np.array(mat)
new_b = np.dot(mat, np.transpose(np.array(result)))
new_b = np.ravel(np.array(new_b))
assert np.allclose(new_b, np.array(b))
self.assertAlmostEqual(sing, 0.0)
def test_inverse(self):
n = 3
mat = [[1.0, 2.0, 2.5], [2.5, 1.0, 0.0], [0.0, 0.0, 1.0]]
b = np.identity(3).ravel().tolist()
A = np.zeros((3, 6)).ravel().tolist()
augmented_matrix(np.ravel(mat), b, 3, 3, 3, A)
result = np.zeros((3, 3)).ravel().tolist()
sing = gj_solve(A, n, n, result)
mat = np.asarray(mat)
res = np.asarray(result)
res.shape = 3, 3
np.testing.assert_allclose(res, np.linalg.inv(mat))
self.assertAlmostEqual(sing, 0.0)
def test_matmult(self):
n = 3
a = np.random.random((3, 3))
b = np.random.random((3, 3))
result = [0.0]*9
mat_mult(a.ravel(), b.ravel(), n, result)
expect = np.dot(a, b)
result = np.asarray(result)
result.shape = 3, 3
np.testing.assert_allclose(result, expect)
def test_mat_vec_mult(self):
n = 3
a = np.random.random((3, 3))
b = np.random.random((3,))
result = [0.0]*3
mat_vec_mult(a.ravel(), b, n, result)
expect = np.dot(a, b)
result = np.asarray(result)
np.testing.assert_allclose(result, expect)
def test_singular_matrix(self):
n = 3
mat = [[1., 1., 0.], [1., 1., 0.], [1., 1., 1.]]
b = [1.0, 1.0, 1.0]
sing, result = gj_solve_helper(np.ravel(mat), b, n)
self.assertAlmostEqual(sing, 1.0)
if __name__ == '__main__':
unittest.main()
| true
| true
|
1c4a4fe749a6730b16f54520019eb9f262e581a6
| 13,887
|
py
|
Python
|
cvxpy/reductions/solvers/conic_solvers/conic_solver.py
|
QiuWJX/cvxpy
|
fd1c225b0cdf541618e292cae1a4c7ea25ddc934
|
[
"ECL-2.0",
"Apache-2.0"
] | 556
|
2021-04-20T03:19:49.000Z
|
2022-03-30T12:31:38.000Z
|
cvxpy/reductions/solvers/conic_solvers/conic_solver.py
|
QiuWJX/cvxpy
|
fd1c225b0cdf541618e292cae1a4c7ea25ddc934
|
[
"ECL-2.0",
"Apache-2.0"
] | 358
|
2021-04-20T08:17:49.000Z
|
2022-03-31T21:16:28.000Z
|
cvxpy/reductions/solvers/conic_solvers/conic_solver.py
|
phschiele/cvxpy
|
a43aed7447b87f6d0fbc6f71ae5c7b84183f3369
|
[
"ECL-2.0",
"Apache-2.0"
] | 131
|
2021-04-21T09:00:12.000Z
|
2022-03-29T04:43:51.000Z
|
"""
Copyright 2017 Robin Verschueren, 2017 Akshay Agrawal
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Tuple
import numpy as np
import scipy.sparse as sp
import cvxpy.settings as s
from cvxpy.constraints import PSD, SOC, ExpCone, NonNeg, PowCone3D, Zero
from cvxpy.reductions.cvx_attr2constr import convex_attributes
from cvxpy.reductions.dcp2cone.cone_matrix_stuffing import ParamConeProg
from cvxpy.reductions.solution import Solution, failure_solution
from cvxpy.reductions.solvers import utilities
from cvxpy.reductions.solvers.solver import Solver
# NOTE(akshayka): Small changes to this file can lead to drastic
# performance regressions. If you are making a change to this file,
# make sure to run cvxpy/tests/test_benchmarks.py to ensure that you have
# not introduced a regression.
class LinearOperator:
"""A wrapper for linear operators."""
def __init__(self, linear_op, shape: Tuple[int, ...]) -> None:
if sp.issparse(linear_op):
self._matmul = lambda X: linear_op @ X
else:
self._matmul = linear_op
self.shape = shape
def __call__(self, X):
return self._matmul(X)
def as_linear_operator(linear_op):
if isinstance(linear_op, LinearOperator):
return linear_op
elif sp.issparse(linear_op):
return LinearOperator(linear_op, linear_op.shape)
def as_block_diag_linear_operator(matrices) -> LinearOperator:
"""Block diag of SciPy sparse matrices or linear operators."""
linear_operators = [as_linear_operator(op) for op in matrices]
nrows = [op.shape[0] for op in linear_operators]
ncols = [op.shape[1] for op in linear_operators]
m, n = sum(nrows), sum(ncols)
col_indices = np.append(0, np.cumsum(ncols))
def matmul(X):
outputs = []
for i, op in enumerate(linear_operators):
Xi = X[col_indices[i]:col_indices[i + 1]]
outputs.append(op(Xi))
return sp.vstack(outputs)
return LinearOperator(matmul, (m, n))
# Utility method for formatting a ConeDims instance into a dictionary
# that can be supplied to solvers.
def dims_to_solver_dict(cone_dims):
cones = {
'f': cone_dims.zero,
'l': cone_dims.nonneg,
'q': cone_dims.soc,
'ep': cone_dims.exp,
's': cone_dims.psd,
'p': cone_dims.p3d
}
return cones
class ConicSolver(Solver):
"""Conic solver class with reduction semantics
"""
# The key that maps to ConeDims in the data returned by apply().
DIMS = "dims"
# Every conic solver must support Zero and NonNeg constraints.
SUPPORTED_CONSTRAINTS = [Zero, NonNeg]
# Some solvers cannot solve problems that do not have constraints.
# For such solvers, REQUIRES_CONSTR should be set to True.
REQUIRES_CONSTR = False
# If a solver supports exponential cones, it must specify the corresponding order
# The cvxpy standard for the exponential cone is:
# K_e = closure{(x,y,z) | z >= y * exp(x/y), y>0}.
# Whenever a solver uses this convention, EXP_CONE_ORDER should be [0, 1, 2].
EXP_CONE_ORDER = None
def accepts(self, problem):
return (isinstance(problem, ParamConeProg)
and (self.MIP_CAPABLE or not problem.is_mixed_integer())
and not convex_attributes([problem.x])
and (len(problem.constraints) > 0 or not self.REQUIRES_CONSTR)
and all(type(c) in self.SUPPORTED_CONSTRAINTS for c in
problem.constraints))
@staticmethod
def get_spacing_matrix(shape: Tuple[int, ...], spacing, streak, num_blocks, offset):
"""Returns a sparse matrix that spaces out an expression.
Parameters
----------
shape : tuple
(rows in matrix, columns in matrix)
spacing : int
The number of rows between the start of each non-zero block.
streak: int
The number of elements in each block.
num_blocks : int
The number of non-zero blocks.
offset : int
The number of zero rows at the beginning of the matrix.
Returns
-------
SciPy CSC matrix
A sparse matrix
"""
num_values = num_blocks * streak
val_arr = np.ones(num_values, dtype=np.float64)
streak_plus_spacing = streak + spacing
row_arr = np.arange(0, num_blocks * streak_plus_spacing).reshape(
num_blocks, streak_plus_spacing)[:, :streak].flatten() + offset
col_arr = np.arange(num_values)
return sp.csc_matrix((val_arr, (row_arr, col_arr)), shape)
@staticmethod
def psd_format_mat(constr):
"""Return a matrix to multiply by PSD constraint coefficients.
"""
# Default is identity.
return sp.eye(constr.size, format='csc')
def format_constraints(self, problem, exp_cone_order):
"""
Returns a ParamConeProg whose problem data tensors will yield the
coefficient "A" and offset "b" for the constraint in the following
formats:
Linear equations: (A, b) such that A * x + b == 0,
Linear inequalities: (A, b) such that A * x + b >= 0,
Second order cone: (A, b) such that A * x + b in SOC,
Exponential cone: (A, b) such that A * x + b in EXP,
Semidefinite cone: (A, b) such that A * x + b in PSD,
The CVXPY standard for the exponential cone is:
K_e = closure{(x,y,z) | z >= y * exp(x/y), y>0}.
Whenever a solver uses this convention, EXP_CONE_ORDER should be
[0, 1, 2].
The CVXPY standard for the second order cone is:
SOC(n) = { x : x[0] >= norm(x[1:n], 2) }.
All currently supported solvers use this convention.
Args:
problem : ParamConeProg
The problem that is the provenance of the constraint.
exp_cone_order: list
A list indicating how the exponential cone arguments are ordered.
Returns:
ParamConeProg with structured A.
"""
# Create a matrix to reshape constraints, then replicate for each
# variable entry.
restruct_mat = [] # Form a block diagonal matrix.
for constr in problem.constraints:
total_height = sum([arg.size for arg in constr.args])
if type(constr) == Zero:
restruct_mat.append(-sp.eye(constr.size, format='csr'))
elif type(constr) == NonNeg:
restruct_mat.append(sp.eye(constr.size, format='csr'))
elif type(constr) == SOC:
# Group each t row with appropriate X rows.
assert constr.axis == 0, 'SOC must be lowered to axis == 0'
# Interleave the rows of coeffs[0] and coeffs[1]:
# coeffs[0][0, :]
# coeffs[1][0:gap-1, :]
# coeffs[0][1, :]
# coeffs[1][gap-1:2*(gap-1), :]
t_spacer = ConicSolver.get_spacing_matrix(
shape=(total_height, constr.args[0].size),
spacing=constr.args[1].shape[0],
streak=1,
num_blocks=constr.args[0].size,
offset=0,
)
X_spacer = ConicSolver.get_spacing_matrix(
shape=(total_height, constr.args[1].size),
spacing=1,
streak=constr.args[1].shape[0],
num_blocks=constr.args[0].size,
offset=1,
)
restruct_mat.append(sp.hstack([t_spacer, X_spacer]))
elif type(constr) == ExpCone:
arg_mats = []
for i, arg in enumerate(constr.args):
space_mat = ConicSolver.get_spacing_matrix(
shape=(total_height, arg.size),
spacing=len(exp_cone_order) - 1,
streak=1,
num_blocks=arg.size,
offset=exp_cone_order[i],
)
arg_mats.append(space_mat)
restruct_mat.append(sp.hstack(arg_mats))
elif type(constr) == PowCone3D:
arg_mats = []
for i, arg in enumerate(constr.args):
space_mat = ConicSolver.get_spacing_matrix(
shape=(total_height, arg.size), spacing=2,
streak=1, num_blocks=arg.size, offset=i,
)
arg_mats.append(space_mat)
restruct_mat.append(sp.hstack(arg_mats))
elif type(constr) == PSD:
restruct_mat.append(self.psd_format_mat(constr))
else:
raise ValueError("Unsupported constraint type.")
# Form new ParamConeProg
if restruct_mat:
# TODO(akshayka): profile to see whether using linear operators
# or bmat is faster
restruct_mat = as_block_diag_linear_operator(restruct_mat)
# this is equivalent to but _much_ faster than:
# restruct_mat_rep = sp.block_diag([restruct_mat]*(problem.x.size + 1))
# restruct_A = restruct_mat_rep * problem.A
unspecified, remainder = divmod(problem.A.shape[0] *
problem.A.shape[1],
restruct_mat.shape[1])
reshaped_A = problem.A.reshape(restruct_mat.shape[1],
unspecified, order='F').tocsr()
restructured_A = restruct_mat(reshaped_A).tocoo()
# Because of a bug in scipy versions < 1.20, `reshape`
# can overflow if indices are int32s.
restructured_A.row = restructured_A.row.astype(np.int64)
restructured_A.col = restructured_A.col.astype(np.int64)
restructured_A = restructured_A.reshape(
np.int64(restruct_mat.shape[0]) * (np.int64(problem.x.size) + 1),
problem.A.shape[1], order='F')
else:
restructured_A = problem.A
new_param_cone_prog = ParamConeProg(problem.c,
problem.x,
restructured_A,
problem.variables,
problem.var_id_to_col,
problem.constraints,
problem.parameters,
problem.param_id_to_col,
formatted=True)
return new_param_cone_prog
def invert(self, solution, inverse_data):
"""Returns the solution to the original problem given the inverse_data.
"""
status = solution['status']
if status in s.SOLUTION_PRESENT:
opt_val = solution['value']
primal_vars = {inverse_data[self.VAR_ID]: solution['primal']}
eq_dual = utilities.get_dual_values(
solution['eq_dual'],
utilities.extract_dual_value,
inverse_data[Solver.EQ_CONSTR])
leq_dual = utilities.get_dual_values(
solution['ineq_dual'],
utilities.extract_dual_value,
inverse_data[Solver.NEQ_CONSTR])
eq_dual.update(leq_dual)
dual_vars = eq_dual
return Solution(status, opt_val, primal_vars, dual_vars, {})
else:
return failure_solution(status)
def _prepare_data_and_inv_data(self, problem):
data = {}
inv_data = {self.VAR_ID: problem.x.id}
# Format constraints
#
# By default cvxpy follows the SCS convention, which requires
# constraints to be specified in the following order:
# 1. zero cone
# 2. non-negative orthant
# 3. soc
# 4. psd
# 5. exponential
# 6. three-dimensional power cones
if not problem.formatted:
problem = self.format_constraints(problem, self.EXP_CONE_ORDER)
data[s.PARAM_PROB] = problem
data[self.DIMS] = problem.cone_dims
inv_data[self.DIMS] = problem.cone_dims
constr_map = problem.constr_map
inv_data[self.EQ_CONSTR] = constr_map[Zero]
inv_data[self.NEQ_CONSTR] = constr_map[NonNeg] + constr_map[SOC] + \
constr_map[PSD] + constr_map[ExpCone] + constr_map[PowCone3D]
return problem, data, inv_data
def apply(self, problem):
"""Returns a new problem and data for inverting the new solution.
Returns
-------
tuple
(dict of arguments needed for the solver, inverse data)
"""
# This is a reference implementation following SCS conventions
# Implementations for other solvers may amend or override the implementation entirely
problem, data, inv_data = self._prepare_data_and_inv_data(problem)
# Apply parameter values.
# Obtain A, b such that Ax + s = b, s \in cones.
c, d, A, b = problem.apply_parameters()
data[s.C] = c
inv_data[s.OFFSET] = d
data[s.A] = -A
data[s.B] = b
return data, inv_data
| 40.605263
| 93
| 0.585728
|
from typing import Tuple
import numpy as np
import scipy.sparse as sp
import cvxpy.settings as s
from cvxpy.constraints import PSD, SOC, ExpCone, NonNeg, PowCone3D, Zero
from cvxpy.reductions.cvx_attr2constr import convex_attributes
from cvxpy.reductions.dcp2cone.cone_matrix_stuffing import ParamConeProg
from cvxpy.reductions.solution import Solution, failure_solution
from cvxpy.reductions.solvers import utilities
from cvxpy.reductions.solvers.solver import Solver
class LinearOperator:
def __init__(self, linear_op, shape: Tuple[int, ...]) -> None:
if sp.issparse(linear_op):
self._matmul = lambda X: linear_op @ X
else:
self._matmul = linear_op
self.shape = shape
def __call__(self, X):
return self._matmul(X)
def as_linear_operator(linear_op):
if isinstance(linear_op, LinearOperator):
return linear_op
elif sp.issparse(linear_op):
return LinearOperator(linear_op, linear_op.shape)
def as_block_diag_linear_operator(matrices) -> LinearOperator:
linear_operators = [as_linear_operator(op) for op in matrices]
nrows = [op.shape[0] for op in linear_operators]
ncols = [op.shape[1] for op in linear_operators]
m, n = sum(nrows), sum(ncols)
col_indices = np.append(0, np.cumsum(ncols))
def matmul(X):
outputs = []
for i, op in enumerate(linear_operators):
Xi = X[col_indices[i]:col_indices[i + 1]]
outputs.append(op(Xi))
return sp.vstack(outputs)
return LinearOperator(matmul, (m, n))
def dims_to_solver_dict(cone_dims):
cones = {
'f': cone_dims.zero,
'l': cone_dims.nonneg,
'q': cone_dims.soc,
'ep': cone_dims.exp,
's': cone_dims.psd,
'p': cone_dims.p3d
}
return cones
class ConicSolver(Solver):
DIMS = "dims"
SUPPORTED_CONSTRAINTS = [Zero, NonNeg]
REQUIRES_CONSTR = False
EXP_CONE_ORDER = None
def accepts(self, problem):
return (isinstance(problem, ParamConeProg)
and (self.MIP_CAPABLE or not problem.is_mixed_integer())
and not convex_attributes([problem.x])
and (len(problem.constraints) > 0 or not self.REQUIRES_CONSTR)
and all(type(c) in self.SUPPORTED_CONSTRAINTS for c in
problem.constraints))
@staticmethod
def get_spacing_matrix(shape: Tuple[int, ...], spacing, streak, num_blocks, offset):
num_values = num_blocks * streak
val_arr = np.ones(num_values, dtype=np.float64)
streak_plus_spacing = streak + spacing
row_arr = np.arange(0, num_blocks * streak_plus_spacing).reshape(
num_blocks, streak_plus_spacing)[:, :streak].flatten() + offset
col_arr = np.arange(num_values)
return sp.csc_matrix((val_arr, (row_arr, col_arr)), shape)
@staticmethod
def psd_format_mat(constr):
return sp.eye(constr.size, format='csc')
def format_constraints(self, problem, exp_cone_order):
restruct_mat = []
for constr in problem.constraints:
total_height = sum([arg.size for arg in constr.args])
if type(constr) == Zero:
restruct_mat.append(-sp.eye(constr.size, format='csr'))
elif type(constr) == NonNeg:
restruct_mat.append(sp.eye(constr.size, format='csr'))
elif type(constr) == SOC:
assert constr.axis == 0, 'SOC must be lowered to axis == 0'
t_spacer = ConicSolver.get_spacing_matrix(
shape=(total_height, constr.args[0].size),
spacing=constr.args[1].shape[0],
streak=1,
num_blocks=constr.args[0].size,
offset=0,
)
X_spacer = ConicSolver.get_spacing_matrix(
shape=(total_height, constr.args[1].size),
spacing=1,
streak=constr.args[1].shape[0],
num_blocks=constr.args[0].size,
offset=1,
)
restruct_mat.append(sp.hstack([t_spacer, X_spacer]))
elif type(constr) == ExpCone:
arg_mats = []
for i, arg in enumerate(constr.args):
space_mat = ConicSolver.get_spacing_matrix(
shape=(total_height, arg.size),
spacing=len(exp_cone_order) - 1,
streak=1,
num_blocks=arg.size,
offset=exp_cone_order[i],
)
arg_mats.append(space_mat)
restruct_mat.append(sp.hstack(arg_mats))
elif type(constr) == PowCone3D:
arg_mats = []
for i, arg in enumerate(constr.args):
space_mat = ConicSolver.get_spacing_matrix(
shape=(total_height, arg.size), spacing=2,
streak=1, num_blocks=arg.size, offset=i,
)
arg_mats.append(space_mat)
restruct_mat.append(sp.hstack(arg_mats))
elif type(constr) == PSD:
restruct_mat.append(self.psd_format_mat(constr))
else:
raise ValueError("Unsupported constraint type.")
if restruct_mat:
restruct_mat = as_block_diag_linear_operator(restruct_mat)
unspecified, remainder = divmod(problem.A.shape[0] *
problem.A.shape[1],
restruct_mat.shape[1])
reshaped_A = problem.A.reshape(restruct_mat.shape[1],
unspecified, order='F').tocsr()
restructured_A = restruct_mat(reshaped_A).tocoo()
restructured_A.row = restructured_A.row.astype(np.int64)
restructured_A.col = restructured_A.col.astype(np.int64)
restructured_A = restructured_A.reshape(
np.int64(restruct_mat.shape[0]) * (np.int64(problem.x.size) + 1),
problem.A.shape[1], order='F')
else:
restructured_A = problem.A
new_param_cone_prog = ParamConeProg(problem.c,
problem.x,
restructured_A,
problem.variables,
problem.var_id_to_col,
problem.constraints,
problem.parameters,
problem.param_id_to_col,
formatted=True)
return new_param_cone_prog
def invert(self, solution, inverse_data):
status = solution['status']
if status in s.SOLUTION_PRESENT:
opt_val = solution['value']
primal_vars = {inverse_data[self.VAR_ID]: solution['primal']}
eq_dual = utilities.get_dual_values(
solution['eq_dual'],
utilities.extract_dual_value,
inverse_data[Solver.EQ_CONSTR])
leq_dual = utilities.get_dual_values(
solution['ineq_dual'],
utilities.extract_dual_value,
inverse_data[Solver.NEQ_CONSTR])
eq_dual.update(leq_dual)
dual_vars = eq_dual
return Solution(status, opt_val, primal_vars, dual_vars, {})
else:
return failure_solution(status)
def _prepare_data_and_inv_data(self, problem):
data = {}
inv_data = {self.VAR_ID: problem.x.id}
if not problem.formatted:
problem = self.format_constraints(problem, self.EXP_CONE_ORDER)
data[s.PARAM_PROB] = problem
data[self.DIMS] = problem.cone_dims
inv_data[self.DIMS] = problem.cone_dims
constr_map = problem.constr_map
inv_data[self.EQ_CONSTR] = constr_map[Zero]
inv_data[self.NEQ_CONSTR] = constr_map[NonNeg] + constr_map[SOC] + \
constr_map[PSD] + constr_map[ExpCone] + constr_map[PowCone3D]
return problem, data, inv_data
def apply(self, problem):
problem, data, inv_data = self._prepare_data_and_inv_data(problem)
c, d, A, b = problem.apply_parameters()
data[s.C] = c
inv_data[s.OFFSET] = d
data[s.A] = -A
data[s.B] = b
return data, inv_data
| true
| true
|
1c4a4ffc7df2bbba0362421473040901614f36f3
| 4,959
|
py
|
Python
|
projects/RAMADDA_publish/sphinx/source/conf.py
|
Unidata/drilsdown
|
55aca7168fb390f31c36729605401564e9b82c56
|
[
"MIT"
] | 3
|
2018-05-25T00:19:12.000Z
|
2021-01-08T15:54:36.000Z
|
projects/RAMADDA_publish/sphinx/source/conf.py
|
suvarchal/drilsdown
|
e82f58396f640fef847353caf1bd4b2bf016c7a6
|
[
"MIT"
] | 11
|
2017-10-31T20:15:24.000Z
|
2019-12-16T21:01:55.000Z
|
projects/RAMADDA_publish/sphinx/source/conf.py
|
suvarchal/drilsdown
|
e82f58396f640fef847353caf1bd4b2bf016c7a6
|
[
"MIT"
] | 10
|
2018-02-08T22:23:28.000Z
|
2019-09-29T23:25:19.000Z
|
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'RAMADDA Publish'
copyright = ''
author = 'Suvarchal'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '1.3'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'RAMADDAPublishdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'RAMADDAPublish.tex', 'RAMADDA Publish Documentation',
'Suvarchal', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'ramaddapublish', 'RAMADDA Publish Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'RAMADDAPublish', 'RAMADDA Publish Documentation',
author, 'RAMADDAPublish', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| 30.423313
| 79
| 0.654971
|
project = 'RAMADDA Publish'
copyright = ''
author = 'Suvarchal'
version = ''
release = '1.3'
extensions = [
'sphinx.ext.githubpages',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
language = None
exclude_patterns = []
pygments_style = 'sphinx'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_static_path = ['_static']
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'RAMADDAPublishdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'RAMADDAPublish.tex', 'RAMADDA Publish Documentation',
'Suvarchal', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'ramaddapublish', 'RAMADDA Publish Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'RAMADDAPublish', 'RAMADDA Publish Documentation',
author, 'RAMADDAPublish', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| true
| true
|
1c4a51c816367f7be461f80db74b01f5bb2fc407
| 124
|
py
|
Python
|
model/group.py
|
Den21rus/barancev_training
|
892cd38ffde0954278ea2cebe72379b9db55a29c
|
[
"Apache-2.0"
] | null | null | null |
model/group.py
|
Den21rus/barancev_training
|
892cd38ffde0954278ea2cebe72379b9db55a29c
|
[
"Apache-2.0"
] | null | null | null |
model/group.py
|
Den21rus/barancev_training
|
892cd38ffde0954278ea2cebe72379b9db55a29c
|
[
"Apache-2.0"
] | null | null | null |
class Group:
def __init__(self, username, password):
self.username = username
self.password = password
| 20.666667
| 43
| 0.653226
|
class Group:
def __init__(self, username, password):
self.username = username
self.password = password
| true
| true
|
1c4a522a10fa8856197c75ae296fd1e45edb4dc0
| 5,150
|
py
|
Python
|
official/vision/detection/executor/detection_executor.py
|
Silas-Asamoah/models
|
833e6939acb42f695b0ae3765f98fe494f06115c
|
[
"Apache-2.0"
] | 2
|
2019-11-30T03:43:50.000Z
|
2019-11-30T03:43:55.000Z
|
official/vision/detection/executor/detection_executor.py
|
utpal0401/models
|
426b2c6e894c22ffb17f32581305ea87c3b8b377
|
[
"Apache-2.0"
] | 1
|
2021-03-31T21:30:38.000Z
|
2021-03-31T21:30:38.000Z
|
official/vision/detection/executor/detection_executor.py
|
utpal0401/models
|
426b2c6e894c22ffb17f32581305ea87c3b8b377
|
[
"Apache-2.0"
] | 2
|
2019-11-10T07:48:51.000Z
|
2020-02-04T04:17:41.000Z
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""An executor class for running model on TensorFlow 2.0."""
from __future__ import absolute_import
from __future__ import division
# from __future__ import google_type_annotations
from __future__ import print_function
from absl import logging
import os
import json
import tensorflow.compat.v2 as tf
from official.modeling.training import distributed_executor as executor
class DetectionDistributedExecutor(executor.DistributedExecutor):
"""Detection specific customer training loop executor.
Subclasses the DistributedExecutor and adds support for numpy based metrics.
"""
def __init__(self,
predict_post_process_fn=None,
trainable_variables_filter=None,
**kwargs):
super(DetectionDistributedExecutor, self).__init__(**kwargs)
params = kwargs['params']
if predict_post_process_fn:
assert callable(predict_post_process_fn)
if trainable_variables_filter:
assert callable(trainable_variables_filter)
self._predict_post_process_fn = predict_post_process_fn
self._trainable_variables_filter = trainable_variables_filter
def _create_replicated_step(self,
strategy,
model,
loss_fn,
optimizer,
metric=None):
trainable_variables = model.trainable_variables
if self._trainable_variables_filter:
trainable_variables = self._trainable_variables_filter(
trainable_variables)
logging.info('Filter trainable variables from %d to %d',
len(model.trainable_variables), len(trainable_variables))
def _replicated_step(inputs):
"""Replicated training step."""
inputs, labels = inputs
with tf.GradientTape() as tape:
outputs = model(inputs, training=True)
all_losses = loss_fn(labels, outputs)
losses = {}
for k, v in all_losses.items():
v = tf.reduce_mean(v) / strategy.num_replicas_in_sync
losses[k] = v
loss = losses['total_loss']
if isinstance(metric, tf.keras.metrics.Metric):
metric.update_state(labels, outputs)
else:
logging.error('train metric is not an instance of '
'tf.keras.metrics.Metric.')
grads = tape.gradient(loss, trainable_variables)
optimizer.apply_gradients(zip(grads, trainable_variables))
return loss
return _replicated_step
def _create_test_step(self, strategy, model, metric):
"""Creates a distributed test step."""
@tf.function
def test_step(iterator):
"""Calculates evaluation metrics on distributed devices."""
def _test_step_fn(inputs):
"""Replicated accuracy calculation."""
inputs, labels = inputs
model_outputs = model(inputs, training=False)
if self._predict_post_process_fn:
labels, prediction_outputs = self._predict_post_process_fn(
labels, model_outputs)
return labels, prediction_outputs
labels, outputs = strategy.experimental_run_v2(
_test_step_fn, args=(next(iterator),))
outputs = tf.nest.map_structure(strategy.experimental_local_results,
outputs)
labels = tf.nest.map_structure(strategy.experimental_local_results,
labels)
return labels, outputs
return test_step
def _run_evaluation(self, test_step, current_training_step, metric,
test_iterator):
"""Runs validation steps and aggregate metrics."""
if not test_iterator or not metric:
logging.warning(
'Both test_iterator (%s) and metrics (%s) must not be None.',
test_iterator, metric)
return None
logging.info('Running evaluation after step: %s.', current_training_step)
while True:
try:
labels, outputs = test_step(test_iterator)
if metric:
metric.update_state(labels, outputs)
except (StopIteration, tf.errors.OutOfRangeError):
break
metric_result = metric.result()
if isinstance(metric, tf.keras.metrics.Metric):
metric_result = tf.nest.map_structure(lambda x: x.numpy().astype(float),
metric_result)
logging.info('Step: [%d] Validation metric = %s', current_training_step,
metric_result)
return metric_result
| 37.867647
| 80
| 0.66
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
import os
import json
import tensorflow.compat.v2 as tf
from official.modeling.training import distributed_executor as executor
class DetectionDistributedExecutor(executor.DistributedExecutor):
def __init__(self,
predict_post_process_fn=None,
trainable_variables_filter=None,
**kwargs):
super(DetectionDistributedExecutor, self).__init__(**kwargs)
params = kwargs['params']
if predict_post_process_fn:
assert callable(predict_post_process_fn)
if trainable_variables_filter:
assert callable(trainable_variables_filter)
self._predict_post_process_fn = predict_post_process_fn
self._trainable_variables_filter = trainable_variables_filter
def _create_replicated_step(self,
strategy,
model,
loss_fn,
optimizer,
metric=None):
trainable_variables = model.trainable_variables
if self._trainable_variables_filter:
trainable_variables = self._trainable_variables_filter(
trainable_variables)
logging.info('Filter trainable variables from %d to %d',
len(model.trainable_variables), len(trainable_variables))
def _replicated_step(inputs):
inputs, labels = inputs
with tf.GradientTape() as tape:
outputs = model(inputs, training=True)
all_losses = loss_fn(labels, outputs)
losses = {}
for k, v in all_losses.items():
v = tf.reduce_mean(v) / strategy.num_replicas_in_sync
losses[k] = v
loss = losses['total_loss']
if isinstance(metric, tf.keras.metrics.Metric):
metric.update_state(labels, outputs)
else:
logging.error('train metric is not an instance of '
'tf.keras.metrics.Metric.')
grads = tape.gradient(loss, trainable_variables)
optimizer.apply_gradients(zip(grads, trainable_variables))
return loss
return _replicated_step
def _create_test_step(self, strategy, model, metric):
@tf.function
def test_step(iterator):
def _test_step_fn(inputs):
inputs, labels = inputs
model_outputs = model(inputs, training=False)
if self._predict_post_process_fn:
labels, prediction_outputs = self._predict_post_process_fn(
labels, model_outputs)
return labels, prediction_outputs
labels, outputs = strategy.experimental_run_v2(
_test_step_fn, args=(next(iterator),))
outputs = tf.nest.map_structure(strategy.experimental_local_results,
outputs)
labels = tf.nest.map_structure(strategy.experimental_local_results,
labels)
return labels, outputs
return test_step
def _run_evaluation(self, test_step, current_training_step, metric,
test_iterator):
if not test_iterator or not metric:
logging.warning(
'Both test_iterator (%s) and metrics (%s) must not be None.',
test_iterator, metric)
return None
logging.info('Running evaluation after step: %s.', current_training_step)
while True:
try:
labels, outputs = test_step(test_iterator)
if metric:
metric.update_state(labels, outputs)
except (StopIteration, tf.errors.OutOfRangeError):
break
metric_result = metric.result()
if isinstance(metric, tf.keras.metrics.Metric):
metric_result = tf.nest.map_structure(lambda x: x.numpy().astype(float),
metric_result)
logging.info('Step: [%d] Validation metric = %s', current_training_step,
metric_result)
return metric_result
| true
| true
|
1c4a52e6f133e6c9b67ce57eaeec57e4ff28a9dd
| 2,392
|
py
|
Python
|
tests/wallet/test_wallet_interested_store.py
|
Chinilla/chinilla-blockchain
|
59bebcf94e65b74fbb53ad4929bbd79cb28be619
|
[
"Apache-2.0"
] | null | null | null |
tests/wallet/test_wallet_interested_store.py
|
Chinilla/chinilla-blockchain
|
59bebcf94e65b74fbb53ad4929bbd79cb28be619
|
[
"Apache-2.0"
] | null | null | null |
tests/wallet/test_wallet_interested_store.py
|
Chinilla/chinilla-blockchain
|
59bebcf94e65b74fbb53ad4929bbd79cb28be619
|
[
"Apache-2.0"
] | null | null | null |
from pathlib import Path
from secrets import token_bytes
import aiosqlite
import pytest
from chinilla.types.blockchain_format.coin import Coin
from chinilla.util.db_wrapper import DBWrapper
from chinilla.util.ints import uint64
from chinilla.wallet.wallet_interested_store import WalletInterestedStore
class TestWalletInterestedStore:
@pytest.mark.asyncio
async def test_store(self):
db_filename = Path("wallet_interested_store_test.db")
if db_filename.exists():
db_filename.unlink()
db_connection = await aiosqlite.connect(db_filename)
db_wrapper = DBWrapper(db_connection)
store = await WalletInterestedStore.create(db_wrapper)
try:
coin_1 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_2 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
assert (await store.get_interested_coin_ids()) == []
await store.add_interested_coin_id(coin_1.name())
assert (await store.get_interested_coin_ids()) == [coin_1.name()]
await store.add_interested_coin_id(coin_1.name())
assert (await store.get_interested_coin_ids()) == [coin_1.name()]
await store.add_interested_coin_id(coin_2.name())
assert set(await store.get_interested_coin_ids()) == {coin_1.name(), coin_2.name()}
puzzle_hash = token_bytes(32)
assert len(await store.get_interested_puzzle_hashes()) == 0
await store.add_interested_puzzle_hash(puzzle_hash, 2)
assert len(await store.get_interested_puzzle_hashes()) == 1
await store.add_interested_puzzle_hash(puzzle_hash, 2)
assert len(await store.get_interested_puzzle_hashes()) == 1
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) == 2
await store.add_interested_puzzle_hash(puzzle_hash, 3)
assert len(await store.get_interested_puzzle_hashes()) == 1
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) == 3
await store.remove_interested_puzzle_hash(puzzle_hash)
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) is None
assert len(await store.get_interested_puzzle_hashes()) == 0
finally:
await db_connection.close()
db_filename.unlink()
| 45.132075
| 95
| 0.695234
|
from pathlib import Path
from secrets import token_bytes
import aiosqlite
import pytest
from chinilla.types.blockchain_format.coin import Coin
from chinilla.util.db_wrapper import DBWrapper
from chinilla.util.ints import uint64
from chinilla.wallet.wallet_interested_store import WalletInterestedStore
class TestWalletInterestedStore:
@pytest.mark.asyncio
async def test_store(self):
db_filename = Path("wallet_interested_store_test.db")
if db_filename.exists():
db_filename.unlink()
db_connection = await aiosqlite.connect(db_filename)
db_wrapper = DBWrapper(db_connection)
store = await WalletInterestedStore.create(db_wrapper)
try:
coin_1 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_2 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
assert (await store.get_interested_coin_ids()) == []
await store.add_interested_coin_id(coin_1.name())
assert (await store.get_interested_coin_ids()) == [coin_1.name()]
await store.add_interested_coin_id(coin_1.name())
assert (await store.get_interested_coin_ids()) == [coin_1.name()]
await store.add_interested_coin_id(coin_2.name())
assert set(await store.get_interested_coin_ids()) == {coin_1.name(), coin_2.name()}
puzzle_hash = token_bytes(32)
assert len(await store.get_interested_puzzle_hashes()) == 0
await store.add_interested_puzzle_hash(puzzle_hash, 2)
assert len(await store.get_interested_puzzle_hashes()) == 1
await store.add_interested_puzzle_hash(puzzle_hash, 2)
assert len(await store.get_interested_puzzle_hashes()) == 1
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) == 2
await store.add_interested_puzzle_hash(puzzle_hash, 3)
assert len(await store.get_interested_puzzle_hashes()) == 1
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) == 3
await store.remove_interested_puzzle_hash(puzzle_hash)
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) is None
assert len(await store.get_interested_puzzle_hashes()) == 0
finally:
await db_connection.close()
db_filename.unlink()
| true
| true
|
1c4a530ffc5d9d6be0b083b255751a4622ff6ed6
| 645
|
py
|
Python
|
teal_algos/bubble.py
|
Taneristique/TEAL
|
e8860741be02a98b2562d36da46864e7bdc9594b
|
[
"MIT"
] | null | null | null |
teal_algos/bubble.py
|
Taneristique/TEAL
|
e8860741be02a98b2562d36da46864e7bdc9594b
|
[
"MIT"
] | null | null | null |
teal_algos/bubble.py
|
Taneristique/TEAL
|
e8860741be02a98b2562d36da46864e7bdc9594b
|
[
"MIT"
] | null | null | null |
import time
start=time.time()
def bubble(x):
"""Function takes list element x as parameter which is consist of numbers"""
for i in range(len(x)-1): #number of the loops
for j in range(1,len(x)): #number of the comperations
if x[j-1]>x[j]: #swap i[j] with i[j-1]
chg=x[j]
x[j]=x[j-1]
x[j-1]=chg
if x[j-1]==x[j] or x[j-1]<x[j]: #do nothing if two elements are equal or first element little than second one.
pass
print('step ',i+1 ,x)
bubble([29,32,4,11,2,3])
end=time.time()
print(f'Runtime of algorithm : {end-start}')
| 40.3125
| 122
| 0.542636
|
import time
start=time.time()
def bubble(x):
for i in range(len(x)-1):
for j in range(1,len(x)):
if x[j-1]>x[j]:
chg=x[j]
x[j]=x[j-1]
x[j-1]=chg
if x[j-1]==x[j] or x[j-1]<x[j]:
pass
print('step ',i+1 ,x)
bubble([29,32,4,11,2,3])
end=time.time()
print(f'Runtime of algorithm : {end-start}')
| true
| true
|
1c4a5380b287324f5ee930287fd0199210559d6b
| 4,852
|
py
|
Python
|
bin/additem.py
|
CakeLancelot/UnityPackFF
|
ee3368b16aec3c6b95c70778105dfcbf7379647f
|
[
"MIT"
] | 6
|
2020-11-03T13:23:40.000Z
|
2021-10-06T15:25:29.000Z
|
bin/additem.py
|
CakeLancelot/UnityPackFF
|
ee3368b16aec3c6b95c70778105dfcbf7379647f
|
[
"MIT"
] | 1
|
2021-02-15T20:16:40.000Z
|
2021-02-15T20:16:40.000Z
|
bin/additem.py
|
CakeLancelot/UnityPackFF
|
ee3368b16aec3c6b95c70778105dfcbf7379647f
|
[
"MIT"
] | 10
|
2020-11-03T15:08:10.000Z
|
2022-02-13T07:32:52.000Z
|
#!/usr/bin/env python3
# Adds a (retextured) item into the game. Will need to be modified slightly
# to add items other than armor. Remember to use dumpxdt.py (and make it read
# the generated _new TableData!) so your server allows you to spawn the
# new items. Will also need to be modified to work with girls' or unisex items.
from unitypack.asset import Asset
from unitypack.object import FFOrderedDict
from unitypack.modding import import_texture
# asset bundles
TABLEDATA_PATH = 'CustomAssetBundle-1dca92eecee4742d985b799d8226666d'
CHARTEX_PATH = 'CustomAssetBundle-aa120043d3c634fe9adfb5cbe08e6970'
ICONS_PATH = 'CustomAssetBundle-784fa24bcf2da4f5eabe9547958616eb'
# template items
TEMPL_ITEMID = 152 # changing this is one way to change the base model
TEMPL_TEXTURE_PATHID = 589 # these other two can stay the same
TEMPL_ICON_PATHID = 1000
# new item properties
ITEM_TEXTURE_PATH = 'shirt_davestrider2.png'
ITEM_ICON_PATH = 'shirt_davestrider2_icon.png'
ITEM_NAME = 'Dave Strider Shirt'
ITEM_COMMENT = 'Dave Strider from Homestuck! (I know nothing about this character)'
ITEM_TEXTURE_NAME = 'shirt_davestrider2'
ITEM_TYPE = 'Shirts' # one of Shirts, Pants, Shoes, Hat, Glass, Back, Weapon, Vehicle
ITEM_DEFENSE = 50
def findnexticon(tabledata, typ):
xdtdata = tabledata.objects[7].contents
categories = ['Shirts', 'Pants', 'Shoes', 'Hat', 'Glass', 'Back', 'Weapon', 'Vehicle']
ret = 1
for cat in categories:
icontable = xdtdata['m_p' + cat + 'ItemTable']['m_pItemIconData']
if icontable[1]['m_iIconType'] == typ:
ret = max(ret, *[x['m_iIconNumber'] for x in icontable])
return ret + 1
def fromtempl(table, src, dst):
table.append(FFOrderedDict())
for k, v in table[src].items():
table[dst][k] = v
def mod_tabledata(tabledata):
itemtable = tabledata.objects[7].contents['m_p' + ITEM_TYPE + 'ItemTable']
itemid = len(itemtable['m_pItemData'])
assert len(itemtable['m_pItemData']) == len(itemtable['m_pItemStringData'])
# construct item object
fromtempl(itemtable['m_pItemData'], TEMPL_ITEMID, itemid)
# fix item id
itemtable['m_pItemData'][itemid]['m_iItemNumber'] = itemid
itemtable['m_pItemData'][itemid]['m_iItemName'] = itemid
itemtable['m_pItemData'][itemid]['m_iComment'] = itemid
# configure properties
itemtable['m_pItemData'][itemid]['m_iDefenseRat'] = ITEM_DEFENSE
# ...and any other changes you want
# construct item strings object
fromtempl(itemtable['m_pItemStringData'], TEMPL_ITEMID, itemid)
# set strings
itemtable['m_pItemStringData'][itemid]['m_strName'] = ITEM_NAME
itemtable['m_pItemStringData'][itemid]['m_strComment'] = ITEM_COMMENT
meshid = len(itemtable['m_pItemMeshData'])
templ_meshid = itemtable['m_pItemData'][TEMPL_ITEMID]['m_iMesh']
itemtable['m_pItemData'][itemid]['m_iMesh'] = meshid
# construct item mesh info object
fromtempl(itemtable['m_pItemMeshData'], templ_meshid, meshid)
itemtable['m_pItemMeshData'][meshid]['m_pstrMTextureString'] = ITEM_TEXTURE_NAME
# female texture
# itemtable['m_pItemMeshData'][meshid]['m_pstrFTextureString'] = ITEM_TEXTURE_NAME
iconnum = findnexticon(tabledata, 3)
# construct icon object
iconid = len(itemtable['m_pItemIconData'])
itemtable['m_pItemIconData'].append(FFOrderedDict())
itemtable['m_pItemIconData'][iconid]['m_iIconType'] = 3
itemtable['m_pItemIconData'][iconid]['m_iIconNumber'] = iconnum
itemtable['m_pItemData'][itemid]['m_iIcon'] = iconid
print('added itemid {} to tabledata.\n\tmeshid: {}, iconid: {}, iconum: {}'
.format(itemid, meshid, iconid, iconnum))
return iconnum
def mod_texture(asset, imgpath, load_path, name, templ_pathid, comp='dxt1'):
obj = asset.add_object(28)
import_texture(obj._contents, imgpath, name, comp)
ab_ent = asset.add2ab(load_path, obj.path_id)
print('inserted texture.\n\tpath_id: {}'.format(obj.path_id))
def main():
print('inserting {}...'.format(ITEM_NAME))
print('modding TableData...')
with open(TABLEDATA_PATH, 'rb') as f:
tabledata = Asset.from_file(f)
iconnum = mod_tabledata(tabledata)
with open(TABLEDATA_PATH + '_new', 'wb') as outf:
tabledata.save(outf)
icon_name = 'cosicon_{}'.format(iconnum)
icon_path = 'icons/{}.png'.format(icon_name)
print('icon_name: {}, icon_path: {}'.format(icon_name, icon_path))
print('modding CharTexture...')
with open(CHARTEX_PATH, 'rb') as f:
chartex = Asset.from_file(f)
mod_texture(chartex, ITEM_TEXTURE_PATH, 'texture/' + ITEM_TEXTURE_NAME + '.dds',
ITEM_TEXTURE_NAME, TEMPL_TEXTURE_PATHID)
with open(CHARTEX_PATH + '_new', 'wb') as outf:
chartex.save(outf)
print('modding Icons...')
with open(ICONS_PATH, 'rb') as f:
icons = Asset.from_file(f)
mod_texture(icons, ITEM_ICON_PATH, icon_path, icon_name, TEMPL_ICON_PATHID, 'dxt5')
with open(ICONS_PATH + '_new', 'wb') as outf:
icons.save(outf)
print('done.')
if __name__ == '__main__':
main()
| 32.783784
| 87
| 0.739489
|
from unitypack.asset import Asset
from unitypack.object import FFOrderedDict
from unitypack.modding import import_texture
# asset bundles
TABLEDATA_PATH = 'CustomAssetBundle-1dca92eecee4742d985b799d8226666d'
CHARTEX_PATH = 'CustomAssetBundle-aa120043d3c634fe9adfb5cbe08e6970'
ICONS_PATH = 'CustomAssetBundle-784fa24bcf2da4f5eabe9547958616eb'
# template items
TEMPL_ITEMID = 152 # changing this is one way to change the base model
TEMPL_TEXTURE_PATHID = 589 # these other two can stay the same
TEMPL_ICON_PATHID = 1000
# new item properties
ITEM_TEXTURE_PATH = 'shirt_davestrider2.png'
ITEM_ICON_PATH = 'shirt_davestrider2_icon.png'
ITEM_NAME = 'Dave Strider Shirt'
ITEM_COMMENT = 'Dave Strider from Homestuck! (I know nothing about this character)'
ITEM_TEXTURE_NAME = 'shirt_davestrider2'
ITEM_TYPE = 'Shirts' # one of Shirts, Pants, Shoes, Hat, Glass, Back, Weapon, Vehicle
ITEM_DEFENSE = 50
def findnexticon(tabledata, typ):
xdtdata = tabledata.objects[7].contents
categories = ['Shirts', 'Pants', 'Shoes', 'Hat', 'Glass', 'Back', 'Weapon', 'Vehicle']
ret = 1
for cat in categories:
icontable = xdtdata['m_p' + cat + 'ItemTable']['m_pItemIconData']
if icontable[1]['m_iIconType'] == typ:
ret = max(ret, *[x['m_iIconNumber'] for x in icontable])
return ret + 1
def fromtempl(table, src, dst):
table.append(FFOrderedDict())
for k, v in table[src].items():
table[dst][k] = v
def mod_tabledata(tabledata):
itemtable = tabledata.objects[7].contents['m_p' + ITEM_TYPE + 'ItemTable']
itemid = len(itemtable['m_pItemData'])
assert len(itemtable['m_pItemData']) == len(itemtable['m_pItemStringData'])
# construct item object
fromtempl(itemtable['m_pItemData'], TEMPL_ITEMID, itemid)
# fix item id
itemtable['m_pItemData'][itemid]['m_iItemNumber'] = itemid
itemtable['m_pItemData'][itemid]['m_iItemName'] = itemid
itemtable['m_pItemData'][itemid]['m_iComment'] = itemid
# configure properties
itemtable['m_pItemData'][itemid]['m_iDefenseRat'] = ITEM_DEFENSE
# ...and any other changes you want
# construct item strings object
fromtempl(itemtable['m_pItemStringData'], TEMPL_ITEMID, itemid)
# set strings
itemtable['m_pItemStringData'][itemid]['m_strName'] = ITEM_NAME
itemtable['m_pItemStringData'][itemid]['m_strComment'] = ITEM_COMMENT
meshid = len(itemtable['m_pItemMeshData'])
templ_meshid = itemtable['m_pItemData'][TEMPL_ITEMID]['m_iMesh']
itemtable['m_pItemData'][itemid]['m_iMesh'] = meshid
# construct item mesh info object
fromtempl(itemtable['m_pItemMeshData'], templ_meshid, meshid)
itemtable['m_pItemMeshData'][meshid]['m_pstrMTextureString'] = ITEM_TEXTURE_NAME
# female texture
# itemtable['m_pItemMeshData'][meshid]['m_pstrFTextureString'] = ITEM_TEXTURE_NAME
iconnum = findnexticon(tabledata, 3)
# construct icon object
iconid = len(itemtable['m_pItemIconData'])
itemtable['m_pItemIconData'].append(FFOrderedDict())
itemtable['m_pItemIconData'][iconid]['m_iIconType'] = 3
itemtable['m_pItemIconData'][iconid]['m_iIconNumber'] = iconnum
itemtable['m_pItemData'][itemid]['m_iIcon'] = iconid
print('added itemid {} to tabledata.\n\tmeshid: {}, iconid: {}, iconum: {}'
.format(itemid, meshid, iconid, iconnum))
return iconnum
def mod_texture(asset, imgpath, load_path, name, templ_pathid, comp='dxt1'):
obj = asset.add_object(28)
import_texture(obj._contents, imgpath, name, comp)
ab_ent = asset.add2ab(load_path, obj.path_id)
print('inserted texture.\n\tpath_id: {}'.format(obj.path_id))
def main():
print('inserting {}...'.format(ITEM_NAME))
print('modding TableData...')
with open(TABLEDATA_PATH, 'rb') as f:
tabledata = Asset.from_file(f)
iconnum = mod_tabledata(tabledata)
with open(TABLEDATA_PATH + '_new', 'wb') as outf:
tabledata.save(outf)
icon_name = 'cosicon_{}'.format(iconnum)
icon_path = 'icons/{}.png'.format(icon_name)
print('icon_name: {}, icon_path: {}'.format(icon_name, icon_path))
print('modding CharTexture...')
with open(CHARTEX_PATH, 'rb') as f:
chartex = Asset.from_file(f)
mod_texture(chartex, ITEM_TEXTURE_PATH, 'texture/' + ITEM_TEXTURE_NAME + '.dds',
ITEM_TEXTURE_NAME, TEMPL_TEXTURE_PATHID)
with open(CHARTEX_PATH + '_new', 'wb') as outf:
chartex.save(outf)
print('modding Icons...')
with open(ICONS_PATH, 'rb') as f:
icons = Asset.from_file(f)
mod_texture(icons, ITEM_ICON_PATH, icon_path, icon_name, TEMPL_ICON_PATHID, 'dxt5')
with open(ICONS_PATH + '_new', 'wb') as outf:
icons.save(outf)
print('done.')
if __name__ == '__main__':
main()
| true
| true
|
1c4a540b90827e76c2c1079b89be91800a5d28c8
| 126
|
py
|
Python
|
vit/formatter/wait_epoch.py
|
kinifwyne/vit
|
e2cbafce922b1e09c4a66e7dc9592c51fe628e9d
|
[
"MIT"
] | 179
|
2020-07-28T08:21:51.000Z
|
2022-03-30T21:39:37.000Z
|
vit/formatter/wait_epoch.py
|
kinifwyne/vit
|
e2cbafce922b1e09c4a66e7dc9592c51fe628e9d
|
[
"MIT"
] | 255
|
2017-02-01T11:49:12.000Z
|
2020-07-26T22:31:25.000Z
|
vit/formatter/wait_epoch.py
|
kinifwyne/vit
|
e2cbafce922b1e09c4a66e7dc9592c51fe628e9d
|
[
"MIT"
] | 26
|
2017-01-17T20:31:13.000Z
|
2020-06-17T13:09:01.000Z
|
from vit.formatter.wait import Wait
class WaitEpoch(Wait):
def format(self, wait, task):
return self.epoch(wait)
| 21
| 35
| 0.698413
|
from vit.formatter.wait import Wait
class WaitEpoch(Wait):
def format(self, wait, task):
return self.epoch(wait)
| true
| true
|
1c4a549672ab2b68bbe9ad8488637c7d44891b43
| 2,819
|
py
|
Python
|
test/lint/check-doc.py
|
VaderCoinProject/vadercoin
|
b513c794b014d40e5aad281dd1f54845c46d216c
|
[
"MIT"
] | null | null | null |
test/lint/check-doc.py
|
VaderCoinProject/vadercoin
|
b513c794b014d40e5aad281dd1f54845c46d216c
|
[
"MIT"
] | null | null | null |
test/lint/check-doc.py
|
VaderCoinProject/vadercoin
|
b513c794b014d40e5aad281dd1f54845c46d216c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2015-2020 The Vadercoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
REGEX_ARG = r'(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\("(-[^"]+)"'
REGEX_DOC = r'AddArg\("(-[^"=]+?)(?:=|")'
CMD_ROOT_DIR = '$(git rev-parse --show-toplevel)/{}'.format(FOLDER_GREP)
CMD_GREP_ARGS = r"git grep --perl-regexp '{}' -- {} ':(exclude){}'".format(REGEX_ARG, CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_WALLET_ARGS = r"git grep --function-context 'void WalletInit::AddWalletOptions' -- {} | grep AddArg".format(CMD_ROOT_DIR)
CMD_GREP_WALLET_HIDDEN_ARGS = r"git grep --function-context 'void DummyWalletInit::AddWalletOptions' -- {}".format(CMD_ROOT_DIR)
CMD_GREP_DOCS = r"git grep --perl-regexp '{}' {}".format(REGEX_DOC, CMD_ROOT_DIR)
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb', '-zapwallettxes'])
def lint_missing_argument_documentation():
    """Assert that every command line argument used in the sources is documented.

    Greps the tree for argument usages and for AddArg() documentation calls,
    then compares the two sets. Raises AssertionError listing any argument
    that is used but never documented.
    """
    arg_pattern = re.compile(REGEX_ARG)
    doc_pattern = re.compile(REGEX_DOC)
    used_output = check_output(CMD_GREP_ARGS, shell=True).decode('utf8').strip()
    docd_output = check_output(CMD_GREP_DOCS, shell=True).decode('utf8').strip()
    args_used = set(arg_pattern.findall(used_output))
    # Args in SET_DOC_OPTIONAL deliberately need no documentation.
    args_docd = set(doc_pattern.findall(docd_output)) | SET_DOC_OPTIONAL
    args_need_doc = args_used - args_docd
    args_unknown = args_docd - args_used
    print("Args used : {}".format(len(args_used)))
    print("Args documented : {}".format(len(args_docd)))
    print("Args undocumented: {}".format(len(args_need_doc)))
    print(args_need_doc)
    print("Args unknown : {}".format(len(args_unknown)))
    print(args_unknown)
    assert 0 == len(args_need_doc), "Please document the following arguments: {}".format(args_need_doc)
def lint_missing_hidden_wallet_args():
    """Assert every documented wallet option is also registered as hidden.

    Options added by WalletInit::AddWalletOptions must have a matching entry
    in DummyWalletInit::AddWalletOptions so builds without wallet support
    still accept (and ignore) them.
    """
    doc_pattern = re.compile(REGEX_DOC)
    hidden_pattern = re.compile(r' "([^"=]+)')
    visible_grep = check_output(CMD_GREP_WALLET_ARGS, shell=True).decode('utf8').strip()
    hidden_grep = check_output(CMD_GREP_WALLET_HIDDEN_ARGS, shell=True).decode('utf8').strip()
    visible_args = set(doc_pattern.findall(visible_grep))
    hidden_args = set(hidden_pattern.findall(hidden_grep))
    hidden_missing = visible_args - hidden_args
    if hidden_missing:
        assert 0, "Please add {} to the hidden args in DummyWalletInit::AddWalletOptions".format(hidden_missing)
def main():
    # Run each lint check in turn; a failing check raises AssertionError,
    # which makes the script exit non-zero for CI.
    lint_missing_argument_documentation()
    lint_missing_hidden_wallet_args()

if __name__ == "__main__":
    main()
| 42.074627
| 130
| 0.723306
|
from subprocess import check_output
import re
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
REGEX_ARG = r'(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\("(-[^"]+)"'
REGEX_DOC = r'AddArg\("(-[^"=]+?)(?:=|")'
CMD_ROOT_DIR = '$(git rev-parse --show-toplevel)/{}'.format(FOLDER_GREP)
CMD_GREP_ARGS = r"git grep --perl-regexp '{}' -- {} ':(exclude){}'".format(REGEX_ARG, CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_WALLET_ARGS = r"git grep --function-context 'void WalletInit::AddWalletOptions' -- {} | grep AddArg".format(CMD_ROOT_DIR)
CMD_GREP_WALLET_HIDDEN_ARGS = r"git grep --function-context 'void DummyWalletInit::AddWalletOptions' -- {}".format(CMD_ROOT_DIR)
CMD_GREP_DOCS = r"git grep --perl-regexp '{}' {}".format(REGEX_DOC, CMD_ROOT_DIR)
SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb', '-zapwallettxes'])
def lint_missing_argument_documentation():
used = check_output(CMD_GREP_ARGS, shell=True).decode('utf8').strip()
docd = check_output(CMD_GREP_DOCS, shell=True).decode('utf8').strip()
args_used = set(re.findall(re.compile(REGEX_ARG), used))
args_docd = set(re.findall(re.compile(REGEX_DOC), docd)).union(SET_DOC_OPTIONAL)
args_need_doc = args_used.difference(args_docd)
args_unknown = args_docd.difference(args_used)
print("Args used : {}".format(len(args_used)))
print("Args documented : {}".format(len(args_docd)))
print("Args undocumented: {}".format(len(args_need_doc)))
print(args_need_doc)
print("Args unknown : {}".format(len(args_unknown)))
print(args_unknown)
assert 0 == len(args_need_doc), "Please document the following arguments: {}".format(args_need_doc)
def lint_missing_hidden_wallet_args():
wallet_args = check_output(CMD_GREP_WALLET_ARGS, shell=True).decode('utf8').strip()
wallet_hidden_args = check_output(CMD_GREP_WALLET_HIDDEN_ARGS, shell=True).decode('utf8').strip()
wallet_args = set(re.findall(re.compile(REGEX_DOC), wallet_args))
wallet_hidden_args = set(re.findall(re.compile(r' "([^"=]+)'), wallet_hidden_args))
hidden_missing = wallet_args.difference(wallet_hidden_args)
if hidden_missing:
assert 0, "Please add {} to the hidden args in DummyWalletInit::AddWalletOptions".format(hidden_missing)
def main():
lint_missing_argument_documentation()
lint_missing_hidden_wallet_args()
if __name__ == "__main__":
main()
| true
| true
|
1c4a564f9a4cae704bf503183576b795d60fbbf4
| 142
|
py
|
Python
|
playrcc/src/gui/__init__.py
|
Gloryness/playrcc
|
3816a935f19c786db59ba5a46a98cc527053cc29
|
[
"MIT"
] | 4
|
2020-09-24T14:25:01.000Z
|
2020-11-02T22:18:12.000Z
|
playrcc/src/gui/__init__.py
|
Gloryness/playrcc
|
3816a935f19c786db59ba5a46a98cc527053cc29
|
[
"MIT"
] | null | null | null |
playrcc/src/gui/__init__.py
|
Gloryness/playrcc
|
3816a935f19c786db59ba5a46a98cc527053cc29
|
[
"MIT"
] | null | null | null |
from .mainwindow import SecretCodeWindow
__title__ = 'gui'
__author__ = 'Goryness'
__license__ = 'MIT License'
__all__ = ['SecretCodeWindow']
| 23.666667
| 40
| 0.774648
|
from .mainwindow import SecretCodeWindow
__title__ = 'gui'
__author__ = 'Goryness'
__license__ = 'MIT License'
__all__ = ['SecretCodeWindow']
| true
| true
|
1c4a5693a9652bf3ef6a37798165c3bbc52518da
| 4,605
|
py
|
Python
|
melodic/src/ros_comm/rospy/src/rospy/__init__.py
|
disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA
|
3d265bb64712e3cd7dfa0ad56d78fcdebafdb4b0
|
[
"BSD-3-Clause"
] | 2
|
2021-07-14T12:33:55.000Z
|
2021-11-21T07:14:13.000Z
|
melodic/src/ros_comm/rospy/src/rospy/__init__.py
|
disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA
|
3d265bb64712e3cd7dfa0ad56d78fcdebafdb4b0
|
[
"BSD-3-Clause"
] | null | null | null |
melodic/src/ros_comm/rospy/src/rospy/__init__.py
|
disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA
|
3d265bb64712e3cd7dfa0ad56d78fcdebafdb4b0
|
[
"BSD-3-Clause"
] | null | null | null |
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Copyright (c) 2008, Willow Garage, Inc.
# Revision $Id$
"""
ROS client library for Python.
See U{http://ros.org/wiki/rospy}
@author: Ken Conley (kwc)
"""
# import symbols into rospy namespace
# NOTE: there are much better ways to configure python module
# dictionaries, but the rospy codebase isn't quite in shape for that
# yet
from std_msgs.msg import Header
from .client import spin, myargv, init_node, \
get_published_topics, \
wait_for_message, \
get_master, \
on_shutdown, \
get_param, get_param_cached, get_param_names, set_param, delete_param, has_param, search_param,\
DEBUG, INFO, WARN, ERROR, FATAL
from .timer import sleep, Rate, Timer
from .core import is_shutdown, signal_shutdown, \
get_node_uri, get_ros_root, \
logdebug, logwarn, loginfo, logout, logerr, logfatal, \
logdebug_throttle, logwarn_throttle, loginfo_throttle, logerr_throttle, logfatal_throttle, \
logdebug_throttle_identical, logwarn_throttle_identical, loginfo_throttle_identical, logerr_throttle_identical, logfatal_throttle_identical, \
logdebug_once, logwarn_once, loginfo_once, logerr_once, logfatal_once, \
parse_rosrpc_uri
from .exceptions import *
from .msg import AnyMsg
from .msproxy import MasterProxy
from .names import get_name, get_caller_id, get_namespace, resolve_name, remap_name
from .rostime import Time, Duration, get_rostime, get_time
from .service import ServiceException
# - use tcp ros implementation of services
from .impl.tcpros_service import Service, ServiceProxy, wait_for_service
from .topics import Message, SubscribeListener, Publisher, Subscriber
## \defgroup validators Validators
## \defgroup clientapi Client API
__all__ = [
'Header',
'spin',
'myargv',
'init_node',
'get_master',
'get_published_topics',
'wait_for_service',
'on_shutdown',
'get_param',
'get_param_cached',
'get_param_names',
'set_param',
'delete_param',
'has_param',
'search_param',
'sleep',
'Rate',
'DEBUG',
'INFO',
'WARN',
'ERROR',
'FATAL',
'is_shutdown',
'signal_shutdown',
'get_node_uri',
'get_ros_root',
'logdebug',
'logwarn', 'loginfo',
'logout', 'logerr', 'logfatal',
'logdebug_throttle',
'logwarn_throttle', 'loginfo_throttle',
'logerr_throttle', 'logfatal_throttle',
'logdebug_once',
'logwarn_once', 'loginfo_once',
'logerr_once', 'logfatal_once',
'parse_rosrpc_uri',
'MasterProxy',
'NodeProxy',
'ROSException',
'ROSSerializationException',
'ROSInitException',
'ROSInterruptException',
'ROSInternalException',
'TransportException',
'TransportTerminated',
'TransportInitError',
'AnyMsg', 'Message',
'get_name',
'get_caller_id',
'get_namespace',
'resolve_name',
'remap_name',
'Time', 'Duration', 'get_rostime', 'get_time',
'ServiceException',
'Service', 'ServiceProxy',
'SubscribeListener', 'Publisher', 'Subscriber',
]
| 33.860294
| 146
| 0.728339
|
# yet
from std_msgs.msg import Header
from .client import spin, myargv, init_node, \
get_published_topics, \
wait_for_message, \
get_master, \
on_shutdown, \
get_param, get_param_cached, get_param_names, set_param, delete_param, has_param, search_param,\
DEBUG, INFO, WARN, ERROR, FATAL
from .timer import sleep, Rate, Timer
from .core import is_shutdown, signal_shutdown, \
get_node_uri, get_ros_root, \
logdebug, logwarn, loginfo, logout, logerr, logfatal, \
logdebug_throttle, logwarn_throttle, loginfo_throttle, logerr_throttle, logfatal_throttle, \
logdebug_throttle_identical, logwarn_throttle_identical, loginfo_throttle_identical, logerr_throttle_identical, logfatal_throttle_identical, \
logdebug_once, logwarn_once, loginfo_once, logerr_once, logfatal_once, \
parse_rosrpc_uri
from .exceptions import *
from .msg import AnyMsg
from .msproxy import MasterProxy
from .names import get_name, get_caller_id, get_namespace, resolve_name, remap_name
from .rostime import Time, Duration, get_rostime, get_time
from .service import ServiceException
# - use tcp ros implementation of services
from .impl.tcpros_service import Service, ServiceProxy, wait_for_service
from .topics import Message, SubscribeListener, Publisher, Subscriber
## \defgroup validators Validators
## \defgroup clientapi Client API
__all__ = [
'Header',
'spin',
'myargv',
'init_node',
'get_master',
'get_published_topics',
'wait_for_service',
'on_shutdown',
'get_param',
'get_param_cached',
'get_param_names',
'set_param',
'delete_param',
'has_param',
'search_param',
'sleep',
'Rate',
'DEBUG',
'INFO',
'WARN',
'ERROR',
'FATAL',
'is_shutdown',
'signal_shutdown',
'get_node_uri',
'get_ros_root',
'logdebug',
'logwarn', 'loginfo',
'logout', 'logerr', 'logfatal',
'logdebug_throttle',
'logwarn_throttle', 'loginfo_throttle',
'logerr_throttle', 'logfatal_throttle',
'logdebug_once',
'logwarn_once', 'loginfo_once',
'logerr_once', 'logfatal_once',
'parse_rosrpc_uri',
'MasterProxy',
'NodeProxy',
'ROSException',
'ROSSerializationException',
'ROSInitException',
'ROSInterruptException',
'ROSInternalException',
'TransportException',
'TransportTerminated',
'TransportInitError',
'AnyMsg', 'Message',
'get_name',
'get_caller_id',
'get_namespace',
'resolve_name',
'remap_name',
'Time', 'Duration', 'get_rostime', 'get_time',
'ServiceException',
'Service', 'ServiceProxy',
'SubscribeListener', 'Publisher', 'Subscriber',
]
| true
| true
|
1c4a56aeca55d753088ec0ac5ef51be958e3e1da
| 823
|
py
|
Python
|
packages/__init__.py
|
fetchai/agents-yoti
|
d71d57508079e5cd3854037bc3c473e24915af6f
|
[
"Apache-2.0"
] | 4
|
2021-01-19T17:53:58.000Z
|
2021-09-08T05:28:58.000Z
|
packages/__init__.py
|
fetchai/agents-yoti
|
d71d57508079e5cd3854037bc3c473e24915af6f
|
[
"Apache-2.0"
] | null | null | null |
packages/__init__.py
|
fetchai/agents-yoti
|
d71d57508079e5cd3854037bc3c473e24915af6f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""AEA packages folder."""
| 39.190476
| 80
| 0.575942
| true
| true
|
|
1c4a571b018141a2957d2b7a89a4c12fd814a302
| 286
|
py
|
Python
|
frappe/patches/v12_0/remove_deprecated_fields_from_doctype.py
|
jimmyrianto/frappe
|
40051410436b11e0415c8c8f0a8335bdd572ce6e
|
[
"MIT"
] | 5
|
2017-09-12T15:56:31.000Z
|
2022-03-09T13:50:21.000Z
|
frappe/patches/v12_0/remove_deprecated_fields_from_doctype.py
|
alexbow2008/frappe
|
ce592a40b4c5e80a9c6cbdc541105218bf98c966
|
[
"MIT"
] | 212
|
2017-08-16T13:03:18.000Z
|
2020-10-06T12:26:21.000Z
|
frappe/patches/v12_0/remove_deprecated_fields_from_doctype.py
|
alexbow2008/frappe
|
ce592a40b4c5e80a9c6cbdc541105218bf98c966
|
[
"MIT"
] | 14
|
2020-11-04T11:22:44.000Z
|
2022-02-01T20:59:37.000Z
|
import frappe
def execute():
    """Patch: remove deprecated DocType fields and their leftover overrides.

    Drops the 'hide_heading', 'image_view' and 'read_only_onload' columns
    from the DocType doctype, then deletes any Property Setter rows that
    still reference 'read_only_onload'.
    """
    # Reload the DocType schema first so the field metadata is current.
    frappe.reload_doc('core', 'doctype', 'doctype')
    # delete=1 drops the underlying database columns as well.
    frappe.model.delete_fields({
        'DocType': ['hide_heading', 'image_view', 'read_only_onload']
    }, delete=1)
    # Clean up property setters orphaned by the removed field.
    frappe.db.sql('''
        DELETE from `tabProperty Setter`
        WHERE property = 'read_only_onload'
    ''')
| 22
| 63
| 0.695804
|
import frappe
def execute():
frappe.reload_doc('core', 'doctype', 'doctype')
frappe.model.delete_fields({
'DocType': ['hide_heading', 'image_view', 'read_only_onload']
}, delete=1)
frappe.db.sql('''
DELETE from `tabProperty Setter`
WHERE property = 'read_only_onload'
''')
| true
| true
|
1c4a57858093aea769da74e5245781bdca0980dc
| 44,092
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20190501/_inputs.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_native/network/v20190501/_inputs.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_native/network/v20190501/_inputs.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *
__all__ = [
'BackendArgs',
'BackendPoolArgs',
'BackendPoolsSettingsArgs',
'CacheConfigurationArgs',
'ForwardingConfigurationArgs',
'FrontendEndpointArgs',
'FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs',
'HealthProbeSettingsModelArgs',
'LoadBalancingSettingsModelArgs',
'RedirectConfigurationArgs',
'RoutingRuleArgs',
'SubResourceArgs',
]
@pulumi.input_type
class BackendArgs:
    # NOTE: generated by the Pulumi SDK Generator — regenerate rather than hand-edit.
    def __init__(__self__, *,
                 address: Optional[pulumi.Input[str]] = None,
                 backend_host_header: Optional[pulumi.Input[str]] = None,
                 enabled_state: Optional[pulumi.Input[Union[str, 'BackendEnabledState']]] = None,
                 http_port: Optional[pulumi.Input[int]] = None,
                 https_port: Optional[pulumi.Input[int]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        Backend address of a frontDoor load balancer.
        :param pulumi.Input[str] address: Location of the backend (IP address or FQDN)
        :param pulumi.Input[str] backend_host_header: The value to use as the host header sent to the backend. If blank or unspecified, this defaults to the incoming host.
        :param pulumi.Input[Union[str, 'BackendEnabledState']] enabled_state: Whether to enable use of this backend. Permitted values are 'Enabled' or 'Disabled'
        :param pulumi.Input[int] http_port: The HTTP TCP port number. Must be between 1 and 65535.
        :param pulumi.Input[int] https_port: The HTTPS TCP port number. Must be between 1 and 65535.
        :param pulumi.Input[int] priority: Priority to use for load balancing. Higher priorities will not be used for load balancing if any lower priority backend is healthy.
        :param pulumi.Input[int] weight: Weight of this endpoint for load balancing purposes.
        """
        # Only explicitly-provided (non-None) values are recorded on the object.
        if address is not None:
            pulumi.set(__self__, "address", address)
        if backend_host_header is not None:
            pulumi.set(__self__, "backend_host_header", backend_host_header)
        if enabled_state is not None:
            pulumi.set(__self__, "enabled_state", enabled_state)
        if http_port is not None:
            pulumi.set(__self__, "http_port", http_port)
        if https_port is not None:
            pulumi.set(__self__, "https_port", https_port)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)

    @property
    @pulumi.getter
    def address(self) -> Optional[pulumi.Input[str]]:
        """
        Location of the backend (IP address or FQDN)
        """
        return pulumi.get(self, "address")

    @address.setter
    def address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address", value)

    @property
    @pulumi.getter(name="backendHostHeader")
    def backend_host_header(self) -> Optional[pulumi.Input[str]]:
        """
        The value to use as the host header sent to the backend. If blank or unspecified, this defaults to the incoming host.
        """
        return pulumi.get(self, "backend_host_header")

    @backend_host_header.setter
    def backend_host_header(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend_host_header", value)

    @property
    @pulumi.getter(name="enabledState")
    def enabled_state(self) -> Optional[pulumi.Input[Union[str, 'BackendEnabledState']]]:
        """
        Whether to enable use of this backend. Permitted values are 'Enabled' or 'Disabled'
        """
        return pulumi.get(self, "enabled_state")

    @enabled_state.setter
    def enabled_state(self, value: Optional[pulumi.Input[Union[str, 'BackendEnabledState']]]):
        pulumi.set(self, "enabled_state", value)

    @property
    @pulumi.getter(name="httpPort")
    def http_port(self) -> Optional[pulumi.Input[int]]:
        """
        The HTTP TCP port number. Must be between 1 and 65535.
        """
        return pulumi.get(self, "http_port")

    @http_port.setter
    def http_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "http_port", value)

    @property
    @pulumi.getter(name="httpsPort")
    def https_port(self) -> Optional[pulumi.Input[int]]:
        """
        The HTTPS TCP port number. Must be between 1 and 65535.
        """
        return pulumi.get(self, "https_port")

    @https_port.setter
    def https_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "https_port", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        Priority to use for load balancing. Higher priorities will not be used for load balancing if any lower priority backend is healthy.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """
        Weight of this endpoint for load balancing purposes.
        """
        return pulumi.get(self, "weight")

    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
@pulumi.input_type
class BackendPoolArgs:
    # NOTE: generated by the Pulumi SDK Generator — regenerate rather than hand-edit.
    def __init__(__self__, *,
                 backends: Optional[pulumi.Input[Sequence[pulumi.Input['BackendArgs']]]] = None,
                 health_probe_settings: Optional[pulumi.Input['SubResourceArgs']] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 load_balancing_settings: Optional[pulumi.Input['SubResourceArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        A backend pool is a collection of backends that can be routed to.
        :param pulumi.Input[Sequence[pulumi.Input['BackendArgs']]] backends: The set of backends for this pool
        :param pulumi.Input['SubResourceArgs'] health_probe_settings: L7 health probe settings for a backend pool
        :param pulumi.Input[str] id: Resource ID.
        :param pulumi.Input['SubResourceArgs'] load_balancing_settings: Load balancing settings for a backend pool
        :param pulumi.Input[str] name: Resource name.
        """
        # Only explicitly-provided (non-None) values are recorded on the object.
        # ('id' intentionally shadows the builtin to match the API field name.)
        if backends is not None:
            pulumi.set(__self__, "backends", backends)
        if health_probe_settings is not None:
            pulumi.set(__self__, "health_probe_settings", health_probe_settings)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if load_balancing_settings is not None:
            pulumi.set(__self__, "load_balancing_settings", load_balancing_settings)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def backends(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BackendArgs']]]]:
        """
        The set of backends for this pool
        """
        return pulumi.get(self, "backends")

    @backends.setter
    def backends(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BackendArgs']]]]):
        pulumi.set(self, "backends", value)

    @property
    @pulumi.getter(name="healthProbeSettings")
    def health_probe_settings(self) -> Optional[pulumi.Input['SubResourceArgs']]:
        """
        L7 health probe settings for a backend pool
        """
        return pulumi.get(self, "health_probe_settings")

    @health_probe_settings.setter
    def health_probe_settings(self, value: Optional[pulumi.Input['SubResourceArgs']]):
        pulumi.set(self, "health_probe_settings", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter(name="loadBalancingSettings")
    def load_balancing_settings(self) -> Optional[pulumi.Input['SubResourceArgs']]:
        """
        Load balancing settings for a backend pool
        """
        return pulumi.get(self, "load_balancing_settings")

    @load_balancing_settings.setter
    def load_balancing_settings(self, value: Optional[pulumi.Input['SubResourceArgs']]):
        pulumi.set(self, "load_balancing_settings", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class BackendPoolsSettingsArgs:
    # NOTE: generated by the Pulumi SDK Generator — regenerate rather than hand-edit.
    def __init__(__self__, *,
                 enforce_certificate_name_check: Optional[pulumi.Input[Union[str, 'EnforceCertificateNameCheckEnabledState']]] = None,
                 send_recv_timeout_seconds: Optional[pulumi.Input[int]] = None):
        """
        Settings that apply to all backend pools.
        :param pulumi.Input[Union[str, 'EnforceCertificateNameCheckEnabledState']] enforce_certificate_name_check: Whether to enforce certificate name check on HTTPS requests to all backend pools. No effect on non-HTTPS requests.
        :param pulumi.Input[int] send_recv_timeout_seconds: Send and receive timeout on forwarding request to the backend. When timeout is reached, the request fails and returns.
        """
        # Unlike the other optional fields, this one defaults to 'Enabled'
        # when omitted, so the subsequent is-not-None check is always true.
        if enforce_certificate_name_check is None:
            enforce_certificate_name_check = 'Enabled'
        if enforce_certificate_name_check is not None:
            pulumi.set(__self__, "enforce_certificate_name_check", enforce_certificate_name_check)
        if send_recv_timeout_seconds is not None:
            pulumi.set(__self__, "send_recv_timeout_seconds", send_recv_timeout_seconds)

    @property
    @pulumi.getter(name="enforceCertificateNameCheck")
    def enforce_certificate_name_check(self) -> Optional[pulumi.Input[Union[str, 'EnforceCertificateNameCheckEnabledState']]]:
        """
        Whether to enforce certificate name check on HTTPS requests to all backend pools. No effect on non-HTTPS requests.
        """
        return pulumi.get(self, "enforce_certificate_name_check")

    @enforce_certificate_name_check.setter
    def enforce_certificate_name_check(self, value: Optional[pulumi.Input[Union[str, 'EnforceCertificateNameCheckEnabledState']]]):
        pulumi.set(self, "enforce_certificate_name_check", value)

    @property
    @pulumi.getter(name="sendRecvTimeoutSeconds")
    def send_recv_timeout_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        Send and receive timeout on forwarding request to the backend. When timeout is reached, the request fails and returns.
        """
        return pulumi.get(self, "send_recv_timeout_seconds")

    @send_recv_timeout_seconds.setter
    def send_recv_timeout_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "send_recv_timeout_seconds", value)
@pulumi.input_type
class CacheConfigurationArgs:
    # NOTE: generated by the Pulumi SDK Generator — regenerate rather than hand-edit.
    def __init__(__self__, *,
                 dynamic_compression: Optional[pulumi.Input[Union[str, 'DynamicCompressionEnabled']]] = None,
                 query_parameter_strip_directive: Optional[pulumi.Input[Union[str, 'FrontDoorQuery']]] = None):
        """
        Caching settings for a caching-type route. To disable caching, do not provide a cacheConfiguration object.
        :param pulumi.Input[Union[str, 'DynamicCompressionEnabled']] dynamic_compression: Whether to use dynamic compression for cached content
        :param pulumi.Input[Union[str, 'FrontDoorQuery']] query_parameter_strip_directive: Treatment of URL query terms when forming the cache key.
        """
        # Only explicitly-provided (non-None) values are recorded on the object.
        if dynamic_compression is not None:
            pulumi.set(__self__, "dynamic_compression", dynamic_compression)
        if query_parameter_strip_directive is not None:
            pulumi.set(__self__, "query_parameter_strip_directive", query_parameter_strip_directive)

    @property
    @pulumi.getter(name="dynamicCompression")
    def dynamic_compression(self) -> Optional[pulumi.Input[Union[str, 'DynamicCompressionEnabled']]]:
        """
        Whether to use dynamic compression for cached content
        """
        return pulumi.get(self, "dynamic_compression")

    @dynamic_compression.setter
    def dynamic_compression(self, value: Optional[pulumi.Input[Union[str, 'DynamicCompressionEnabled']]]):
        pulumi.set(self, "dynamic_compression", value)

    @property
    @pulumi.getter(name="queryParameterStripDirective")
    def query_parameter_strip_directive(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorQuery']]]:
        """
        Treatment of URL query terms when forming the cache key.
        """
        return pulumi.get(self, "query_parameter_strip_directive")

    @query_parameter_strip_directive.setter
    def query_parameter_strip_directive(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorQuery']]]):
        pulumi.set(self, "query_parameter_strip_directive", value)
@pulumi.input_type
class ForwardingConfigurationArgs:
    # NOTE: generated by the Pulumi SDK Generator — regenerate rather than hand-edit.
    def __init__(__self__, *,
                 odata_type: pulumi.Input[str],
                 backend_pool: Optional[pulumi.Input['SubResourceArgs']] = None,
                 cache_configuration: Optional[pulumi.Input['CacheConfigurationArgs']] = None,
                 custom_forwarding_path: Optional[pulumi.Input[str]] = None,
                 forwarding_protocol: Optional[pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']]] = None):
        """
        Describes Forwarding Route.
        :param pulumi.Input[str] odata_type:
               Expected value is '#Microsoft.Azure.FrontDoor.Models.FrontdoorForwardingConfiguration'.
        :param pulumi.Input['SubResourceArgs'] backend_pool: A reference to the BackendPool which this rule routes to.
        :param pulumi.Input['CacheConfigurationArgs'] cache_configuration: The caching configuration associated with this rule.
        :param pulumi.Input[str] custom_forwarding_path: A custom path used to rewrite resource paths matched by this rule. Leave empty to use incoming path.
        :param pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']] forwarding_protocol: Protocol this rule will use when forwarding traffic to backends.
        """
        # The OData discriminator is pinned to the constant; the odata_type
        # argument's value is deliberately ignored here.
        pulumi.set(__self__, "odata_type", '#Microsoft.Azure.FrontDoor.Models.FrontdoorForwardingConfiguration')
        if backend_pool is not None:
            pulumi.set(__self__, "backend_pool", backend_pool)
        if cache_configuration is not None:
            pulumi.set(__self__, "cache_configuration", cache_configuration)
        if custom_forwarding_path is not None:
            pulumi.set(__self__, "custom_forwarding_path", custom_forwarding_path)
        if forwarding_protocol is not None:
            pulumi.set(__self__, "forwarding_protocol", forwarding_protocol)

    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> pulumi.Input[str]:
        """
        Expected value is '#Microsoft.Azure.FrontDoor.Models.FrontdoorForwardingConfiguration'.
        """
        return pulumi.get(self, "odata_type")

    @odata_type.setter
    def odata_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "odata_type", value)

    @property
    @pulumi.getter(name="backendPool")
    def backend_pool(self) -> Optional[pulumi.Input['SubResourceArgs']]:
        """
        A reference to the BackendPool which this rule routes to.
        """
        return pulumi.get(self, "backend_pool")

    @backend_pool.setter
    def backend_pool(self, value: Optional[pulumi.Input['SubResourceArgs']]):
        pulumi.set(self, "backend_pool", value)

    @property
    @pulumi.getter(name="cacheConfiguration")
    def cache_configuration(self) -> Optional[pulumi.Input['CacheConfigurationArgs']]:
        """
        The caching configuration associated with this rule.
        """
        return pulumi.get(self, "cache_configuration")

    @cache_configuration.setter
    def cache_configuration(self, value: Optional[pulumi.Input['CacheConfigurationArgs']]):
        pulumi.set(self, "cache_configuration", value)

    @property
    @pulumi.getter(name="customForwardingPath")
    def custom_forwarding_path(self) -> Optional[pulumi.Input[str]]:
        """
        A custom path used to rewrite resource paths matched by this rule. Leave empty to use incoming path.
        """
        return pulumi.get(self, "custom_forwarding_path")

    @custom_forwarding_path.setter
    def custom_forwarding_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_forwarding_path", value)

    @property
    @pulumi.getter(name="forwardingProtocol")
    def forwarding_protocol(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']]]:
        """
        Protocol this rule will use when forwarding traffic to backends.
        """
        return pulumi.get(self, "forwarding_protocol")

    @forwarding_protocol.setter
    def forwarding_protocol(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']]]):
        pulumi.set(self, "forwarding_protocol", value)
@pulumi.input_type
class FrontendEndpointArgs:
    def __init__(__self__, *,
                 host_name: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 session_affinity_enabled_state: Optional[pulumi.Input[Union[str, 'SessionAffinityEnabledState']]] = None,
                 session_affinity_ttl_seconds: Optional[pulumi.Input[int]] = None,
                 web_application_firewall_policy_link: Optional[pulumi.Input['FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs']] = None):
        """
        A frontend endpoint used for routing.
        :param pulumi.Input[str] host_name: The host name of the frontendEndpoint. Must be a domain name.
        :param pulumi.Input[str] id: Resource ID.
        :param pulumi.Input[str] name: Resource name.
        :param pulumi.Input[Union[str, 'SessionAffinityEnabledState']] session_affinity_enabled_state: Whether to allow session affinity on this host. Valid options are 'Enabled' or 'Disabled'
        :param pulumi.Input[int] session_affinity_ttl_seconds: UNUSED. This field will be ignored. The TTL to use in seconds for session affinity, if applicable.
        :param pulumi.Input['FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs'] web_application_firewall_policy_link: Defines the Web Application Firewall policy for each host (if applicable)
        """
        # Only persist arguments the caller actually supplied; omitted keys
        # stay absent from the Pulumi property bag.
        if host_name is not None:
            pulumi.set(__self__, "host_name", host_name)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if session_affinity_enabled_state is not None:
            pulumi.set(__self__, "session_affinity_enabled_state", session_affinity_enabled_state)
        if session_affinity_ttl_seconds is not None:
            pulumi.set(__self__, "session_affinity_ttl_seconds", session_affinity_ttl_seconds)
        if web_application_firewall_policy_link is not None:
            pulumi.set(__self__, "web_application_firewall_policy_link", web_application_firewall_policy_link)
    @property
    @pulumi.getter(name="hostName")
    def host_name(self) -> Optional[pulumi.Input[str]]:
        """
        The host name of the frontendEndpoint. Must be a domain name.
        """
        return pulumi.get(self, "host_name")
    @host_name.setter
    def host_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_name", value)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="sessionAffinityEnabledState")
    def session_affinity_enabled_state(self) -> Optional[pulumi.Input[Union[str, 'SessionAffinityEnabledState']]]:
        """
        Whether to allow session affinity on this host. Valid options are 'Enabled' or 'Disabled'
        """
        return pulumi.get(self, "session_affinity_enabled_state")
    @session_affinity_enabled_state.setter
    def session_affinity_enabled_state(self, value: Optional[pulumi.Input[Union[str, 'SessionAffinityEnabledState']]]):
        pulumi.set(self, "session_affinity_enabled_state", value)
    @property
    @pulumi.getter(name="sessionAffinityTtlSeconds")
    def session_affinity_ttl_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        UNUSED. This field will be ignored. The TTL to use in seconds for session affinity, if applicable.
        """
        return pulumi.get(self, "session_affinity_ttl_seconds")
    @session_affinity_ttl_seconds.setter
    def session_affinity_ttl_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "session_affinity_ttl_seconds", value)
    @property
    @pulumi.getter(name="webApplicationFirewallPolicyLink")
    def web_application_firewall_policy_link(self) -> Optional[pulumi.Input['FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs']]:
        """
        Defines the Web Application Firewall policy for each host (if applicable)
        """
        return pulumi.get(self, "web_application_firewall_policy_link")
    @web_application_firewall_policy_link.setter
    def web_application_firewall_policy_link(self, value: Optional[pulumi.Input['FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs']]):
        pulumi.set(self, "web_application_firewall_policy_link", value)
@pulumi.input_type
class FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs:
    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None):
        """Defines the Web Application Firewall policy for each host (if applicable).

        :param pulumi.Input[str] id: Resource ID.
        """
        # Nothing to record unless the caller supplied an ID.
        if id is None:
            return
        pulumi.set(__self__, "id", id)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """Resource ID."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
@pulumi.input_type
class HealthProbeSettingsModelArgs:
    def __init__(__self__, *,
                 enabled_state: Optional[pulumi.Input[Union[str, 'HealthProbeEnabled']]] = None,
                 health_probe_method: Optional[pulumi.Input[Union[str, 'FrontDoorHealthProbeMethod']]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 interval_in_seconds: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 protocol: Optional[pulumi.Input[Union[str, 'FrontDoorProtocol']]] = None):
        """
        Health probe settings for a backend pool.
        (Generator note: original docstring said "Load balancing settings" —
        a copy-paste from LoadBalancingSettingsModelArgs; the fields below are
        clearly probe settings.)
        :param pulumi.Input[Union[str, 'HealthProbeEnabled']] enabled_state: Whether to enable health probes to be made against backends defined under backendPools. Health probes can only be disabled if there is a single enabled backend in single enabled backend pool.
        :param pulumi.Input[Union[str, 'FrontDoorHealthProbeMethod']] health_probe_method: Configures which HTTP method to use to probe the backends defined under backendPools.
        :param pulumi.Input[str] id: Resource ID.
        :param pulumi.Input[int] interval_in_seconds: The number of seconds between health probes.
        :param pulumi.Input[str] name: Resource name.
        :param pulumi.Input[str] path: The path to use for the health probe. Default is /
        :param pulumi.Input[Union[str, 'FrontDoorProtocol']] protocol: Protocol scheme to use for this probe
        """
        if enabled_state is not None:
            pulumi.set(__self__, "enabled_state", enabled_state)
        # Default the probe method to 'HEAD' when the caller did not specify one.
        if health_probe_method is None:
            health_probe_method = 'HEAD'
        if health_probe_method is not None:
            pulumi.set(__self__, "health_probe_method", health_probe_method)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if interval_in_seconds is not None:
            pulumi.set(__self__, "interval_in_seconds", interval_in_seconds)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if protocol is not None:
            pulumi.set(__self__, "protocol", protocol)
    @property
    @pulumi.getter(name="enabledState")
    def enabled_state(self) -> Optional[pulumi.Input[Union[str, 'HealthProbeEnabled']]]:
        """
        Whether to enable health probes to be made against backends defined under backendPools. Health probes can only be disabled if there is a single enabled backend in single enabled backend pool.
        """
        return pulumi.get(self, "enabled_state")
    @enabled_state.setter
    def enabled_state(self, value: Optional[pulumi.Input[Union[str, 'HealthProbeEnabled']]]):
        pulumi.set(self, "enabled_state", value)
    @property
    @pulumi.getter(name="healthProbeMethod")
    def health_probe_method(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorHealthProbeMethod']]]:
        """
        Configures which HTTP method to use to probe the backends defined under backendPools.
        """
        return pulumi.get(self, "health_probe_method")
    @health_probe_method.setter
    def health_probe_method(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorHealthProbeMethod']]]):
        pulumi.set(self, "health_probe_method", value)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter(name="intervalInSeconds")
    def interval_in_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        The number of seconds between health probes.
        """
        return pulumi.get(self, "interval_in_seconds")
    @interval_in_seconds.setter
    def interval_in_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "interval_in_seconds", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """
        The path to use for the health probe. Default is /
        """
        return pulumi.get(self, "path")
    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)
    @property
    @pulumi.getter
    def protocol(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorProtocol']]]:
        """
        Protocol scheme to use for this probe
        """
        return pulumi.get(self, "protocol")
    @protocol.setter
    def protocol(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorProtocol']]]):
        pulumi.set(self, "protocol", value)
@pulumi.input_type
class LoadBalancingSettingsModelArgs:
    def __init__(__self__, *,
                 additional_latency_milliseconds: Optional[pulumi.Input[int]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 sample_size: Optional[pulumi.Input[int]] = None,
                 successful_samples_required: Optional[pulumi.Input[int]] = None):
        """
        Load balancing settings for a backend pool
        :param pulumi.Input[int] additional_latency_milliseconds: The additional latency in milliseconds for probes to fall into the lowest latency bucket
        :param pulumi.Input[str] id: Resource ID.
        :param pulumi.Input[str] name: Resource name.
        :param pulumi.Input[int] sample_size: The number of samples to consider for load balancing decisions
        :param pulumi.Input[int] successful_samples_required: The number of samples within the sample period that must succeed
        """
        # Only persist arguments the caller actually supplied.
        if additional_latency_milliseconds is not None:
            pulumi.set(__self__, "additional_latency_milliseconds", additional_latency_milliseconds)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if sample_size is not None:
            pulumi.set(__self__, "sample_size", sample_size)
        if successful_samples_required is not None:
            pulumi.set(__self__, "successful_samples_required", successful_samples_required)
    @property
    @pulumi.getter(name="additionalLatencyMilliseconds")
    def additional_latency_milliseconds(self) -> Optional[pulumi.Input[int]]:
        """
        The additional latency in milliseconds for probes to fall into the lowest latency bucket
        """
        return pulumi.get(self, "additional_latency_milliseconds")
    @additional_latency_milliseconds.setter
    def additional_latency_milliseconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "additional_latency_milliseconds", value)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="sampleSize")
    def sample_size(self) -> Optional[pulumi.Input[int]]:
        """
        The number of samples to consider for load balancing decisions
        """
        return pulumi.get(self, "sample_size")
    @sample_size.setter
    def sample_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "sample_size", value)
    @property
    @pulumi.getter(name="successfulSamplesRequired")
    def successful_samples_required(self) -> Optional[pulumi.Input[int]]:
        """
        The number of samples within the sample period that must succeed
        """
        return pulumi.get(self, "successful_samples_required")
    @successful_samples_required.setter
    def successful_samples_required(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "successful_samples_required", value)
@pulumi.input_type
class RedirectConfigurationArgs:
    def __init__(__self__, *,
                 odata_type: pulumi.Input[str],
                 custom_fragment: Optional[pulumi.Input[str]] = None,
                 custom_host: Optional[pulumi.Input[str]] = None,
                 custom_path: Optional[pulumi.Input[str]] = None,
                 custom_query_string: Optional[pulumi.Input[str]] = None,
                 redirect_protocol: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectProtocol']]] = None,
                 redirect_type: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectType']]] = None):
        """
        Describes Redirect Route.
        :param pulumi.Input[str] odata_type:
               Expected value is '#Microsoft.Azure.FrontDoor.Models.FrontdoorRedirectConfiguration'.
        :param pulumi.Input[str] custom_fragment: Fragment to add to the redirect URL. Fragment is the part of the URL that comes after #. Do not include the #.
        :param pulumi.Input[str] custom_host: Host to redirect. Leave empty to use the incoming host as the destination host.
        :param pulumi.Input[str] custom_path: The full path to redirect. Path cannot be empty and must start with /. Leave empty to use the incoming path as destination path.
        :param pulumi.Input[str] custom_query_string: The set of query strings to be placed in the redirect URL. Setting this value would replace any existing query string; leave empty to preserve the incoming query string. Query string must be in <key>=<value> format. The first ? and & will be added automatically so do not include them in the front, but do separate multiple query strings with &.
        :param pulumi.Input[Union[str, 'FrontDoorRedirectProtocol']] redirect_protocol: The protocol of the destination to where the traffic is redirected
        :param pulumi.Input[Union[str, 'FrontDoorRedirectType']] redirect_type: The redirect type the rule will use when redirecting traffic.
        """
        # The discriminator is always forced to the redirect-configuration
        # constant; any caller-supplied odata_type value is ignored.
        pulumi.set(__self__, "odata_type", '#Microsoft.Azure.FrontDoor.Models.FrontdoorRedirectConfiguration')
        if custom_fragment is not None:
            pulumi.set(__self__, "custom_fragment", custom_fragment)
        if custom_host is not None:
            pulumi.set(__self__, "custom_host", custom_host)
        if custom_path is not None:
            pulumi.set(__self__, "custom_path", custom_path)
        if custom_query_string is not None:
            pulumi.set(__self__, "custom_query_string", custom_query_string)
        if redirect_protocol is not None:
            pulumi.set(__self__, "redirect_protocol", redirect_protocol)
        if redirect_type is not None:
            pulumi.set(__self__, "redirect_type", redirect_type)
    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> pulumi.Input[str]:
        """
        Expected value is '#Microsoft.Azure.FrontDoor.Models.FrontdoorRedirectConfiguration'.
        """
        return pulumi.get(self, "odata_type")
    @odata_type.setter
    def odata_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "odata_type", value)
    @property
    @pulumi.getter(name="customFragment")
    def custom_fragment(self) -> Optional[pulumi.Input[str]]:
        """
        Fragment to add to the redirect URL. Fragment is the part of the URL that comes after #. Do not include the #.
        """
        return pulumi.get(self, "custom_fragment")
    @custom_fragment.setter
    def custom_fragment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_fragment", value)
    @property
    @pulumi.getter(name="customHost")
    def custom_host(self) -> Optional[pulumi.Input[str]]:
        """
        Host to redirect. Leave empty to use the incoming host as the destination host.
        """
        return pulumi.get(self, "custom_host")
    @custom_host.setter
    def custom_host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_host", value)
    @property
    @pulumi.getter(name="customPath")
    def custom_path(self) -> Optional[pulumi.Input[str]]:
        """
        The full path to redirect. Path cannot be empty and must start with /. Leave empty to use the incoming path as destination path.
        """
        return pulumi.get(self, "custom_path")
    @custom_path.setter
    def custom_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_path", value)
    @property
    @pulumi.getter(name="customQueryString")
    def custom_query_string(self) -> Optional[pulumi.Input[str]]:
        """
        The set of query strings to be placed in the redirect URL. Setting this value would replace any existing query string; leave empty to preserve the incoming query string. Query string must be in <key>=<value> format. The first ? and & will be added automatically so do not include them in the front, but do separate multiple query strings with &.
        """
        return pulumi.get(self, "custom_query_string")
    @custom_query_string.setter
    def custom_query_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_query_string", value)
    @property
    @pulumi.getter(name="redirectProtocol")
    def redirect_protocol(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorRedirectProtocol']]]:
        """
        The protocol of the destination to where the traffic is redirected
        """
        return pulumi.get(self, "redirect_protocol")
    @redirect_protocol.setter
    def redirect_protocol(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectProtocol']]]):
        pulumi.set(self, "redirect_protocol", value)
    @property
    @pulumi.getter(name="redirectType")
    def redirect_type(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorRedirectType']]]:
        """
        The redirect type the rule will use when redirecting traffic.
        """
        return pulumi.get(self, "redirect_type")
    @redirect_type.setter
    def redirect_type(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectType']]]):
        pulumi.set(self, "redirect_type", value)
@pulumi.input_type
class RoutingRuleArgs:
    def __init__(__self__, *,
                 accepted_protocols: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'FrontDoorProtocol']]]]] = None,
                 enabled_state: Optional[pulumi.Input[Union[str, 'RoutingRuleEnabledState']]] = None,
                 frontend_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 patterns_to_match: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 route_configuration: Optional[pulumi.Input[Union['ForwardingConfigurationArgs', 'RedirectConfigurationArgs']]] = None):
        """
        A routing rule represents a specification for traffic to treat and where to send it, along with health probe information.
        :param pulumi.Input[Sequence[pulumi.Input[Union[str, 'FrontDoorProtocol']]]] accepted_protocols: Protocol schemes to match for this rule
        :param pulumi.Input[Union[str, 'RoutingRuleEnabledState']] enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or 'Disabled'
        :param pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]] frontend_endpoints: Frontend endpoints associated with this rule
        :param pulumi.Input[str] id: Resource ID.
        :param pulumi.Input[str] name: Resource name.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] patterns_to_match: The route patterns of the rule.
        :param pulumi.Input[Union['ForwardingConfigurationArgs', 'RedirectConfigurationArgs']] route_configuration: A reference to the routing configuration.
        """
        # Only persist arguments the caller actually supplied; omitted keys
        # stay absent from the Pulumi property bag.
        if accepted_protocols is not None:
            pulumi.set(__self__, "accepted_protocols", accepted_protocols)
        if enabled_state is not None:
            pulumi.set(__self__, "enabled_state", enabled_state)
        if frontend_endpoints is not None:
            pulumi.set(__self__, "frontend_endpoints", frontend_endpoints)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if patterns_to_match is not None:
            pulumi.set(__self__, "patterns_to_match", patterns_to_match)
        if route_configuration is not None:
            pulumi.set(__self__, "route_configuration", route_configuration)
    @property
    @pulumi.getter(name="acceptedProtocols")
    def accepted_protocols(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'FrontDoorProtocol']]]]]:
        """
        Protocol schemes to match for this rule
        """
        return pulumi.get(self, "accepted_protocols")
    @accepted_protocols.setter
    def accepted_protocols(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'FrontDoorProtocol']]]]]):
        pulumi.set(self, "accepted_protocols", value)
    @property
    @pulumi.getter(name="enabledState")
    def enabled_state(self) -> Optional[pulumi.Input[Union[str, 'RoutingRuleEnabledState']]]:
        """
        Whether to enable use of this rule. Permitted values are 'Enabled' or 'Disabled'
        """
        return pulumi.get(self, "enabled_state")
    @enabled_state.setter
    def enabled_state(self, value: Optional[pulumi.Input[Union[str, 'RoutingRuleEnabledState']]]):
        pulumi.set(self, "enabled_state", value)
    @property
    @pulumi.getter(name="frontendEndpoints")
    def frontend_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]:
        """
        Frontend endpoints associated with this rule
        """
        return pulumi.get(self, "frontend_endpoints")
    @frontend_endpoints.setter
    def frontend_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]):
        pulumi.set(self, "frontend_endpoints", value)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="patternsToMatch")
    def patterns_to_match(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The route patterns of the rule.
        """
        return pulumi.get(self, "patterns_to_match")
    @patterns_to_match.setter
    def patterns_to_match(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "patterns_to_match", value)
    @property
    @pulumi.getter(name="routeConfiguration")
    def route_configuration(self) -> Optional[pulumi.Input[Union['ForwardingConfigurationArgs', 'RedirectConfigurationArgs']]]:
        """
        A reference to the routing configuration.
        """
        return pulumi.get(self, "route_configuration")
    @route_configuration.setter
    def route_configuration(self, value: Optional[pulumi.Input[Union['ForwardingConfigurationArgs', 'RedirectConfigurationArgs']]]):
        pulumi.set(self, "route_configuration", value)
@pulumi.input_type
class SubResourceArgs:
    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None):
        """Reference to another subresource.

        :param pulumi.Input[str] id: Resource ID.
        """
        # Record the ID only when one was supplied.
        if id is None:
            return
        pulumi.set(__self__, "id", id)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """Resource ID."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
| 43.698712
| 399
| 0.673909
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *
# Public API of this module: one input-type Args class per Front Door API shape.
__all__ = [
    'BackendArgs',
    'BackendPoolArgs',
    'BackendPoolsSettingsArgs',
    'CacheConfigurationArgs',
    'ForwardingConfigurationArgs',
    'FrontendEndpointArgs',
    'FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs',
    'HealthProbeSettingsModelArgs',
    'LoadBalancingSettingsModelArgs',
    'RedirectConfigurationArgs',
    'RoutingRuleArgs',
    'SubResourceArgs',
]
@pulumi.input_type
class BackendArgs:
    def __init__(__self__, *,
                 address: Optional[pulumi.Input[str]] = None,
                 backend_host_header: Optional[pulumi.Input[str]] = None,
                 enabled_state: Optional[pulumi.Input[Union[str, 'BackendEnabledState']]] = None,
                 http_port: Optional[pulumi.Input[int]] = None,
                 https_port: Optional[pulumi.Input[int]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        Input properties for a Front Door backend.

        All arguments are optional; only supplied values are recorded in the
        Pulumi property bag under their camelCase wire names.
        NOTE(review): valid ranges for ports/priority/weight are not visible
        here — confirm against the Azure Front Door API spec.
        """
        if address is not None:
            pulumi.set(__self__, "address", address)
        if backend_host_header is not None:
            pulumi.set(__self__, "backend_host_header", backend_host_header)
        if enabled_state is not None:
            pulumi.set(__self__, "enabled_state", enabled_state)
        if http_port is not None:
            pulumi.set(__self__, "http_port", http_port)
        if https_port is not None:
            pulumi.set(__self__, "https_port", https_port)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)
    @property
    @pulumi.getter
    def address(self) -> Optional[pulumi.Input[str]]:
        """Address of the backend."""
        return pulumi.get(self, "address")
    @address.setter
    def address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address", value)
    @property
    @pulumi.getter(name="backendHostHeader")
    def backend_host_header(self) -> Optional[pulumi.Input[str]]:
        """Backend host header (wire name ``backendHostHeader``)."""
        return pulumi.get(self, "backend_host_header")
    @backend_host_header.setter
    def backend_host_header(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend_host_header", value)
    @property
    @pulumi.getter(name="enabledState")
    def enabled_state(self) -> Optional[pulumi.Input[Union[str, 'BackendEnabledState']]]:
        """Enabled state of the backend; a ``BackendEnabledState`` value or its string form."""
        return pulumi.get(self, "enabled_state")
    @enabled_state.setter
    def enabled_state(self, value: Optional[pulumi.Input[Union[str, 'BackendEnabledState']]]):
        pulumi.set(self, "enabled_state", value)
    @property
    @pulumi.getter(name="httpPort")
    def http_port(self) -> Optional[pulumi.Input[int]]:
        """HTTP port of the backend."""
        return pulumi.get(self, "http_port")
    @http_port.setter
    def http_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "http_port", value)
    @property
    @pulumi.getter(name="httpsPort")
    def https_port(self) -> Optional[pulumi.Input[int]]:
        """HTTPS port of the backend."""
        return pulumi.get(self, "https_port")
    @https_port.setter
    def https_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "https_port", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """Priority of the backend (integer; semantics per the Front Door API)."""
        return pulumi.get(self, "priority")
    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """Weight of the backend (integer; semantics per the Front Door API)."""
        return pulumi.get(self, "weight")
    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
@pulumi.input_type
class BackendPoolArgs:
    def __init__(__self__, *,
                 backends: Optional[pulumi.Input[Sequence[pulumi.Input['BackendArgs']]]] = None,
                 health_probe_settings: Optional[pulumi.Input['SubResourceArgs']] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 load_balancing_settings: Optional[pulumi.Input['SubResourceArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        Input properties for a Front Door backend pool.

        All arguments are optional; only supplied values are recorded in the
        Pulumi property bag.
        """
        if backends is not None:
            pulumi.set(__self__, "backends", backends)
        if health_probe_settings is not None:
            pulumi.set(__self__, "health_probe_settings", health_probe_settings)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if load_balancing_settings is not None:
            pulumi.set(__self__, "load_balancing_settings", load_balancing_settings)
        if name is not None:
            pulumi.set(__self__, "name", name)
    @property
    @pulumi.getter
    def backends(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BackendArgs']]]]:
        """The set of backends in this pool."""
        return pulumi.get(self, "backends")
    @backends.setter
    def backends(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BackendArgs']]]]):
        pulumi.set(self, "backends", value)
    @property
    @pulumi.getter(name="healthProbeSettings")
    def health_probe_settings(self) -> Optional[pulumi.Input['SubResourceArgs']]:
        """Subresource reference to the health probe settings for this pool."""
        return pulumi.get(self, "health_probe_settings")
    @health_probe_settings.setter
    def health_probe_settings(self, value: Optional[pulumi.Input['SubResourceArgs']]):
        pulumi.set(self, "health_probe_settings", value)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """Resource ID."""
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter(name="loadBalancingSettings")
    def load_balancing_settings(self) -> Optional[pulumi.Input['SubResourceArgs']]:
        """Subresource reference to the load balancing settings for this pool."""
        return pulumi.get(self, "load_balancing_settings")
    @load_balancing_settings.setter
    def load_balancing_settings(self, value: Optional[pulumi.Input['SubResourceArgs']]):
        pulumi.set(self, "load_balancing_settings", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Resource name."""
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class BackendPoolsSettingsArgs:
    def __init__(__self__, *,
                 enforce_certificate_name_check: Optional[pulumi.Input[Union[str, 'EnforceCertificateNameCheckEnabledState']]] = None,
                 send_recv_timeout_seconds: Optional[pulumi.Input[int]] = None):
        """
        Settings that apply to all backend pools.

        ``enforce_certificate_name_check`` defaults to ``'Enabled'`` when not
        supplied; ``send_recv_timeout_seconds`` is only set when supplied.
        """
        # Default the certificate-name check to 'Enabled' when unspecified.
        if enforce_certificate_name_check is None:
            enforce_certificate_name_check = 'Enabled'
        if enforce_certificate_name_check is not None:
            pulumi.set(__self__, "enforce_certificate_name_check", enforce_certificate_name_check)
        if send_recv_timeout_seconds is not None:
            pulumi.set(__self__, "send_recv_timeout_seconds", send_recv_timeout_seconds)
    @property
    @pulumi.getter(name="enforceCertificateNameCheck")
    def enforce_certificate_name_check(self) -> Optional[pulumi.Input[Union[str, 'EnforceCertificateNameCheckEnabledState']]]:
        """Whether to enforce the certificate name check; defaults to 'Enabled'."""
        return pulumi.get(self, "enforce_certificate_name_check")
    @enforce_certificate_name_check.setter
    def enforce_certificate_name_check(self, value: Optional[pulumi.Input[Union[str, 'EnforceCertificateNameCheckEnabledState']]]):
        pulumi.set(self, "enforce_certificate_name_check", value)
    @property
    @pulumi.getter(name="sendRecvTimeoutSeconds")
    def send_recv_timeout_seconds(self) -> Optional[pulumi.Input[int]]:
        """Send/receive timeout in seconds (wire name ``sendRecvTimeoutSeconds``)."""
        return pulumi.get(self, "send_recv_timeout_seconds")
    @send_recv_timeout_seconds.setter
    def send_recv_timeout_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "send_recv_timeout_seconds", value)
@pulumi.input_type
class CacheConfigurationArgs:
    def __init__(__self__, *,
                 dynamic_compression: Optional[pulumi.Input[Union[str, 'DynamicCompressionEnabled']]] = None,
                 query_parameter_strip_directive: Optional[pulumi.Input[Union[str, 'FrontDoorQuery']]] = None):
        """
        Caching settings for a route; both arguments are optional and only
        supplied values are recorded in the Pulumi property bag.
        """
        if dynamic_compression is not None:
            pulumi.set(__self__, "dynamic_compression", dynamic_compression)
        if query_parameter_strip_directive is not None:
            pulumi.set(__self__, "query_parameter_strip_directive", query_parameter_strip_directive)
    @property
    @pulumi.getter(name="dynamicCompression")
    def dynamic_compression(self) -> Optional[pulumi.Input[Union[str, 'DynamicCompressionEnabled']]]:
        """Whether dynamic compression is enabled; a ``DynamicCompressionEnabled`` value or its string form."""
        return pulumi.get(self, "dynamic_compression")
    @dynamic_compression.setter
    def dynamic_compression(self, value: Optional[pulumi.Input[Union[str, 'DynamicCompressionEnabled']]]):
        pulumi.set(self, "dynamic_compression", value)
    @property
    @pulumi.getter(name="queryParameterStripDirective")
    def query_parameter_strip_directive(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorQuery']]]:
        """Query-parameter strip directive; a ``FrontDoorQuery`` value or its string form."""
        return pulumi.get(self, "query_parameter_strip_directive")
    @query_parameter_strip_directive.setter
    def query_parameter_strip_directive(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorQuery']]]):
        pulumi.set(self, "query_parameter_strip_directive", value)
@pulumi.input_type
class ForwardingConfigurationArgs:
    def __init__(__self__, *,
                 odata_type: pulumi.Input[str],
                 backend_pool: Optional[pulumi.Input['SubResourceArgs']] = None,
                 cache_configuration: Optional[pulumi.Input['CacheConfigurationArgs']] = None,
                 custom_forwarding_path: Optional[pulumi.Input[str]] = None,
                 forwarding_protocol: Optional[pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']]] = None):
        """
        Describes Forwarding Route.
        :param pulumi.Input[str] odata_type:
               Expected value is '#Microsoft.Azure.FrontDoor.Models.FrontdoorForwardingConfiguration'.
        :param pulumi.Input['SubResourceArgs'] backend_pool: A reference to the BackendPool which this rule routes to.
        :param pulumi.Input['CacheConfigurationArgs'] cache_configuration: The caching configuration associated with this rule.
        :param pulumi.Input[str] custom_forwarding_path: A custom path used to rewrite resource paths matched by this rule. Leave empty to use incoming path.
        :param pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']] forwarding_protocol: Protocol this rule will use when forwarding traffic to backends.
        """
        # BUG FIX: the string literal below had been truncated at its leading
        # '#' (an automated comment stripper cut the line mid-literal, leaving
        # a syntax error). Restored the full discriminator constant; it is
        # always forced, ignoring the caller-supplied odata_type.
        pulumi.set(__self__, "odata_type", '#Microsoft.Azure.FrontDoor.Models.FrontdoorForwardingConfiguration')
        if backend_pool is not None:
            pulumi.set(__self__, "backend_pool", backend_pool)
        if cache_configuration is not None:
            pulumi.set(__self__, "cache_configuration", cache_configuration)
        if custom_forwarding_path is not None:
            pulumi.set(__self__, "custom_forwarding_path", custom_forwarding_path)
        if forwarding_protocol is not None:
            pulumi.set(__self__, "forwarding_protocol", forwarding_protocol)

    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> pulumi.Input[str]:
        """
        Expected value is '#Microsoft.Azure.FrontDoor.Models.FrontdoorForwardingConfiguration'.
        """
        return pulumi.get(self, "odata_type")

    @odata_type.setter
    def odata_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "odata_type", value)

    @property
    @pulumi.getter(name="backendPool")
    def backend_pool(self) -> Optional[pulumi.Input['SubResourceArgs']]:
        """
        A reference to the BackendPool which this rule routes to.
        """
        return pulumi.get(self, "backend_pool")

    @backend_pool.setter
    def backend_pool(self, value: Optional[pulumi.Input['SubResourceArgs']]):
        pulumi.set(self, "backend_pool", value)

    @property
    @pulumi.getter(name="cacheConfiguration")
    def cache_configuration(self) -> Optional[pulumi.Input['CacheConfigurationArgs']]:
        """
        The caching configuration associated with this rule.
        """
        return pulumi.get(self, "cache_configuration")

    @cache_configuration.setter
    def cache_configuration(self, value: Optional[pulumi.Input['CacheConfigurationArgs']]):
        pulumi.set(self, "cache_configuration", value)

    @property
    @pulumi.getter(name="customForwardingPath")
    def custom_forwarding_path(self) -> Optional[pulumi.Input[str]]:
        """
        A custom path used to rewrite resource paths matched by this rule. Leave empty to use incoming path.
        """
        return pulumi.get(self, "custom_forwarding_path")

    @custom_forwarding_path.setter
    def custom_forwarding_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_forwarding_path", value)

    @property
    @pulumi.getter(name="forwardingProtocol")
    def forwarding_protocol(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']]]:
        """
        Protocol this rule will use when forwarding traffic to backends.
        """
        return pulumi.get(self, "forwarding_protocol")

    @forwarding_protocol.setter
    def forwarding_protocol(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorForwardingProtocol']]]):
        pulumi.set(self, "forwarding_protocol", value)
@pulumi.input_type
class FrontendEndpointArgs:
    """Pulumi input type for a Front Door frontend endpoint (host name,
    session affinity settings and an optional WAF policy link).
    """

    def __init__(__self__, *,
                 host_name: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 session_affinity_enabled_state: Optional[pulumi.Input[Union[str, 'SessionAffinityEnabledState']]] = None,
                 session_affinity_ttl_seconds: Optional[pulumi.Input[int]] = None,
                 web_application_firewall_policy_link: Optional[pulumi.Input['FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs']] = None):
        # Every argument is optional; only store the ones actually given.
        supplied = (
            ("host_name", host_name),
            ("id", id),
            ("name", name),
            ("session_affinity_enabled_state", session_affinity_enabled_state),
            ("session_affinity_ttl_seconds", session_affinity_ttl_seconds),
            ("web_application_firewall_policy_link", web_application_firewall_policy_link),
        )
        for attr_name, attr_value in supplied:
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="hostName")
    def host_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "host_name")

    @host_name.setter
    def host_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_name", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="sessionAffinityEnabledState")
    def session_affinity_enabled_state(self) -> Optional[pulumi.Input[Union[str, 'SessionAffinityEnabledState']]]:
        return pulumi.get(self, "session_affinity_enabled_state")

    @session_affinity_enabled_state.setter
    def session_affinity_enabled_state(self, value: Optional[pulumi.Input[Union[str, 'SessionAffinityEnabledState']]]):
        pulumi.set(self, "session_affinity_enabled_state", value)

    @property
    @pulumi.getter(name="sessionAffinityTtlSeconds")
    def session_affinity_ttl_seconds(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "session_affinity_ttl_seconds")

    @session_affinity_ttl_seconds.setter
    def session_affinity_ttl_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "session_affinity_ttl_seconds", value)

    @property
    @pulumi.getter(name="webApplicationFirewallPolicyLink")
    def web_application_firewall_policy_link(self) -> Optional[pulumi.Input['FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs']]:
        return pulumi.get(self, "web_application_firewall_policy_link")

    @web_application_firewall_policy_link.setter
    def web_application_firewall_policy_link(self, value: Optional[pulumi.Input['FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs']]):
        pulumi.set(self, "web_application_firewall_policy_link", value)
@pulumi.input_type
class FrontendEndpointUpdateParametersWebApplicationFirewallPolicyLinkArgs:
    """Pulumi input type holding the resource id that links a frontend
    endpoint to a Web Application Firewall policy."""

    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None):
        # Only record the id when it was actually supplied.
        if id is not None:
            pulumi.set(__self__, "id", id)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        # Resource id of the linked policy.
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
@pulumi.input_type
class HealthProbeSettingsModelArgs:
    """Pulumi input type for health-probe settings (probe method, path,
    protocol, interval and enabled state)."""

    def __init__(__self__, *,
                 enabled_state: Optional[pulumi.Input[Union[str, 'HealthProbeEnabled']]] = None,
                 health_probe_method: Optional[pulumi.Input[Union[str, 'FrontDoorHealthProbeMethod']]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 interval_in_seconds: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 protocol: Optional[pulumi.Input[Union[str, 'FrontDoorProtocol']]] = None):
        if enabled_state is not None:
            pulumi.set(__self__, "enabled_state", enabled_state)
        # The probe method defaults to 'HEAD', so it is always stored.
        if health_probe_method is None:
            health_probe_method = 'HEAD'
        pulumi.set(__self__, "health_probe_method", health_probe_method)
        # The remaining arguments are stored only when supplied.
        remaining = (
            ("id", id),
            ("interval_in_seconds", interval_in_seconds),
            ("name", name),
            ("path", path),
            ("protocol", protocol),
        )
        for attr_name, attr_value in remaining:
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="enabledState")
    def enabled_state(self) -> Optional[pulumi.Input[Union[str, 'HealthProbeEnabled']]]:
        return pulumi.get(self, "enabled_state")

    @enabled_state.setter
    def enabled_state(self, value: Optional[pulumi.Input[Union[str, 'HealthProbeEnabled']]]):
        pulumi.set(self, "enabled_state", value)

    @property
    @pulumi.getter(name="healthProbeMethod")
    def health_probe_method(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorHealthProbeMethod']]]:
        return pulumi.get(self, "health_probe_method")

    @health_probe_method.setter
    def health_probe_method(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorHealthProbeMethod']]]):
        pulumi.set(self, "health_probe_method", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter(name="intervalInSeconds")
    def interval_in_seconds(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "interval_in_seconds")

    @interval_in_seconds.setter
    def interval_in_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "interval_in_seconds", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def protocol(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorProtocol']]]:
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorProtocol']]]):
        pulumi.set(self, "protocol", value)
@pulumi.input_type
class LoadBalancingSettingsModelArgs:
    """Pulumi input type for load-balancing settings (sample size, required
    successful samples, extra latency allowance)."""

    def __init__(__self__, *,
                 additional_latency_milliseconds: Optional[pulumi.Input[int]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 sample_size: Optional[pulumi.Input[int]] = None,
                 successful_samples_required: Optional[pulumi.Input[int]] = None):
        # Every argument is optional; forward only those actually given.
        supplied = (
            ("additional_latency_milliseconds", additional_latency_milliseconds),
            ("id", id),
            ("name", name),
            ("sample_size", sample_size),
            ("successful_samples_required", successful_samples_required),
        )
        for attr_name, attr_value in supplied:
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="additionalLatencyMilliseconds")
    def additional_latency_milliseconds(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "additional_latency_milliseconds")

    @additional_latency_milliseconds.setter
    def additional_latency_milliseconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "additional_latency_milliseconds", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="sampleSize")
    def sample_size(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "sample_size")

    @sample_size.setter
    def sample_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "sample_size", value)

    @property
    @pulumi.getter(name="successfulSamplesRequired")
    def successful_samples_required(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "successful_samples_required")

    @successful_samples_required.setter
    def successful_samples_required(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "successful_samples_required", value)
@pulumi.input_type
class RedirectConfigurationArgs:
    """Pulumi input type describing the redirect route (target host, path,
    query string, fragment, protocol and redirect type)."""

    def __init__(__self__, *,
                 odata_type: pulumi.Input[str],
                 custom_fragment: Optional[pulumi.Input[str]] = None,
                 custom_host: Optional[pulumi.Input[str]] = None,
                 custom_path: Optional[pulumi.Input[str]] = None,
                 custom_query_string: Optional[pulumi.Input[str]] = None,
                 redirect_protocol: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectProtocol']]] = None,
                 redirect_type: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectType']]] = None):
        # BUG FIX: the OData discriminator literal was truncated to a lone
        # quote in this copy (the leading '#' was treated as a comment by a
        # comment stripper), which is a syntax error.  Restored from
        # pulumi-azure-native codegen output — confirm against upstream.
        pulumi.set(__self__, "odata_type",
                   '#Microsoft.Azure.FrontDoor.Models.FrontdoorRedirectConfiguration')
        if custom_fragment is not None:
            pulumi.set(__self__, "custom_fragment", custom_fragment)
        if custom_host is not None:
            pulumi.set(__self__, "custom_host", custom_host)
        if custom_path is not None:
            pulumi.set(__self__, "custom_path", custom_path)
        if custom_query_string is not None:
            pulumi.set(__self__, "custom_query_string", custom_query_string)
        if redirect_protocol is not None:
            pulumi.set(__self__, "redirect_protocol", redirect_protocol)
        if redirect_type is not None:
            pulumi.set(__self__, "redirect_type", redirect_type)

    @property
    @pulumi.getter(name="odataType")
    def odata_type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "odata_type")

    @odata_type.setter
    def odata_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "odata_type", value)

    @property
    @pulumi.getter(name="customFragment")
    def custom_fragment(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "custom_fragment")

    @custom_fragment.setter
    def custom_fragment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_fragment", value)

    @property
    @pulumi.getter(name="customHost")
    def custom_host(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "custom_host")

    @custom_host.setter
    def custom_host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_host", value)

    @property
    @pulumi.getter(name="customPath")
    def custom_path(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "custom_path")

    @custom_path.setter
    def custom_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_path", value)

    @property
    @pulumi.getter(name="customQueryString")
    def custom_query_string(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "custom_query_string")

    @custom_query_string.setter
    def custom_query_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_query_string", value)

    @property
    @pulumi.getter(name="redirectProtocol")
    def redirect_protocol(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorRedirectProtocol']]]:
        return pulumi.get(self, "redirect_protocol")

    @redirect_protocol.setter
    def redirect_protocol(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectProtocol']]]):
        pulumi.set(self, "redirect_protocol", value)

    @property
    @pulumi.getter(name="redirectType")
    def redirect_type(self) -> Optional[pulumi.Input[Union[str, 'FrontDoorRedirectType']]]:
        return pulumi.get(self, "redirect_type")

    @redirect_type.setter
    def redirect_type(self, value: Optional[pulumi.Input[Union[str, 'FrontDoorRedirectType']]]):
        pulumi.set(self, "redirect_type", value)
@pulumi.input_type
class RoutingRuleArgs:
    """Pulumi input type for a routing rule: accepted protocols, frontend
    endpoints, URL patterns and the route (forward/redirect) configuration."""

    def __init__(__self__, *,
                 accepted_protocols: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'FrontDoorProtocol']]]]] = None,
                 enabled_state: Optional[pulumi.Input[Union[str, 'RoutingRuleEnabledState']]] = None,
                 frontend_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 patterns_to_match: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 route_configuration: Optional[pulumi.Input[Union['ForwardingConfigurationArgs', 'RedirectConfigurationArgs']]] = None):
        # Every argument is optional; forward only those actually given.
        supplied = (
            ("accepted_protocols", accepted_protocols),
            ("enabled_state", enabled_state),
            ("frontend_endpoints", frontend_endpoints),
            ("id", id),
            ("name", name),
            ("patterns_to_match", patterns_to_match),
            ("route_configuration", route_configuration),
        )
        for attr_name, attr_value in supplied:
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="acceptedProtocols")
    def accepted_protocols(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'FrontDoorProtocol']]]]]:
        return pulumi.get(self, "accepted_protocols")

    @accepted_protocols.setter
    def accepted_protocols(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'FrontDoorProtocol']]]]]):
        pulumi.set(self, "accepted_protocols", value)

    @property
    @pulumi.getter(name="enabledState")
    def enabled_state(self) -> Optional[pulumi.Input[Union[str, 'RoutingRuleEnabledState']]]:
        return pulumi.get(self, "enabled_state")

    @enabled_state.setter
    def enabled_state(self, value: Optional[pulumi.Input[Union[str, 'RoutingRuleEnabledState']]]):
        pulumi.set(self, "enabled_state", value)

    @property
    @pulumi.getter(name="frontendEndpoints")
    def frontend_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]:
        return pulumi.get(self, "frontend_endpoints")

    @frontend_endpoints.setter
    def frontend_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]):
        pulumi.set(self, "frontend_endpoints", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="patternsToMatch")
    def patterns_to_match(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "patterns_to_match")

    @patterns_to_match.setter
    def patterns_to_match(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "patterns_to_match", value)

    @property
    @pulumi.getter(name="routeConfiguration")
    def route_configuration(self) -> Optional[pulumi.Input[Union['ForwardingConfigurationArgs', 'RedirectConfigurationArgs']]]:
        return pulumi.get(self, "route_configuration")

    @route_configuration.setter
    def route_configuration(self, value: Optional[pulumi.Input[Union['ForwardingConfigurationArgs', 'RedirectConfigurationArgs']]]):
        pulumi.set(self, "route_configuration", value)
@pulumi.input_type
class SubResourceArgs:
    """Pulumi input type referencing another Azure resource by its id."""

    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None):
        # Only record the id when it was actually supplied.
        if id is not None:
            pulumi.set(__self__, "id", id)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        # Resource id of the referenced resource.
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
| true
| true
|
1c4a57868ba829a1a46f4c3661d4ed036f1ad5f9
| 1,295
|
py
|
Python
|
src/tengi/telegram/telegram_api_utils.py
|
luckybots/tengi
|
1eef42596fb59035a43d6e1fa7b2aa552b52dffc
|
[
"Apache-2.0"
] | 2
|
2021-08-09T18:02:59.000Z
|
2022-01-15T15:11:02.000Z
|
src/tengi/telegram/telegram_api_utils.py
|
luckybots/tengi
|
1eef42596fb59035a43d6e1fa7b2aa552b52dffc
|
[
"Apache-2.0"
] | null | null | null |
src/tengi/telegram/telegram_api_utils.py
|
luckybots/tengi
|
1eef42596fb59035a43d6e1fa7b2aa552b52dffc
|
[
"Apache-2.0"
] | null | null | null |
from typing import Iterable
from telebot import types as bot_types
from telethon.tl import types as api_types
def api_to_bot_markup(api_markup: api_types.ReplyInlineMarkup) -> bot_types.InlineKeyboardMarkup:
    """Convert a Telethon (MTProto) inline keyboard into a telebot markup.

    A None input converts to an empty InlineKeyboardMarkup.  Only callback
    and URL buttons are supported; any other button type raises TypeError.
    """
    result = bot_types.InlineKeyboardMarkup()
    if api_markup is None:
        return result
    for row in api_markup.rows:
        converted_row = []
        for button in row.buttons:
            if isinstance(button, api_types.KeyboardButtonCallback):
                converted = bot_types.InlineKeyboardButton(
                    text=button.text,
                    callback_data=button.data.decode(encoding='utf-8'))
            elif isinstance(button, api_types.KeyboardButtonUrl):
                converted = bot_types.InlineKeyboardButton(
                    text=button.text, url=button.url)
            else:
                raise TypeError(f'Unhandled button type: {type(button)}')
            converted_row.append(converted)
        result.add(*converted_row)
    return result
def iterate_buttons(message: api_types.Message) -> Iterable[api_types.KeyboardButton]:
    """Yield every button of the message's reply markup, row by row.

    Yields nothing when the message has no reply markup.
    """
    markup = message.reply_markup
    if markup is None:
        return
    for row in markup.rows:
        yield from row.buttons
| 43.166667
| 109
| 0.601544
|
from typing import Iterable
from telebot import types as bot_types
from telethon.tl import types as api_types
def api_to_bot_markup(api_markup: api_types.ReplyInlineMarkup) -> bot_types.InlineKeyboardMarkup:
bot_markup = bot_types.InlineKeyboardMarkup()
if api_markup is not None:
for api_r in api_markup.rows:
bot_r = []
for api_b in api_r.buttons:
if isinstance(api_b, api_types.KeyboardButtonCallback):
bot_b = bot_types.InlineKeyboardButton(text=api_b.text,
callback_data=api_b.data.decode(encoding='utf-8'))
elif isinstance(api_b, api_types.KeyboardButtonUrl):
bot_b = bot_types.InlineKeyboardButton(text=api_b.text,
url=api_b.url)
else:
raise TypeError(f'Unhandled button type: {type(api_b)}')
bot_r.append(bot_b)
bot_markup.add(*bot_r)
return bot_markup
def iterate_buttons(message: api_types.Message) -> Iterable[api_types.KeyboardButton]:
if message.reply_markup is not None:
for row in message.reply_markup.rows:
for b in row.buttons:
yield b
| true
| true
|
1c4a582f3f25a8a9a114c1133dfd741294ed8ca5
| 5,628
|
py
|
Python
|
src/game/visualisation/visualise_step.py
|
IvanRoblesMunoz/hungry_geese_game
|
806454bbd1178f214ceae51ea9724faffbb13396
|
[
"MIT"
] | 1
|
2021-12-04T13:11:07.000Z
|
2021-12-04T13:11:07.000Z
|
src/game/visualisation/visualise_step.py
|
IvanRoblesMunoz/hungry_geese_game
|
806454bbd1178f214ceae51ea9724faffbb13396
|
[
"MIT"
] | null | null | null |
src/game/visualisation/visualise_step.py
|
IvanRoblesMunoz/hungry_geese_game
|
806454bbd1178f214ceae51ea9724faffbb13396
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 28 18:03:25 2021
@author: roblesi
This module makes the visuals for the game.
"""
# pylint: disable=E0401
# =============================================================================
# Imports
# =============================================================================
import os
from pathlib import Path
from typing import Tuple
import pygame
from pygame import display
# =============================================================================
# Statics
# =============================================================================
from src.game.visualisation.visualisation_statics import (
START_HEIGHT,
START_WIDTH,
WIDTH_STEP,
HEIGHT_STEP,
)
# NOTE(review): assumes the game is launched from the repository root so
# that the relative assets path resolves — confirm with the entry point.
REPO_PATH = Path(os.getcwd())
ASSETS_PATH = REPO_PATH / "src/game/assets"
# Sprites are loaded once, at module import time.
BACKGROUND = pygame.image.load(ASSETS_PATH / "background_sprite.png")
LOADING_SCREEN = pygame.image.load(ASSETS_PATH / "loading_screen.png")
FOOD = pygame.image.load(ASSETS_PATH / "food_sprite.png")
# All drawn board objects reuse the food sprite's width as their size.
OBJECT_WIDTH = FOOD.get_width()
# RGB colour per goose index (the loading screen says the player is white).
COLOR_GEESE = {
    0: (255, 255, 255),  # White
    1: (255, 0, 0),  # Red
    2: (0, 255, 0),  # Green
    3: (0, 0, 255),  # Blue
}
# =============================================================================
# Function
# =============================================================================
def cell_to_coordinates(cell: int) -> Tuple[float]:
    """Convert a board cell index (11 cells per row) to the pixel
    coordinates of the cell's top-left corner."""
    col = cell % 11
    row = cell // 11
    return START_WIDTH + col * WIDTH_STEP, START_HEIGHT + row * HEIGHT_STEP
def draw_goose(dis: display, goose_pos: list, goose_idx: int) -> None:
    """Draw one goose: a diamond head, square body segments, a round tail.

    A single-segment goose is drawn as a head only (the head check wins).
    """
    colour = COLOR_GEESE[goose_idx]
    tail_idx = len(goose_pos) - 1
    for part_idx, cell in enumerate(goose_pos):
        x, y = cell_to_coordinates(cell)
        if part_idx == 0:
            # Head: diamond spanning two thirds of the cell.
            diamond = [
                (x + HEIGHT_STEP / 3, y),
                (x, y + HEIGHT_STEP / 3),
                (x + WIDTH_STEP / 3, y + HEIGHT_STEP * 2 / 3),
                (x + WIDTH_STEP * 2 / 3, y + HEIGHT_STEP / 3),
            ]
            pygame.draw.polygon(surface=dis, color=colour, points=diamond)
        elif part_idx == tail_idx:
            # Tail: circle.
            pygame.draw.circle(
                surface=dis,
                color=colour,
                center=(x + WIDTH_STEP / 3, y + HEIGHT_STEP / 3),
                radius=WIDTH_STEP / 3,
            )
        else:
            # Body: filled square.
            pygame.draw.rect(dis, colour, (x, y, OBJECT_WIDTH, OBJECT_WIDTH))
def draw_step(dis: display, obs: dict) -> None:
    """Render one game state: background first, then food, then all geese.

    Parameters
    ----------
    dis : display
        Game display.
    obs : dict
        Observation representing the current state of the game; must hold
        "food" (cell indices) and "geese" (four lists of cell indices).

    Returns
    -------
    None
    """
    dis.blit(BACKGROUND, (0, 0))
    for food_cell in obs["food"]:
        dis.blit(FOOD, cell_to_coordinates(food_cell))
    for goose_idx in range(4):
        draw_goose(dis, obs["geese"][goose_idx], goose_idx)
    # Push the frame to the screen.
    pygame.display.flip()
    pygame.display.update()
def draw_loading_screen(dis: display) -> None:
    """Draw loading screen."""
    dis.blit(LOADING_SCREEN, (0, 0))
    # (text, font size, position) for each line of the welcome message.
    message_lines = (
        ("Welcome to Hungry Geese!!!", 40, [40, 40]),
        ("Please press any key to start.", 35, [40, 80]),
        ("You will play the white character, you can control it", 25, [30, 175]),
        ("using the direction keys. After each input, the NPCs", 25, [30, 200]),
        ("will take a second to submit their direction.", 25, [30, 225]),
    )
    for text, size, position in message_lines:
        font = pygame.font.SysFont("bahnschrift", size)
        dis.blit(font.render(text, True, (0, 196, 151)), position)
    # Push the frame to the screen.
    pygame.display.flip()
    pygame.display.update()
def draw_endgame_screen(dis: display, position: int) -> None:
    """Draw end game screen."""
    dis.blit(LOADING_SCREEN, (0, 0))
    # (text, font size, position) for each line of the end-game message.
    message_lines = (
        (f"Game ended, you placed {position} of 4!!!", 40, [40, 50]),
        ("Please press any key to start.", 35, [40, 90]),
        ("You will play the white character, you can control it", 25, [30, 175]),
        ("using the direction keys. After each input, the NPCs", 25, [30, 200]),
        ("will take a second to submit their direction.", 25, [30, 225]),
    )
    for text, size, pos in message_lines:
        font = pygame.font.SysFont("bahnschrift", size)
        dis.blit(font.render(text, True, (0, 196, 151)), pos)
    # Push the frame to the screen.
    pygame.display.flip()
    pygame.display.update()
| 29.621053
| 79
| 0.559168
|
import os
from pathlib import Path
from typing import Tuple
import pygame
from pygame import display
from src.game.visualisation.visualisation_statics import (
START_HEIGHT,
START_WIDTH,
WIDTH_STEP,
HEIGHT_STEP,
)
REPO_PATH = Path(os.getcwd())
ASSETS_PATH = REPO_PATH / "src/game/assets"
BACKGROUND = pygame.image.load(ASSETS_PATH / "background_sprite.png")
LOADING_SCREEN = pygame.image.load(ASSETS_PATH / "loading_screen.png")
FOOD = pygame.image.load(ASSETS_PATH / "food_sprite.png")
OBJECT_WIDTH = FOOD.get_width()
COLOR_GEESE = {
0: (255, 255, 255),
1: (255, 0, 0),
2: (0, 255, 0),
3: (0, 0, 255),
}
def cell_to_coordinates(cell: int) -> Tuple[float]:
cells_width = (cell) % 11
cells_height = (cell) // 11
x_coord = START_WIDTH + cells_width * WIDTH_STEP
y_coord = START_HEIGHT + cells_height * HEIGHT_STEP
return x_coord, y_coord
def draw_goose(dis: display, goose_pos: list, goose_idx: int) -> None:
for body_idx, goose_part in enumerate(goose_pos):
x_coord, y_coord = cell_to_coordinates(goose_part)
if body_idx == 0:
pygame.draw.polygon(
surface=dis,
color=COLOR_GEESE[goose_idx],
points=[
(x_coord + HEIGHT_STEP / 3, y_coord),
(x_coord, y_coord + HEIGHT_STEP / 3),
(x_coord + WIDTH_STEP / 3, y_coord + HEIGHT_STEP * 2 / 3),
(x_coord + WIDTH_STEP * 2 / 3, y_coord + HEIGHT_STEP / 3),
],
)
elif body_idx == len(goose_pos) - 1:
pygame.draw.circle(
surface=dis,
color=COLOR_GEESE[goose_idx],
center=(x_coord + WIDTH_STEP / 3, y_coord + HEIGHT_STEP / 3),
radius=WIDTH_STEP / 3,
)
else:
pygame.draw.rect(
dis,
COLOR_GEESE[goose_idx],
(x_coord, y_coord, OBJECT_WIDTH, OBJECT_WIDTH),
)
def draw_step(dis: display, obs: dict) -> None:
dis.blit(BACKGROUND, (0, 0))
for food_cell in obs["food"]:
dis.blit(FOOD, cell_to_coordinates(food_cell))
for goose_idx in range(4):
goose_pos = obs["geese"][goose_idx]
draw_goose(dis, goose_pos, goose_idx)
pygame.display.flip()
pygame.display.update()
def draw_loading_screen(dis: display) -> None:
msg1 = "Welcome to Hungry Geese!!!"
msg2 = "Please press any key to start."
msg3 = "You will play the white character, you can control it"
msg4 = "using the direction keys. After each input, the NPCs"
msg5 = "will take a second to submit their direction."
dis.blit(LOADING_SCREEN, (0, 0))
font_style1 = pygame.font.SysFont("bahnschrift", 40)
dis.blit(font_style1.render(msg1, True, (0, 196, 151)), [40, 40])
font_style2 = pygame.font.SysFont("bahnschrift", 35)
dis.blit(font_style2.render(msg2, True, (0, 196, 151)), [40, 80])
font_style3 = pygame.font.SysFont("bahnschrift", 25)
dis.blit(font_style3.render(msg3, True, (0, 196, 151)), [30, 175])
dis.blit(font_style3.render(msg4, True, (0, 196, 151)), [30, 200])
dis.blit(font_style3.render(msg5, True, (0, 196, 151)), [30, 225])
pygame.display.flip()
pygame.display.update()
def draw_endgame_screen(dis: display, position: int) -> None:
msg1 = f"Game ended, you placed {position} of 4!!!"
msg2 = "Please press any key to start."
msg3 = "You will play the white character, you can control it"
msg4 = "using the direction keys. After each input, the NPCs"
msg5 = "will take a second to submit their direction."
dis.blit(LOADING_SCREEN, (0, 0))
font_style1 = pygame.font.SysFont("bahnschrift", 40)
dis.blit(font_style1.render(msg1, True, (0, 196, 151)), [40, 50])
font_style2 = pygame.font.SysFont("bahnschrift", 35)
dis.blit(font_style2.render(msg2, True, (0, 196, 151)), [40, 90])
font_style3 = pygame.font.SysFont("bahnschrift", 25)
dis.blit(font_style3.render(msg3, True, (0, 196, 151)), [30, 175])
dis.blit(font_style3.render(msg4, True, (0, 196, 151)), [30, 200])
dis.blit(font_style3.render(msg5, True, (0, 196, 151)), [30, 225])
pygame.display.flip()
pygame.display.update()
| true
| true
|
1c4a583ce9a5a25fca06dd78ceeeadaabb85e3d8
| 16,466
|
py
|
Python
|
electrumsv/util/__init__.py
|
electrumsv/electrumsv
|
a2d9027ccec338cadfca778888e6ef7f077b1651
|
[
"MIT"
] | 136
|
2019-01-10T15:49:09.000Z
|
2022-02-20T04:46:39.000Z
|
electrumsv/util/__init__.py
|
electrumsv/electrumsv
|
a2d9027ccec338cadfca778888e6ef7f077b1651
|
[
"MIT"
] | 790
|
2019-01-07T01:53:35.000Z
|
2022-03-30T23:04:28.000Z
|
electrumsv/util/__init__.py
|
electrumsv/electrumsv
|
a2d9027ccec338cadfca778888e6ef7f077b1651
|
[
"MIT"
] | 65
|
2019-01-10T23:55:30.000Z
|
2021-12-19T06:47:13.000Z
|
# ElectrumSV - lightweight Bitcoin SV client
# Copyright (C) 2019-2020 The ElectrumSV Developers
# Copyright (C) 2011 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from collections import defaultdict
from decimal import Decimal
from datetime import datetime, timedelta, tzinfo
import json
import hmac
import os
import stat
import sys
import threading
import time
import types
from typing import Any, Callable, cast, Dict, Iterable, List, Optional, Sequence, Set, Tuple, \
TypedDict, TypeVar, Union
from bitcoinx import PublicKey
from ..logs import logs
from ..startup import package_dir
from ..types import ExceptionInfoType
from ..version import PACKAGE_DATE
T1 = TypeVar("T1")
def protocol_tuple(s: str) -> Tuple[int, ...]:
    '''Convert a protocol version string such as "1.0" to a tuple (1, 0).

    Raises ValueError if the string is not a dotted sequence of integers.
    (The previous docstring claimed (0, ) was returned on bad input; the
    code has always raised here, so the stale claim is corrected.)
    '''
    try:
        return tuple(int(part) for part in s.split('.'))
    except (TypeError, ValueError, AttributeError):
        # `from None` hides the low-level parse error so the caller sees a
        # single clear message.
        raise ValueError(f'invalid protocol version: {s}') from None
def version_string(ptuple: Tuple[int, ...]) -> str:
    '''Convert a version tuple such as (1, 2) to "1.2".

    Pads with zeros so there is always at least one dot: (1, ) -> "1.0".
    '''
    padded = ptuple + (0,) * max(0, 2 - len(ptuple))
    return '.'.join(map(str, padded))
class MyEncoder(json.JSONEncoder):
    """JSONEncoder that additionally knows how to serialise Transaction
    objects (via their to_dict form)."""

    # https://github.com/PyCQA/pylint/issues/414
    def default(self, o: Any) -> Any:  # pylint: disable=method-hidden
        # Imported lazily to avoid a circular import with ..transaction.
        from ..transaction import Transaction, TransactionContext
        if not isinstance(o, Transaction):
            return super().default(o)
        return o.to_dict(TransactionContext())
class JSON:
    """JSON round-tripping with support for registered custom classes.

    Registered classes are serialised as an {'_sv': (class_name, payload)}
    envelope; payload comes from the instance's to_json() and is fed back
    through the class's from_json() classmethod on load.
    """
    # Registry mapping class name -> class object.
    classes: Dict[str, Any] = {}

    @classmethod
    def register(cls, *classes: Any) -> None:
        """Add the given classes, keyed by __name__, to the registry."""
        cls.classes.update((klass.__name__, klass) for klass in classes)

    @classmethod
    def dumps(cls, obj: Any, **kwargs: Any) -> str:
        def encode_obj(o: Any) -> Dict[str, Any]:
            name = o.__class__.__name__
            if name not in cls.classes:
                raise TypeError(f'object of type {name} is not JSON serializable')
            return {'_sv': (name, o.to_json())}

        kwargs['default'] = encode_obj
        return json.dumps(obj, **kwargs)

    @classmethod
    def loads(cls, s: Union[str, bytes], **kwargs: Any) -> Any:
        def decode_obj(d: Dict[str, Any]) -> Any:
            if '_sv' not in d:
                return d
            name, payload = d['_sv']
            return cls.classes[name].from_json(payload)

        kwargs['object_hook'] = decode_obj
        return json.loads(s, **kwargs)
class DaemonThread(threading.Thread):
""" daemon thread that terminates cleanly """
def __init__(self, name: str) -> None:
threading.Thread.__init__(self)
self.name = name
self.parent_thread = threading.currentThread()
self.running = False
self.running_lock = threading.Lock()
self.logger = logs.get_logger(f'{name} thread')
def start(self) -> None:
with self.running_lock:
self.running = True
threading.Thread.start(self)
def is_running(self) -> bool:
with self.running_lock:
return self.running and self.parent_thread.is_alive()
def stop(self) -> None:
with self.running_lock:
self.running = False
def on_stop(self) -> None:
self.logger.debug("stopped")
def json_encode(obj: Any) -> str:
try:
s = json.dumps(obj, sort_keys = True, indent = 4, cls=MyEncoder)
except TypeError:
s = repr(obj)
return s
def json_decode(x: Union[str, bytes]) -> Any:
try:
return json.loads(x, parse_float=Decimal)
except Exception:
return x
# taken from Django Source Code
def constant_time_compare(val1: str, val2: str) -> bool:
"""Return True if the two strings are equal, False otherwise."""
return hmac.compare_digest(val1.encode('utf8'), val2.encode('utf8'))
# decorator that prints execution time
def profiler(func: Callable[..., T1]) -> Callable[..., T1]:
def do_profile(func: Callable[..., T1], args: Tuple[Any, ...], kw_args: Dict[str, Any]) -> T1:
n = func.__name__
logger = logs.get_logger("profiler")
t0 = time.time()
o = func(*args, **kw_args)
t = time.time() - t0
logger.debug("%s %.4f", n, t)
return o
return lambda *args, **kw_args: do_profile(func, args, kw_args)
def assert_datadir_available(config_path: str) -> None:
path = config_path
if os.path.exists(path):
return
else:
raise FileNotFoundError(
'ElectrumSV datadir does not exist. Was it deleted while running?' + '\n' +
'Should be at {}'.format(path))
def assert_bytes(*args: Any) -> None:
"""
porting helper, assert args type
"""
try:
for x in args:
assert isinstance(x, (bytes, bytearray))
except AssertionError:
logs.root.error('assert bytes failed %s', [type(arg) for arg in args])
raise
def make_dir(path: str) -> None:
# Make directory if it does not yet exist.
if not os.path.exists(path):
if os.path.islink(path):
raise Exception('Dangling link: ' + path)
os.mkdir(path)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
def format_satoshis_plain(x: int, decimal_point: int=8) -> str:
"""Display a satoshi amount scaled. Always uses a '.' as a decimal
point and has no thousands separator"""
scale_factor = pow(10, decimal_point)
return "{:.8f}".format(Decimal(x) / scale_factor).rstrip('0').rstrip('.')
def format_satoshis(x: Optional[int], num_zeros: int=0, decimal_point: int=8,
precision: Optional[int]=None,
is_diff: bool=False, whitespaces: bool=False) -> str:
from locale import localeconv
if x is None:
return 'unknown'
if precision is None:
precision = decimal_point
decimal_format = ",.0" + str(precision) if precision > 0 else ""
if is_diff:
decimal_format = '+' + decimal_format
fmt_string = "{:" + decimal_format + "f}"
result = (fmt_string).format(x / pow (10, decimal_point)).rstrip('0')
integer_part, fract_part = result.split(".")
dp = cast(str, localeconv()['decimal_point'])
if len(fract_part) < num_zeros:
fract_part += "0" * (num_zeros - len(fract_part))
result = integer_part + dp + fract_part
if whitespaces:
result += " " * (decimal_point - len(fract_part))
result = " " * (15 - len(result)) + result
return result
def format_fee_satoshis(fee: int, num_zeros: int=0) -> str:
return format_satoshis(fee, num_zeros, 0, precision=num_zeros)
def get_posix_timestamp() -> int:
# In theory we can just return `int(time.time())` but this returns the posix timestamp and
# try reading the documentation for `time.time` and being sure of that.
return int(datetime.now().timestamp())
def posix_timestamp_to_datetime(timestamp: int) -> datetime:
"Get a local timezone unaware datetime object for the given posix timestamp."
return datetime.fromtimestamp(timestamp)
def format_posix_timestamp(timestamp: int, default_text: str) -> str:
"Get the date and time for the given posix timestamp."
date = posix_timestamp_to_datetime(timestamp)
if date:
return date.isoformat(' ')[:16]
return default_text
# Takes a timestamp and returns a string with the approximation of the age
def age(from_timestamp: Optional[float], since_date: Optional[datetime]=None,
target_tz: Optional[tzinfo]=None, include_seconds: bool=False) -> str:
if from_timestamp is None:
return "Unknown"
from_date = datetime.fromtimestamp(from_timestamp)
if since_date is None:
since_date = datetime.now(target_tz)
td = time_difference(from_date - since_date, include_seconds)
return td + " ago" if from_date < since_date else "in " + td
def time_difference(distance_in_time: timedelta, include_seconds: bool) -> str:
#distance_in_time = since_date - from_date
distance_in_seconds = int(round(abs(distance_in_time.days * 86400 + distance_in_time.seconds)))
distance_in_minutes = int(round(distance_in_seconds/60))
if distance_in_minutes <= 1:
if include_seconds:
for remainder in [5, 10, 20]:
if distance_in_seconds < remainder:
return "less than %s seconds" % remainder
if distance_in_seconds < 40:
return "half a minute"
elif distance_in_seconds < 60:
return "less than a minute"
else:
return "1 minute"
else:
if distance_in_minutes == 0:
return "less than a minute"
else:
return "1 minute"
elif distance_in_minutes < 45:
return "%s minutes" % distance_in_minutes
elif distance_in_minutes < 90:
return "about 1 hour"
elif distance_in_minutes < 1440:
return "about %d hours" % (round(distance_in_minutes / 60.0))
elif distance_in_minutes < 2880:
return "1 day"
elif distance_in_minutes < 43220:
return "%d days" % (round(distance_in_minutes / 1440))
elif distance_in_minutes < 86400:
return "about 1 month"
elif distance_in_minutes < 525600:
return "%d months" % (round(distance_in_minutes / 43200))
elif distance_in_minutes < 1051200:
return "about 1 year"
else:
return "over %d years" % (round(distance_in_minutes / 525600))
def setup_thread_excepthook() -> None:
"""
Workaround for `sys.excepthook` thread bug from:
http://bugs.python.org/issue1230540
Call once from the main thread before creating any threads.
"""
init_original = threading.Thread.__init__
def init(self: threading.Thread, *args: Any, **kwargs: Any) -> None:
init_original(self, *args, **kwargs)
run_original = self.run
def run_with_except_hook() -> None:
try:
run_original()
except Exception:
sys.excepthook(*sys.exc_info())
# NOTE(typing) mypy tells us we cannot assign to a method, but we really can and do..
self.run = run_with_except_hook # type: ignore
# NOTE(typing) mypy tells us we cannot assign to a method, but we really can and do..
threading.Thread.__init__ = init # type: ignore
def get_wallet_name_from_path(wallet_path: str) -> str:
return os.path.splitext(os.path.basename(wallet_path))[0]
def versiontuple(v: str) -> Tuple[int, ...]:
return tuple(int(x) for x in v.split("."))
def resource_path(*parts: Sequence[str]) -> str:
return os.path.join(package_dir, "data", *parts) # type: ignore
def read_resource_file(filename: str) -> str:
path = resource_path(filename)
with open(path, 'r') as f:
return f.read()
def text_resource_path(*parts: Sequence[str]) -> str:
return resource_path("text", *parts)
def read_resource_text(*parts: Sequence[str]) -> str:
# NOTE(typing) Does not recognize the sequence of strings as strings, waste of time.
return read_resource_file(os.path.join("text", *parts)) # type:ignore
def get_update_check_dates(new_date: str) -> Tuple[datetime, datetime]:
from dateutil.parser import isoparse
# This is the latest stable release date.
release_date = isoparse(new_date).astimezone()
# This is the rough date of the current release (might be stable or unstable).
current_date = isoparse(PACKAGE_DATE).astimezone()
return release_date, current_date
class ReleaseEntryType(TypedDict):
version: str
date: str
signatures: List[str]
class ReleaseDocumentType(TypedDict, total=False):
stable: ReleaseEntryType
unstable: ReleaseEntryType
UpdateCheckResultType = Union[ExceptionInfoType, ReleaseDocumentType]
def get_identified_release_signers(entry: ReleaseEntryType) -> Set[str]:
signature_addresses = [
("rt121212121", "1Bu6ABvLAXn1ARFo1gjq6sogpajGbp6iK6"),
("kyuupichan", "1BH8E3TkuJMCcH5WGD11kVweKZuhh6vb7V"),
]
release_version = entry['version']
release_date = entry['date']
release_signatures = entry.get('signatures', [])
message = release_version + release_date
signed_names = set()
for signature in release_signatures:
for signer_name, signer_address in signature_addresses:
if signer_name not in signed_names:
# They are mainnet addresses
if PublicKey.verify_message_and_address(signature, message, signer_address):
signed_names.add(signer_name)
break
return signed_names
def chunks(items: List[T1], size: int) -> Iterable[List[T1]]:
'''Break up items, an iterable, into chunks of length size.'''
for i in range(0, len(items), size):
yield items[i: i + size]
class TriggeredCallbacks:
def __init__(self) -> None:
self._callbacks: Dict[str, List[Callable[..., None]]] = defaultdict(list)
self._callback_lock = threading.Lock()
self._callback_logger = logs.get_logger("callback-logger")
def register_callback(self, callback: Callable[..., None], events: List[str]) -> None:
with self._callback_lock:
for event in events:
if callback in self._callbacks[event]:
self._callback_logger.error("Callback reregistered %s %s", event, callback)
continue
self._callbacks[event].append(callback)
def unregister_callback(self, callback: Callable[..., None]) -> None:
with self._callback_lock:
for callbacks in self._callbacks.values():
if callback in callbacks:
callbacks.remove(callback)
def unregister_callbacks_for_object(self, owner: object) -> None:
with self._callback_lock:
for callbacks in self._callbacks.values():
for callback in callbacks[:]:
if isinstance(callback, types.MethodType):
if callback.__self__ is owner:
callbacks.remove(callback)
def trigger_callback(self, event: str, *args: Any) -> None:
with self._callback_lock:
callbacks = self._callbacks[event][:]
[callback(event, *args) for callback in callbacks]
class ValueLocks:
def __init__(self) -> None:
self._namespace_lock = threading.Lock()
self._namespace: Dict[Any, threading.RLock] = {}
self._counters: Dict[Any, int] = {}
def acquire_lock(self, value: Any) -> None:
with self._namespace_lock:
if value in self._namespace:
self._counters[value] += 1
else:
self._namespace[value] = threading.RLock()
self._counters[value] = 1
self._namespace[value].acquire()
def release_lock(self, value: Any) -> None:
with self._namespace_lock:
if self._counters[value] == 1:
del self._counters[value]
lock = self._namespace.pop(value)
else:
self._counters[value] -= 1
lock = self._namespace[value]
lock.release()
| 34.665263
| 99
| 0.649034
|
from collections import defaultdict
from decimal import Decimal
from datetime import datetime, timedelta, tzinfo
import json
import hmac
import os
import stat
import sys
import threading
import time
import types
from typing import Any, Callable, cast, Dict, Iterable, List, Optional, Sequence, Set, Tuple, \
TypedDict, TypeVar, Union
from bitcoinx import PublicKey
from ..logs import logs
from ..startup import package_dir
from ..types import ExceptionInfoType
from ..version import PACKAGE_DATE
T1 = TypeVar("T1")
def protocol_tuple(s: str) -> Tuple[int, ...]:
try:
return tuple(int(part) for part in s.split('.'))
except (TypeError, ValueError, AttributeError):
raise ValueError(f'invalid protocol version: {s}') from None
def version_string(ptuple: Tuple[int, ...]) -> str:
while len(ptuple) < 2:
ptuple += (0, )
return '.'.join(str(p) for p in ptuple)
class MyEncoder(json.JSONEncoder):
def default(self, o: Any) -> Any:
from ..transaction import Transaction, TransactionContext
if isinstance(o, Transaction):
return o.to_dict(TransactionContext())
return super(MyEncoder, self).default(o)
class JSON:
classes: Dict[str, Any] = {}
@classmethod
def register(cls, *classes: Any) -> None:
for klass in classes:
cls.classes[klass.__name__] = klass
@classmethod
def dumps(cls, obj: Any, **kwargs: Any) -> str:
def encode_obj(obj: Any) -> Dict[str, Any]:
class_name = obj.__class__.__name__
if class_name not in cls.classes:
raise TypeError(f'object of type {class_name} is not JSON serializable')
return {'_sv': (class_name, obj.to_json())}
kwargs['default'] = encode_obj
return json.dumps(obj, **kwargs)
@classmethod
def loads(cls, s: Union[str, bytes], **kwargs: Any) -> Any:
def decode_obj(obj: Dict[str, Any]) -> Any:
if '_sv' in obj:
class_name, ser = obj['_sv']
obj = cls.classes[class_name].from_json(ser)
return obj
kwargs['object_hook'] = decode_obj
return json.loads(s, **kwargs)
class DaemonThread(threading.Thread):
def __init__(self, name: str) -> None:
threading.Thread.__init__(self)
self.name = name
self.parent_thread = threading.currentThread()
self.running = False
self.running_lock = threading.Lock()
self.logger = logs.get_logger(f'{name} thread')
def start(self) -> None:
with self.running_lock:
self.running = True
threading.Thread.start(self)
def is_running(self) -> bool:
with self.running_lock:
return self.running and self.parent_thread.is_alive()
def stop(self) -> None:
with self.running_lock:
self.running = False
def on_stop(self) -> None:
self.logger.debug("stopped")
def json_encode(obj: Any) -> str:
try:
s = json.dumps(obj, sort_keys = True, indent = 4, cls=MyEncoder)
except TypeError:
s = repr(obj)
return s
def json_decode(x: Union[str, bytes]) -> Any:
try:
return json.loads(x, parse_float=Decimal)
except Exception:
return x
def constant_time_compare(val1: str, val2: str) -> bool:
return hmac.compare_digest(val1.encode('utf8'), val2.encode('utf8'))
def profiler(func: Callable[..., T1]) -> Callable[..., T1]:
def do_profile(func: Callable[..., T1], args: Tuple[Any, ...], kw_args: Dict[str, Any]) -> T1:
n = func.__name__
logger = logs.get_logger("profiler")
t0 = time.time()
o = func(*args, **kw_args)
t = time.time() - t0
logger.debug("%s %.4f", n, t)
return o
return lambda *args, **kw_args: do_profile(func, args, kw_args)
def assert_datadir_available(config_path: str) -> None:
path = config_path
if os.path.exists(path):
return
else:
raise FileNotFoundError(
'ElectrumSV datadir does not exist. Was it deleted while running?' + '\n' +
'Should be at {}'.format(path))
def assert_bytes(*args: Any) -> None:
try:
for x in args:
assert isinstance(x, (bytes, bytearray))
except AssertionError:
logs.root.error('assert bytes failed %s', [type(arg) for arg in args])
raise
def make_dir(path: str) -> None:
if not os.path.exists(path):
if os.path.islink(path):
raise Exception('Dangling link: ' + path)
os.mkdir(path)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
def format_satoshis_plain(x: int, decimal_point: int=8) -> str:
scale_factor = pow(10, decimal_point)
return "{:.8f}".format(Decimal(x) / scale_factor).rstrip('0').rstrip('.')
def format_satoshis(x: Optional[int], num_zeros: int=0, decimal_point: int=8,
precision: Optional[int]=None,
is_diff: bool=False, whitespaces: bool=False) -> str:
from locale import localeconv
if x is None:
return 'unknown'
if precision is None:
precision = decimal_point
decimal_format = ",.0" + str(precision) if precision > 0 else ""
if is_diff:
decimal_format = '+' + decimal_format
fmt_string = "{:" + decimal_format + "f}"
result = (fmt_string).format(x / pow (10, decimal_point)).rstrip('0')
integer_part, fract_part = result.split(".")
dp = cast(str, localeconv()['decimal_point'])
if len(fract_part) < num_zeros:
fract_part += "0" * (num_zeros - len(fract_part))
result = integer_part + dp + fract_part
if whitespaces:
result += " " * (decimal_point - len(fract_part))
result = " " * (15 - len(result)) + result
return result
def format_fee_satoshis(fee: int, num_zeros: int=0) -> str:
return format_satoshis(fee, num_zeros, 0, precision=num_zeros)
def get_posix_timestamp() -> int:
return int(datetime.now().timestamp())
def posix_timestamp_to_datetime(timestamp: int) -> datetime:
return datetime.fromtimestamp(timestamp)
def format_posix_timestamp(timestamp: int, default_text: str) -> str:
date = posix_timestamp_to_datetime(timestamp)
if date:
return date.isoformat(' ')[:16]
return default_text
def age(from_timestamp: Optional[float], since_date: Optional[datetime]=None,
target_tz: Optional[tzinfo]=None, include_seconds: bool=False) -> str:
if from_timestamp is None:
return "Unknown"
from_date = datetime.fromtimestamp(from_timestamp)
if since_date is None:
since_date = datetime.now(target_tz)
td = time_difference(from_date - since_date, include_seconds)
return td + " ago" if from_date < since_date else "in " + td
def time_difference(distance_in_time: timedelta, include_seconds: bool) -> str:
distance_in_seconds = int(round(abs(distance_in_time.days * 86400 + distance_in_time.seconds)))
distance_in_minutes = int(round(distance_in_seconds/60))
if distance_in_minutes <= 1:
if include_seconds:
for remainder in [5, 10, 20]:
if distance_in_seconds < remainder:
return "less than %s seconds" % remainder
if distance_in_seconds < 40:
return "half a minute"
elif distance_in_seconds < 60:
return "less than a minute"
else:
return "1 minute"
else:
if distance_in_minutes == 0:
return "less than a minute"
else:
return "1 minute"
elif distance_in_minutes < 45:
return "%s minutes" % distance_in_minutes
elif distance_in_minutes < 90:
return "about 1 hour"
elif distance_in_minutes < 1440:
return "about %d hours" % (round(distance_in_minutes / 60.0))
elif distance_in_minutes < 2880:
return "1 day"
elif distance_in_minutes < 43220:
return "%d days" % (round(distance_in_minutes / 1440))
elif distance_in_minutes < 86400:
return "about 1 month"
elif distance_in_minutes < 525600:
return "%d months" % (round(distance_in_minutes / 43200))
elif distance_in_minutes < 1051200:
return "about 1 year"
else:
return "over %d years" % (round(distance_in_minutes / 525600))
def setup_thread_excepthook() -> None:
init_original = threading.Thread.__init__
def init(self: threading.Thread, *args: Any, **kwargs: Any) -> None:
init_original(self, *args, **kwargs)
run_original = self.run
def run_with_except_hook() -> None:
try:
run_original()
except Exception:
sys.excepthook(*sys.exc_info())
self.run = run_with_except_hook
threading.Thread.__init__ = init
def get_wallet_name_from_path(wallet_path: str) -> str:
return os.path.splitext(os.path.basename(wallet_path))[0]
def versiontuple(v: str) -> Tuple[int, ...]:
return tuple(int(x) for x in v.split("."))
def resource_path(*parts: Sequence[str]) -> str:
return os.path.join(package_dir, "data", *parts)
def read_resource_file(filename: str) -> str:
path = resource_path(filename)
with open(path, 'r') as f:
return f.read()
def text_resource_path(*parts: Sequence[str]) -> str:
return resource_path("text", *parts)
def read_resource_text(*parts: Sequence[str]) -> str:
return read_resource_file(os.path.join("text", *parts))
def get_update_check_dates(new_date: str) -> Tuple[datetime, datetime]:
from dateutil.parser import isoparse
release_date = isoparse(new_date).astimezone()
current_date = isoparse(PACKAGE_DATE).astimezone()
return release_date, current_date
class ReleaseEntryType(TypedDict):
version: str
date: str
signatures: List[str]
class ReleaseDocumentType(TypedDict, total=False):
stable: ReleaseEntryType
unstable: ReleaseEntryType
UpdateCheckResultType = Union[ExceptionInfoType, ReleaseDocumentType]
def get_identified_release_signers(entry: ReleaseEntryType) -> Set[str]:
signature_addresses = [
("rt121212121", "1Bu6ABvLAXn1ARFo1gjq6sogpajGbp6iK6"),
("kyuupichan", "1BH8E3TkuJMCcH5WGD11kVweKZuhh6vb7V"),
]
release_version = entry['version']
release_date = entry['date']
release_signatures = entry.get('signatures', [])
message = release_version + release_date
signed_names = set()
for signature in release_signatures:
for signer_name, signer_address in signature_addresses:
if signer_name not in signed_names:
if PublicKey.verify_message_and_address(signature, message, signer_address):
signed_names.add(signer_name)
break
return signed_names
def chunks(items: List[T1], size: int) -> Iterable[List[T1]]:
for i in range(0, len(items), size):
yield items[i: i + size]
class TriggeredCallbacks:
def __init__(self) -> None:
self._callbacks: Dict[str, List[Callable[..., None]]] = defaultdict(list)
self._callback_lock = threading.Lock()
self._callback_logger = logs.get_logger("callback-logger")
def register_callback(self, callback: Callable[..., None], events: List[str]) -> None:
with self._callback_lock:
for event in events:
if callback in self._callbacks[event]:
self._callback_logger.error("Callback reregistered %s %s", event, callback)
continue
self._callbacks[event].append(callback)
def unregister_callback(self, callback: Callable[..., None]) -> None:
with self._callback_lock:
for callbacks in self._callbacks.values():
if callback in callbacks:
callbacks.remove(callback)
def unregister_callbacks_for_object(self, owner: object) -> None:
with self._callback_lock:
for callbacks in self._callbacks.values():
for callback in callbacks[:]:
if isinstance(callback, types.MethodType):
if callback.__self__ is owner:
callbacks.remove(callback)
def trigger_callback(self, event: str, *args: Any) -> None:
with self._callback_lock:
callbacks = self._callbacks[event][:]
[callback(event, *args) for callback in callbacks]
class ValueLocks:
def __init__(self) -> None:
self._namespace_lock = threading.Lock()
self._namespace: Dict[Any, threading.RLock] = {}
self._counters: Dict[Any, int] = {}
def acquire_lock(self, value: Any) -> None:
with self._namespace_lock:
if value in self._namespace:
self._counters[value] += 1
else:
self._namespace[value] = threading.RLock()
self._counters[value] = 1
self._namespace[value].acquire()
def release_lock(self, value: Any) -> None:
with self._namespace_lock:
if self._counters[value] == 1:
del self._counters[value]
lock = self._namespace.pop(value)
else:
self._counters[value] -= 1
lock = self._namespace[value]
lock.release()
| true
| true
|
1c4a5a1c7c070e81fc5736a95a9baebb21c9b24b
| 896
|
py
|
Python
|
unused/py3_tf2_wide_deep/python_v2/lib/utils/image_preprocessing.py
|
WenqiJiang/FPGA-Accelerator-for-Recommender-Systems
|
6c3031487cd1447b7f5362483c14b108177387bb
|
[
"MIT"
] | 4
|
2020-03-03T12:51:05.000Z
|
2021-06-19T17:34:45.000Z
|
unused/tf2_wide_deep/python/lib/utils/image_preprocessing.py
|
WenqiJiang/FPGA-Accelerator-for-Recommender-Systems
|
6c3031487cd1447b7f5362483c14b108177387bb
|
[
"MIT"
] | null | null | null |
unused/tf2_wide_deep/python/lib/utils/image_preprocessing.py
|
WenqiJiang/FPGA-Accelerator-for-Recommender-Systems
|
6c3031487cd1447b7f5362483c14b108177387bb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: lapis-hong
# @Date : 2018/3/5
"""Provides custom function to preprocess images.
TODO: custom preprocess for CTR task
"""
import tensorflow as tf
def preprocess_image(image, is_training, height, width, depth):
"""Preprocess a single image of layout [height, width, depth]."""
if is_training:
# Resize the image to add four extra pixels on each side.
image = tf.image.resize_with_crop_or_pad(
image, height + 8, width + 8)
# Randomly crop a [_HEIGHT, _WIDTH] section of the image.
image = tf.image.random_crop(image, [height, width, depth])
# Randomly flip the image horizontally.
image = tf.image.random_flip_left_right(image)
# Subtract off the mean and divide by the variance of the pixels.
image = tf.image.per_image_standardization(image)
return image
| 35.84
| 69
| 0.679688
|
import tensorflow as tf
def preprocess_image(image, is_training, height, width, depth):
if is_training:
image = tf.image.resize_with_crop_or_pad(
image, height + 8, width + 8)
image = tf.image.random_crop(image, [height, width, depth])
image = tf.image.random_flip_left_right(image)
image = tf.image.per_image_standardization(image)
return image
| true
| true
|
1c4a5a3252aa102f145f995b3c7e86bbde65c4e8
| 455
|
py
|
Python
|
WebSite/PrayerWall/bookings/tests.py
|
Tinka8ell/Prayer-Wall
|
e9e6f3b94a88fc68f26a660b7abc5a781bad8f71
|
[
"Apache-2.0"
] | null | null | null |
WebSite/PrayerWall/bookings/tests.py
|
Tinka8ell/Prayer-Wall
|
e9e6f3b94a88fc68f26a660b7abc5a781bad8f71
|
[
"Apache-2.0"
] | null | null | null |
WebSite/PrayerWall/bookings/tests.py
|
Tinka8ell/Prayer-Wall
|
e9e6f3b94a88fc68f26a660b7abc5a781bad8f71
|
[
"Apache-2.0"
] | null | null | null |
from django.test import TestCase
# for management command testing
from io import StringIO
from django.core.management import call_command
# Create your tests for bookings here.
class StartEventTest(TestCase):
def test_command_output(self):
out = StringIO()
call_command("startevent 'My New Event' 2020/11/27-20 ('online',) ('104', 2)", stdout=out)
self.assertIn('Successfully created event "My New Event"', out.getvalue())
| 30.333333
| 98
| 0.723077
|
from django.test import TestCase
from io import StringIO
from django.core.management import call_command
class StartEventTest(TestCase):
def test_command_output(self):
out = StringIO()
call_command("startevent 'My New Event' 2020/11/27-20 ('online',) ('104', 2)", stdout=out)
self.assertIn('Successfully created event "My New Event"', out.getvalue())
| true
| true
|
1c4a5a4d9077dba1b07057603efd5ddf4d638abb
| 10,165
|
py
|
Python
|
isi_sdk_8_2_2/isi_sdk_8_2_2/models/event_channel_extended_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_2_2/isi_sdk_8_2_2/models/event_channel_extended_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_2_2/isi_sdk_8_2_2/models/event_channel_extended_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 9
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_2_2.models.event_channel_parameters import EventChannelParameters # noqa: F401,E501
class EventChannelExtendedExtended(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'allowed_nodes': 'list[int]',
'enabled': 'bool',
'excluded_nodes': 'list[int]',
'id': 'int',
'name': 'str',
'parameters': 'EventChannelParameters',
'system': 'bool',
'type': 'str'
}
attribute_map = {
'allowed_nodes': 'allowed_nodes',
'enabled': 'enabled',
'excluded_nodes': 'excluded_nodes',
'id': 'id',
'name': 'name',
'parameters': 'parameters',
'system': 'system',
'type': 'type'
}
def __init__(self, allowed_nodes=None, enabled=None, excluded_nodes=None, id=None, name=None, parameters=None, system=None, type=None): # noqa: E501
"""EventChannelExtendedExtended - a model defined in Swagger""" # noqa: E501
self._allowed_nodes = None
self._enabled = None
self._excluded_nodes = None
self._id = None
self._name = None
self._parameters = None
self._system = None
self._type = None
self.discriminator = None
if allowed_nodes is not None:
self.allowed_nodes = allowed_nodes
if enabled is not None:
self.enabled = enabled
if excluded_nodes is not None:
self.excluded_nodes = excluded_nodes
if id is not None:
self.id = id
if name is not None:
self.name = name
if parameters is not None:
self.parameters = parameters
if system is not None:
self.system = system
if type is not None:
self.type = type
@property
def allowed_nodes(self):
"""Gets the allowed_nodes of this EventChannelExtendedExtended. # noqa: E501
Nodes (LNNs) that can be masters for this channel. # noqa: E501
:return: The allowed_nodes of this EventChannelExtendedExtended. # noqa: E501
:rtype: list[int]
"""
return self._allowed_nodes
@allowed_nodes.setter
def allowed_nodes(self, allowed_nodes):
"""Sets the allowed_nodes of this EventChannelExtendedExtended.
Nodes (LNNs) that can be masters for this channel. # noqa: E501
:param allowed_nodes: The allowed_nodes of this EventChannelExtendedExtended. # noqa: E501
:type: list[int]
"""
self._allowed_nodes = allowed_nodes
@property
def enabled(self):
"""Gets the enabled of this EventChannelExtendedExtended. # noqa: E501
Channel is to be used or not. # noqa: E501
:return: The enabled of this EventChannelExtendedExtended. # noqa: E501
:rtype: bool
"""
return self._enabled
@enabled.setter
def enabled(self, enabled):
"""Sets the enabled of this EventChannelExtendedExtended.
Channel is to be used or not. # noqa: E501
:param enabled: The enabled of this EventChannelExtendedExtended. # noqa: E501
:type: bool
"""
self._enabled = enabled
@property
def excluded_nodes(self):
"""Gets the excluded_nodes of this EventChannelExtendedExtended. # noqa: E501
Nodes (LNNs) that can NOT be the masters for this channel. # noqa: E501
:return: The excluded_nodes of this EventChannelExtendedExtended. # noqa: E501
:rtype: list[int]
"""
return self._excluded_nodes
@excluded_nodes.setter
def excluded_nodes(self, excluded_nodes):
"""Sets the excluded_nodes of this EventChannelExtendedExtended.
Nodes (LNNs) that can NOT be the masters for this channel. # noqa: E501
:param excluded_nodes: The excluded_nodes of this EventChannelExtendedExtended. # noqa: E501
:type: list[int]
"""
self._excluded_nodes = excluded_nodes
@property
def id(self):
"""Gets the id of this EventChannelExtendedExtended. # noqa: E501
Unique identifier. # noqa: E501
:return: The id of this EventChannelExtendedExtended. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this EventChannelExtendedExtended.
Unique identifier. # noqa: E501
:param id: The id of this EventChannelExtendedExtended. # noqa: E501
:type: int
"""
if id is not None and id > 4294967295: # noqa: E501
raise ValueError("Invalid value for `id`, must be a value less than or equal to `4294967295`") # noqa: E501
if id is not None and id < 0: # noqa: E501
raise ValueError("Invalid value for `id`, must be a value greater than or equal to `0`") # noqa: E501
self._id = id
@property
def name(self):
"""Gets the name of this EventChannelExtendedExtended. # noqa: E501
Channel name, may not contain / # noqa: E501
:return: The name of this EventChannelExtendedExtended. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this EventChannelExtendedExtended.
Channel name, may not contain / # noqa: E501
:param name: The name of this EventChannelExtendedExtended. # noqa: E501
:type: str
"""
if name is not None and len(name) > 254:
raise ValueError("Invalid value for `name`, length must be less than or equal to `254`") # noqa: E501
if name is not None and len(name) < 1:
raise ValueError("Invalid value for `name`, length must be greater than or equal to `1`") # noqa: E501
self._name = name
@property
def parameters(self):
"""Gets the parameters of this EventChannelExtendedExtended. # noqa: E501
Parameters to be used for an smtp channel. # noqa: E501
:return: The parameters of this EventChannelExtendedExtended. # noqa: E501
:rtype: EventChannelParameters
"""
return self._parameters
@parameters.setter
def parameters(self, parameters):
"""Sets the parameters of this EventChannelExtendedExtended.
Parameters to be used for an smtp channel. # noqa: E501
:param parameters: The parameters of this EventChannelExtendedExtended. # noqa: E501
:type: EventChannelParameters
"""
self._parameters = parameters
    @property
    def system(self):
        """Gets the system of this EventChannelExtendedExtended.  # noqa: E501
        Channel is a pre-defined system channel.  # noqa: E501
        :return: The system of this EventChannelExtendedExtended.  # noqa: E501
        :rtype: bool
        """
        # True when this channel is a pre-defined system channel.
        return self._system
    @system.setter
    def system(self, system):
        """Sets the system of this EventChannelExtendedExtended.
        Channel is a pre-defined system channel.  # noqa: E501
        :param system: The system of this EventChannelExtendedExtended.  # noqa: E501
        :type: bool
        """
        # Stored as-is; no validation is required for a boolean flag.
        self._system = system
    @property
    def type(self):
        """Gets the type of this EventChannelExtendedExtended.  # noqa: E501
        The mechanism used by the channel.  # noqa: E501
        :return: The type of this EventChannelExtendedExtended.  # noqa: E501
        :rtype: str
        """
        # One of the values accepted by the setter's allowed_values list.
        return self._type
@type.setter
def type(self, type):
"""Sets the type of this EventChannelExtendedExtended.
The mechanism used by the channel. # noqa: E501
:param type: The type of this EventChannelExtendedExtended. # noqa: E501
:type: str
"""
allowed_values = ["connectemc", "smtp", "snmp", "heartbeat"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
)
self._type = type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
    def to_str(self):
        """Returns the string representation of the model"""
        # Pretty-print the dict form so nested models render readably.
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        # Delegate so repr(), print() and pprint all agree.
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, EventChannelExtendedExtended):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 31.085627
| 153
| 0.600787
|
import pprint
import re
import six
from isi_sdk_8_2_2.models.event_channel_parameters import EventChannelParameters
class EventChannelExtendedExtended(object):
    """Swagger-generated model describing an event notification channel.

    Exposes validated properties plus dict/string conversion helpers.
    """
    # Attribute name -> swagger type string; drives to_dict() serialization.
    swagger_types = {
        'allowed_nodes': 'list[int]',
        'enabled': 'bool',
        'excluded_nodes': 'list[int]',
        'id': 'int',
        'name': 'str',
        'parameters': 'EventChannelParameters',
        'system': 'bool',
        'type': 'str'
    }
    # Attribute name -> JSON key on the wire (identical here).
    attribute_map = {
        'allowed_nodes': 'allowed_nodes',
        'enabled': 'enabled',
        'excluded_nodes': 'excluded_nodes',
        'id': 'id',
        'name': 'name',
        'parameters': 'parameters',
        'system': 'system',
        'type': 'type'
    }
    def __init__(self, allowed_nodes=None, enabled=None, excluded_nodes=None, id=None, name=None, parameters=None, system=None, type=None):
        """Initialize the model; every attribute is optional and defaults to None."""
        self._allowed_nodes = None
        self._enabled = None
        self._excluded_nodes = None
        self._id = None
        self._name = None
        self._parameters = None
        self._system = None
        self._type = None
        self.discriminator = None
        # Route provided values through the property setters for validation.
        if allowed_nodes is not None:
            self.allowed_nodes = allowed_nodes
        if enabled is not None:
            self.enabled = enabled
        if excluded_nodes is not None:
            self.excluded_nodes = excluded_nodes
        if id is not None:
            self.id = id
        if name is not None:
            self.name = name
        if parameters is not None:
            self.parameters = parameters
        if system is not None:
            self.system = system
        if type is not None:
            self.type = type
    @property
    def allowed_nodes(self):
        """List of node ids allowed for this channel (list[int])."""
        return self._allowed_nodes
    @allowed_nodes.setter
    def allowed_nodes(self, allowed_nodes):
        self._allowed_nodes = allowed_nodes
    @property
    def enabled(self):
        """Whether the channel is enabled (bool)."""
        return self._enabled
    @enabled.setter
    def enabled(self, enabled):
        self._enabled = enabled
    @property
    def excluded_nodes(self):
        """List of node ids excluded from this channel (list[int])."""
        return self._excluded_nodes
    @excluded_nodes.setter
    def excluded_nodes(self, excluded_nodes):
        self._excluded_nodes = excluded_nodes
    @property
    def id(self):
        """Unique identifier (unsigned 32-bit int)."""
        return self._id
    @id.setter
    def id(self, id):
        # Identifier must fit in an unsigned 32-bit integer.
        if id is not None and id > 4294967295:
            raise ValueError("Invalid value for `id`, must be a value less than or equal to `4294967295`")
        if id is not None and id < 0:
            raise ValueError("Invalid value for `id`, must be a value greater than or equal to `0`")
        self._id = id
    @property
    def name(self):
        """Channel name, 1 to 254 characters."""
        return self._name
    @name.setter
    def name(self, name):
        # Enforce the 1..254 character length constraint.
        if name is not None and len(name) > 254:
            raise ValueError("Invalid value for `name`, length must be less than or equal to `254`")
        if name is not None and len(name) < 1:
            raise ValueError("Invalid value for `name`, length must be greater than or equal to `1`")
        self._name = name
    @property
    def parameters(self):
        """Nested EventChannelParameters model for channel delivery settings."""
        return self._parameters
    @parameters.setter
    def parameters(self, parameters):
        self._parameters = parameters
    @property
    def system(self):
        """True if this channel is a pre-defined system channel."""
        return self._system
    @system.setter
    def system(self, system):
        self._system = system
    @property
    def type(self):
        """Delivery mechanism used by the channel."""
        return self._type
    @type.setter
    def type(self, type):
        # Only the four known delivery mechanisms are accepted.
        allowed_values = ["connectemc", "smtp", "snmp", "heartbeat"]
        if type not in allowed_values:
            raise ValueError(
                "Invalid value for `type` ({0}), must be one of {1}"
                .format(type, allowed_values)
            )
        self._type = type
    def to_dict(self):
        """Return the model properties as a dict, recursing into nested models."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()
    def __eq__(self, other):
        """Return True if both objects are of this type with equal attributes."""
        if not isinstance(other, EventChannelExtendedExtended):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Return True if the objects are not equal."""
        return not self == other
| true
| true
|
1c4a5cf471c60ef2a56210e6cc5aeab54a57bcce
| 4,028
|
py
|
Python
|
configs/top_down/resnetv1d/coco/resnetv1d152_coco_256x192.py
|
jcwon0/BlurHPE
|
c97a57e92a8a7f171b0403aee640222a32513562
|
[
"Apache-2.0"
] | null | null | null |
configs/top_down/resnetv1d/coco/resnetv1d152_coco_256x192.py
|
jcwon0/BlurHPE
|
c97a57e92a8a7f171b0403aee640222a32513562
|
[
"Apache-2.0"
] | null | null | null |
configs/top_down/resnetv1d/coco/resnetv1d152_coco_256x192.py
|
jcwon0/BlurHPE
|
c97a57e92a8a7f171b0403aee640222a32513562
|
[
"Apache-2.0"
] | null | null | null |
# Top-down 2D human pose estimation config: ResNetV1d-152 backbone trained on
# COCO keypoints with a 256x192 input resolution (mmpose-style config).
log_level = 'INFO'
load_from = None
resume_from = None
dist_params = dict(backend='nccl')
workflow = [('train', 1)]
checkpoint_config = dict(interval=10)
evaluation = dict(interval=10, metric='mAP', key_indicator='AP')
optimizer = dict(
    type='Adam',
    lr=5e-4,
)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[170, 200])
total_epochs = 210
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# 17 COCO keypoints, all used for both training and inference.
channel_cfg = dict(
    num_output_channels=17,
    dataset_joints=17,
    dataset_channel=[
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
    ],
    inference_channel=[
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
    ])
# model settings
model = dict(
    type='TopDown',
    pretrained='mmcls://resnet152_v1d',
    backbone=dict(type='ResNetV1d', depth=152),
    keypoint_head=dict(
        type='TopDownSimpleHead',
        in_channels=2048,
        out_channels=channel_cfg['num_output_channels'],
        loss_keypoint=dict(type='JointsMSELoss', use_target_weight=True)),
    train_cfg=dict(),
    test_cfg=dict(
        flip_test=True,
        post_process='default',
        shift_heatmap=True,
        modulate_kernel=11))
# Data pipeline settings; heatmap is 1/4 of the input resolution.
data_cfg = dict(
    image_size=[192, 256],
    heatmap_size=[48, 64],
    num_output_channels=channel_cfg['num_output_channels'],
    num_joints=channel_cfg['dataset_joints'],
    dataset_channel=channel_cfg['dataset_channel'],
    inference_channel=channel_cfg['inference_channel'],
    soft_nms=False,
    nms_thr=1.0,
    oks_thr=0.9,
    vis_thr=0.2,
    use_gt_bbox=False,
    det_bbox_thr=0.0,
    bbox_file='data/coco/person_detection_results/'
    'COCO_val2017_detections_AP_H_56_person.json',
)
# Training augmentation: flip, half-body crop, random scale/rotation.
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownRandomFlip', flip_prob=0.5),
    dict(
        type='TopDownHalfBodyTransform',
        num_joints_half_body=8,
        prob_half_body=0.3),
    dict(
        type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(type='TopDownGenerateTarget', sigma=2),
    dict(
        type='Collect',
        keys=['img', 'target', 'target_weight'],
        meta_keys=[
            'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
            'rotation', 'bbox_score', 'flip_pairs'
        ]),
]
# Validation/test pipeline: deterministic, no augmentation.
val_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(
        type='Collect',
        keys=['img'],
        meta_keys=[
            'image_file', 'center', 'scale', 'rotation', 'bbox_score',
            'flip_pairs'
        ]),
]
test_pipeline = val_pipeline
data_root = 'data/coco'
data = dict(
    samples_per_gpu=32,
    workers_per_gpu=2,
    train=dict(
        type='TopDownCocoDataset',
        ann_file=f'{data_root}/annotations/person_keypoints_train2017.json',
        img_prefix=f'{data_root}/train2017/',
        data_cfg=data_cfg,
        pipeline=train_pipeline),
    val=dict(
        type='TopDownCocoDataset',
        ann_file=f'{data_root}/annotations/person_keypoints_val2017.json',
        img_prefix=f'{data_root}/val2017/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
    test=dict(
        type='TopDownCocoDataset',
        ann_file=f'{data_root}/annotations/person_keypoints_val2017.json',
        img_prefix=f'{data_root}/val2017/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
)
| 28.771429
| 80
| 0.606504
|
# Top-down 2D human pose estimation config: ResNetV1d-152 backbone trained on
# COCO keypoints with a 256x192 input resolution (mmpose-style config).
log_level = 'INFO'
load_from = None
resume_from = None
dist_params = dict(backend='nccl')
workflow = [('train', 1)]
checkpoint_config = dict(interval=10)
evaluation = dict(interval=10, metric='mAP', key_indicator='AP')
optimizer = dict(
    type='Adam',
    lr=5e-4,
)
optimizer_config = dict(grad_clip=None)
# Step LR schedule with linear warmup.
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[170, 200])
total_epochs = 210
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
    ])
# 17 COCO keypoints, all used for both training and inference.
channel_cfg = dict(
    num_output_channels=17,
    dataset_joints=17,
    dataset_channel=[
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
    ],
    inference_channel=[
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
    ])
# Model: TopDown detector with a simple deconv keypoint head.
model = dict(
    type='TopDown',
    pretrained='mmcls://resnet152_v1d',
    backbone=dict(type='ResNetV1d', depth=152),
    keypoint_head=dict(
        type='TopDownSimpleHead',
        in_channels=2048,
        out_channels=channel_cfg['num_output_channels'],
        loss_keypoint=dict(type='JointsMSELoss', use_target_weight=True)),
    train_cfg=dict(),
    test_cfg=dict(
        flip_test=True,
        post_process='default',
        shift_heatmap=True,
        modulate_kernel=11))
# Data pipeline settings; heatmap is 1/4 of the input resolution.
data_cfg = dict(
    image_size=[192, 256],
    heatmap_size=[48, 64],
    num_output_channels=channel_cfg['num_output_channels'],
    num_joints=channel_cfg['dataset_joints'],
    dataset_channel=channel_cfg['dataset_channel'],
    inference_channel=channel_cfg['inference_channel'],
    soft_nms=False,
    nms_thr=1.0,
    oks_thr=0.9,
    vis_thr=0.2,
    use_gt_bbox=False,
    det_bbox_thr=0.0,
    bbox_file='data/coco/person_detection_results/'
    'COCO_val2017_detections_AP_H_56_person.json',
)
# Training augmentation: flip, half-body crop, random scale/rotation.
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownRandomFlip', flip_prob=0.5),
    dict(
        type='TopDownHalfBodyTransform',
        num_joints_half_body=8,
        prob_half_body=0.3),
    dict(
        type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(type='TopDownGenerateTarget', sigma=2),
    dict(
        type='Collect',
        keys=['img', 'target', 'target_weight'],
        meta_keys=[
            'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
            'rotation', 'bbox_score', 'flip_pairs'
        ]),
]
# Validation/test pipeline: deterministic, no augmentation.
val_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(
        type='Collect',
        keys=['img'],
        meta_keys=[
            'image_file', 'center', 'scale', 'rotation', 'bbox_score',
            'flip_pairs'
        ]),
]
test_pipeline = val_pipeline
data_root = 'data/coco'
data = dict(
    samples_per_gpu=32,
    workers_per_gpu=2,
    train=dict(
        type='TopDownCocoDataset',
        ann_file=f'{data_root}/annotations/person_keypoints_train2017.json',
        img_prefix=f'{data_root}/train2017/',
        data_cfg=data_cfg,
        pipeline=train_pipeline),
    val=dict(
        type='TopDownCocoDataset',
        ann_file=f'{data_root}/annotations/person_keypoints_val2017.json',
        img_prefix=f'{data_root}/val2017/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
    test=dict(
        type='TopDownCocoDataset',
        ann_file=f'{data_root}/annotations/person_keypoints_val2017.json',
        img_prefix=f'{data_root}/val2017/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
)
| true
| true
|
1c4a5d624386288eaed2cfbb319df3274578f578
| 28,100
|
py
|
Python
|
paddlenlp/datasets/dataset.py
|
JunnYu/ConvBERT-Prod
|
a1351e1e7f9400cb8c71d0a15d23629b4cb055d4
|
[
"Apache-2.0"
] | 11
|
2022-01-06T07:39:47.000Z
|
2022-03-22T06:18:40.000Z
|
paddlenlp/datasets/dataset.py
|
JunnYu/ConvBERT-Prod
|
a1351e1e7f9400cb8c71d0a15d23629b4cb055d4
|
[
"Apache-2.0"
] | null | null | null |
paddlenlp/datasets/dataset.py
|
JunnYu/ConvBERT-Prod
|
a1351e1e7f9400cb8c71d0a15d23629b4cb055d4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atexit
import collections
import io
import math
import os
import warnings
import sys
import inspect
from multiprocess import Pool, RLock
import time
import paddle.distributed as dist
from paddle.io import Dataset, IterableDataset
from paddle.dataset.common import md5file
from paddle.utils.download import get_path_from_url, _get_unique_endpoints
from paddlenlp.utils.env import DATA_HOME
from typing import Iterable, Iterator, Optional, List, Any, Callable, Union
import importlib
from functools import partial
__all__ = ['MapDataset', 'DatasetBuilder', 'IterDataset', 'load_dataset']
DATASETS_MODULE_PATH = "paddlenlp.datasets."
def import_main_class(module_path):
    """
    Import a module at module_path and return its DatasetBuilder class.
    """
    module = importlib.import_module(DATASETS_MODULE_PATH + module_path)
    # Pick the first class defined in the module that subclasses
    # DatasetBuilder, skipping the base class itself (which may have been
    # imported into the module namespace).
    for attr_name, attr_value in module.__dict__.items():
        if attr_name == 'DatasetBuilder':
            continue
        if isinstance(attr_value, type) and issubclass(attr_value,
                                                       DatasetBuilder):
            return attr_value
    # No concrete builder found.
    return None
def load_dataset(path_or_read_func,
                 name=None,
                 data_files=None,
                 splits=None,
                 lazy=None,
                 **kwargs):
    """
    This method will load a dataset, either form PaddleNLP library or from a
    self-defined data loading script, by calling functions in `DatasetBuilder`.
    For all the names of datasets in PaddleNLP library, see here:  `dataset_list
    <https://paddlenlp.readthedocs.io/zh/latest/data_prepare/dataset_list.html>`__.
    Either `splits` or `data_files` must be specified.
    Args:
        path_or_read_func (str|callable): Name of the dataset processing script
            in PaddleNLP library or a custom data reading function.
        name (str, optional): Additional name to select a more specific dataset.
            Defaults to None.
        data_files (str|list|tuple|dict, optional): Defining the path of dataset
            files. If None. `splits` must be specified. Defaults to None.
        splits (str|list|tuple, optional): Which split of the data to load. If None.
            `data_files` must be specified. Defaults to None.
        lazy (bool, optional): Weather to return `MapDataset` or an `IterDataset`.
            True for `IterDataset`. False for `MapDataset`. If None, return the
            default type of this dataset. Defaults to None.
        kwargs (dict): Other keyword arguments to be passed to the `DatasetBuilder`.
    Returns:
        A `MapDataset` or `IterDataset` or a tuple of those.
    For how to use this function, please see `dataset_load
    <https://paddlenlp.readthedocs.io/zh/latest/data_prepare/dataset_load.html>`__
    and `dataset_self_defined
    <https://paddlenlp.readthedocs.io/zh/latest/data_prepare/dataset_self_defined.html>`__
    """
    if inspect.isfunction(path_or_read_func):
        # Custom read function: forward only the kwargs it actually accepts.
        assert lazy is not None, "lazy can not be None in custom mode."
        kwargs['name'] = name
        kwargs['data_files'] = data_files
        kwargs['splits'] = splits
        custom_kwargs = {}
        # Fix: use a distinct loop variable so the `name` argument is not
        # clobbered by the signature-parameter names.
        for param_name in inspect.signature(path_or_read_func).parameters.keys():
            if param_name in kwargs.keys():
                custom_kwargs[param_name] = kwargs[param_name]
        reader_instance = SimpleBuilder(lazy=lazy, read_func=path_or_read_func)
        return reader_instance.read(**custom_kwargs)
    else:
        reader_cls = import_main_class(path_or_read_func)
        reader_instance = reader_cls(lazy=lazy, name=name, **kwargs)
        # Check if selected name and split is valid in this DatasetBuilder
        if hasattr(reader_instance, 'BUILDER_CONFIGS'):
            if name in reader_cls.BUILDER_CONFIGS.keys():
                split_names = reader_cls.BUILDER_CONFIGS[name]['splits'].keys()
            else:
                raise ValueError(
                    'Invalid name "{}". Should be one of {}.'.format(
                        name, list(reader_cls.BUILDER_CONFIGS.keys())))
        elif hasattr(reader_instance, 'SPLITS'):
            split_names = reader_instance.SPLITS.keys()
        else:
            raise AttributeError(
                "Either 'SPLITS' or 'BUILDER_CONFIGS' must be implemented for DatasetBuilder."
            )
        selected_splits = []
        if isinstance(splits, (list, tuple)):
            selected_splits.extend(splits)
        else:
            selected_splits.append(splits)
        for split_name in selected_splits:
            # `None` is allowed here: it means "load from data_files instead".
            if split_name is not None and split_name not in split_names:
                raise ValueError('Invalid split "{}". Should be one of {}.'.
                                 format(split_name, list(split_names)))
        datasets = reader_instance.read_datasets(
            data_files=data_files, splits=splits)
        return datasets
class MapDataset(Dataset):
    """
    Wraps a map-style dataset-like object as an instance of `MapDataset`, and equips it
    with `map` and other utility methods. All non-magic methods of the raw object
    are also accessible.
    Args:
        data (list|Dataset): An object with `__getitem__` and `__len__` methods. It could
            be a list or a subclass of `paddle.io.Dataset`.
        kwargs (dict, optional): Other information to be passed to the dataset.
    For examples of this class, please see `dataset_self_defined
    <https://paddlenlp.readthedocs.io/zh/latest/data_prepare/dataset_self_defined.html>`__.
    """
    def __init__(self, data, **kwargs):
        self.data = data
        # Pipeline of lazy `map` transforms applied on access in __getitem__.
        self._transform_pipline = []
        # `new_data` holds the current (possibly filtered/sharded) view.
        self.new_data = self.data
        self.label_list = kwargs.pop('label_list', None)
        self.vocab_info = kwargs.pop('vocab_info', None)
    def _transform(self, data):
        # Apply every registered lazy transformation in registration order.
        for fn in self._transform_pipline:
            data = fn(data)
        return data
    def __getitem__(self, idx):
        """
        Basic function of `MapDataset` to get sample from dataset with a given
        index.
        """
        return self._transform(self.new_data[
            idx]) if self._transform_pipline else self.new_data[idx]
    def __len__(self):
        """
        Returns the number of samples in dataset.
        """
        return len(self.new_data)
    def filter(self, fn, num_workers=0):
        """
        Filters samples by the filter function and uses the filtered data to
        update this dataset.
        Args:
            fn (callable): A filter function that takes a sample as input and
                returns a boolean. Samples that return False would be discarded.
            num_workers(int, optional): Number of processes for multiprocessing. If
                set to 0, it doesn't use multiprocessing. Defaults to `0`.
        """
        assert num_workers >= 0, "num_workers should be a non-negative value"
        if num_workers > 0:
            pool = Pool(
                num_workers, initargs=(RLock(), ), maxtasksperchild=1000)
            # Each worker filters one contiguous shard; shards are then
            # concatenated back in rank order to preserve sample order.
            def filter_shard(num_workers, index, fn):
                self.shard(
                    num_shards=num_workers, index=index, contiguous=True)
                self._filter(fn=fn)
                return self
            kwds_per_shard = [
                dict(
                    num_workers=num_workers, index=rank, fn=fn)
                for rank in range(num_workers)
            ]
            results = [
                pool.apply_async(
                    filter_shard, kwds=kwds) for kwds in kwds_per_shard
            ]
            transformed_shards = [r.get() for r in results]
            pool.close()
            pool.join()
            self.new_data = []
            for i in range(num_workers):
                self.new_data += transformed_shards[i].new_data
            return self
        else:
            return self._filter(fn)
    def _filter(self, fn):
        # In-place single-process filtering of the current data view.
        self.new_data = [
            self.new_data[idx] for idx in range(len(self.new_data))
            if fn(self.new_data[idx])
        ]
        return self
    def shard(self, num_shards=None, index=None, contiguous=False):
        """
        Split the dataset into `num_shards` pieces. Note that the size of each
        shard might be different because the original dataset may not be evenly
        divisible.
        Args:
            num_shards (int, optional): An integer representing the number of
                data shards. If None, `num_shards` would be number of trainers.
                Defaults to `None`.
            index (int, optional): An integer representing the index of the
                current shard. If None, `index` would be the current trainer rank
                id. Defaults to `None`.
            contiguous: (bool, optional): If true, contiguous chunks of data
                will be select for sharding. And total number of examples will
                be the same. Otherwise each shard will contain all examples of
                dataset whose index mod `num_shards` = `index`. Defaults to `False`.
        """
        if num_shards is None:
            num_shards = dist.get_world_size()
        if index is None:
            index = dist.get_rank()
        if contiguous:
            # Spread the remainder (`mod`) over the first `mod` shards so
            # shard sizes differ by at most one.
            div = len(self) // num_shards
            mod = len(self) % num_shards
            start = div * index + min(index, mod)
            end = start + div + (1 if index < mod else 0)
            self.new_data = self.new_data[start:end]
        else:
            # NOTE(review): `num_samples` is computed but never used —
            # candidate for removal.
            num_samples = int(math.ceil(len(self.new_data) * 1.0 / num_shards))
            self.new_data = [
                self.new_data[idx] for idx in range(len(self.new_data))
                if idx % num_shards == index
            ]
        return self
    def map(self, fn, lazy=True, batched=False, num_workers=0):
        """
        Performs specific function on the dataset to transform and update every sample.
        Args:
            fn (callable): Transformations to be performed. It receives single
                sample as argument if batched is False. Else it receives all examples.
            lazy (bool, optional): If True, transformations would be delayed and
                performed on demand. Otherwise, transforms all samples at once. Note that
                if `fn` is stochastic, `lazy` should be True or you will get the same
                result on all epochs. Defaults to True.
            batched(bool, optional): If True, transformations would take all examples as
                input and return a collection of transformed examples. Note that if set
                True, `lazy` option would be ignored. Defaults to False.
            num_workers(int, optional): Number of processes for multiprocessing. If
                set to 0, it doesn't use multiprocessing. Note that if set to positive
                value, `lazy` option would be ignored. Defaults to 0.
        """
        assert num_workers >= 0, "num_workers should be a non-negative value"
        if num_workers > 0:
            # Same shard/merge scheme as `filter`: each worker eagerly maps a
            # contiguous shard, then results are re-assembled in rank order.
            def map_shard(num_workers, index, fn, batched):
                self.shard(
                    num_shards=num_workers, index=index, contiguous=True)
                self._map(fn=fn, lazy=False, batched=batched)
                return self
            kwds_per_shard = [
                dict(
                    num_workers=num_workers,
                    index=rank,
                    fn=fn,
                    batched=batched) for rank in range(num_workers)
            ]
            pool = Pool(
                num_workers, initargs=(RLock(), ), maxtasksperchild=1000)
            results = [
                pool.apply_async(
                    map_shard, kwds=kwds) for kwds in kwds_per_shard
            ]
            transformed_shards = [r.get() for r in results]
            pool.close()
            pool.join()
            self.new_data = []
            for i in range(num_workers):
                self.new_data += transformed_shards[i].new_data
            return self
        else:
            return self._map(fn, lazy=lazy, batched=batched)
    def _map(self, fn, lazy=True, batched=False):
        # `batched` transforms the whole list at once; `lazy` defers the
        # transform to __getitem__; otherwise transform eagerly now.
        if batched:
            self.new_data = fn(self.new_data)
        elif lazy:
            self._transform_pipline.append(fn)
        else:
            self.new_data = [
                fn(self.new_data[idx]) for idx in range(len(self.new_data))
            ]
        return self
class IterDataset(IterableDataset):
    """
    Wraps a dataset-like object as an instance of `IterDataset`, and equips it with
    `map` and other utility methods. All non-magic methods of the raw object
    also accessible.
    Args:
        data (Iterable): An object with `__iter__` function. It can be a Iterable or a
            subclass of `paddle.io.IterableDataset`.
        kwargs (dict, optional): Other information to be passed to the dataset.
    For examples of this class, please see `dataset_self_defined
    <https://paddlenlp.readthedocs.io/zh/latest/data_prepare/dataset_self_defined.html>`__.
    """
    def __init__(self, data, **kwargs):
        self.data = data
        # Lazy `map` transforms applied per-sample during iteration.
        self._transform_pipline = []
        # Predicates registered via `filter`; all must accept a sample.
        self._filter_pipline = []
        self.label_list = kwargs.pop('label_list', None)
        self.vocab_info = kwargs.pop('vocab_info', None)
    def _transform(self, data):
        # Apply every registered transformation in registration order.
        for fn in self._transform_pipline:
            data = fn(data)
        return data
    def _shard_filter(self, num_samples):
        # Default shard filter keeps every sample; `shard()` replaces this
        # with a rank-based predicate.
        return True
    def _filter(self, data):
        # A sample is kept only if every registered filter accepts it.
        for fn in self._filter_pipline:
            if not fn(data):
                return False
        return True
    def __iter__(self):
        """
        yields sample sequentially.
        """
        num_samples = 0
        if inspect.isfunction(self.data):
            for example in self.data():
                # Bug fix: filters must be applied to the example itself;
                # previously the filter pipeline list was passed instead, so
                # user-registered filters never saw the sample.
                if (not self._filter_pipline or
                        self._filter(example)
                    ) and self._shard_filter(num_samples=num_samples):
                    yield self._transform(
                        example) if self._transform_pipline else example
                num_samples += 1
        else:
            if inspect.isgenerator(self.data):
                warnings.warn(
                    'Reciving generator as data source, data can only be iterated once'
                )
            for example in self.data:
                # Same fix as above: filter the example, not the pipeline.
                if (not self._filter_pipline or
                        self._filter(example)
                    ) and self._shard_filter(num_samples=num_samples):
                    yield self._transform(
                        example) if self._transform_pipline else example
                num_samples += 1
    def filter(self, fn):
        """
        Filters samples by the filter function and uses the filtered data to
        update this dataset.
        Args:
            fn (callable): A filter function that takes a sample as input and
                returns a boolean. Samples that return False are discarded.
        """
        self._filter_pipline.append(fn)
        return self
    def shard(self, num_shards=None, index=None):
        """
        Split the dataset into `num_shards` pieces.
        Args:
            num_shards (int, optional): An integer representing the number of
                data shards. If None, `num_shards` would be number of trainers.
                Defaults to None.
            index (int, optional): An integer representing the index of the
                current shard. If None, `index` would be the current trainer rank
                id. Defaults to None.
        """
        if num_shards is None:
            num_shards = dist.get_world_size()
        if index is None:
            index = dist.get_rank()
        # Keep every num_shards-th sample, offset by this shard's index.
        def sharder(num_shards, index, num_samples):
            if num_samples % num_shards == index:
                return True
            else:
                return False
        fn = partial(sharder, num_shards=num_shards, index=index)
        self._shard_filter = fn
        return self
    def map(self, fn):
        """
        Performs specific function on the dataset to transform and update every sample.
        Args:
            fn (callable): Transformations to be performed. It receives single
                sample as argument.
        """
        self._transform_pipline.append(fn)
        return self
class DatasetBuilder:
"""
A base class for all DatasetBuilder. It provides a `read()` function to turn
a data file into a MapDataset or IterDataset.
`_get_data()` function and `_read()` function should be implemented to download
data file and read data file into a `Iterable` of the examples.
For how to define a custom `DatasetBuilder`, please see `contribute_dataset
<https://paddlenlp.readthedocs.io/zh/latest/community/contribute_dataset.html>`__.
"""
lazy = False
    def __init__(self, lazy=None, name=None, **config):
        # `lazy` overrides the class-level default only when explicitly given;
        # otherwise the builder's own `lazy` class attribute is kept.
        if lazy is not None:
            self.lazy = lazy
        self.name = name
        self.config = config
    def read_datasets(self, splits=None, data_files=None):
        """Read one dataset per requested split and/or local data file.

        Returns a single dataset when only one is produced, otherwise a list.
        Download coordination across distributed trainers is done with
        per-split lock files under DATA_HOME.
        """
        datasets = []
        assert splits or data_files, "`data_files` and `splits` can not both be None."
        def remove_if_exit(filepath):
            # Best-effort cleanup of lock files at process exit.
            if isinstance(filepath, (list, tuple)):
                for file in filepath:
                    try:
                        os.remove(file)
                    except OSError:
                        pass
            else:
                try:
                    os.remove(filepath)
                except OSError:
                    pass
        if splits and data_files is None:
            assert isinstance(splits, str) or (
                isinstance(splits, list) and isinstance(splits[0], str)
            ) or (
                isinstance(splits, tuple) and isinstance(splits[0], str)
            ), "`splits` should be a string or list of string or a tuple of string."
            if isinstance(splits, str):
                splits = [splits]
            parallel_env = dist.ParallelEnv()
            unique_endpoints = _get_unique_endpoints(
                parallel_env.trainer_endpoints[:])
            # Move the register hook first and register all lock files together.
            lock_files = []
            for split in splits:
                lock_file = os.path.join(DATA_HOME, self.__class__.__name__)
                if self.name is not None:
                    lock_file = lock_file + "." + self.name
                lock_file += "." + split + ".done" + "." + str(os.getppid())
                lock_files.append(lock_file)
            # Must register to all procs to make the lock file can be removed
            # when any proc breaks. Otherwise, the single registered proc may
            # not receive the proper signal sent by the parent proc to exit.
            atexit.register(lambda: remove_if_exit(lock_files))
            for split in splits:
                filename = self._get_data(split)
                lock_file = os.path.join(DATA_HOME, self.__class__.__name__)
                if self.name is not None:
                    lock_file = lock_file + "." + self.name
                lock_file += "." + split + ".done" + "." + str(os.getppid())
                # `lock_file` indicates the finished status of`_get_data`.
                # `_get_data` only works in the `unique_endpoints` specified
                # proc since `get_path_from_url` only work for it. The other
                # procs wait `_get_data` to be finished.
                if parallel_env.current_endpoint in unique_endpoints:
                    f = open(lock_file, "w")
                    f.close()
                else:
                    while not os.path.exists(lock_file):
                        time.sleep(1)
                datasets.append(self.read(filename=filename, split=split))
        if data_files:
            assert isinstance(data_files, str) or isinstance(
                data_files, tuple
            ) or isinstance(
                data_files, list
            ), "`data_files` should be a string or tuple or list of strings."
            if isinstance(data_files, str):
                data_files = [data_files]
            default_split = 'train'
            if splits:
                # When both are given, splits must pair one-to-one with files.
                if isinstance(splits, str):
                    splits = [splits]
                assert len(splits) == len(
                    data_files
                ), "Number of `splits` and number of `data_files` should be the same if you want to specify the split of loacl data file."
                datasets += [
                    self.read(
                        filename=data_files[i], split=splits[i])
                    for i in range(len(data_files))
                ]
            else:
                datasets += [
                    self.read(
                        filename=data_files[i], split=default_split)
                    for i in range(len(data_files))
                ]
        return datasets if len(datasets) > 1 else datasets[0]
def read(self, filename, split='train'):
    """
    Returns a dataset containing all the examples that can be read from the file path.
    If `self.lazy` is False, this eagerly reads all instances from `self._read()`
    and returns a `MapDataset`.
    If `self.lazy` is True, this returns an `IterDataset`, which internally
    relies on the generator created from `self._read()` to lazily produce examples.
    In this case your implementation of `_read()` must also be lazy
    (that is, not load all examples into memory at once).
    Args:
        filename (str): Path of data file to read, usually provided by `_get_data`
            function.
        split (str, optional): The split name of selected dataset. This only makes
            a different when data files of different splits have different structures.
    Returns:
        A `MapDataset|IterDataset`.
    """
    label_list = self.get_labels()
    vocab_info = self.get_vocab()
    if self.lazy:

        def generate_examples():
            # `_read` may optionally accept the split as a second positional
            # argument; inspect its arity (self + filename + split > 2) to
            # decide how to call it.
            generator = self._read(
                filename, split
            ) if self._read.__code__.co_argcount > 2 else self._read(
                filename)
            for example in generator:
                # We need to check if the example contains label column and confirm its name.
                # For now we only allow `label` or `labels` to be the name of label column.
                if 'labels' in example.keys():
                    label_col = 'labels'
                elif 'label' in example.keys():
                    label_col = 'label'
                else:
                    label_col = None
                # Convert class label to label ids.
                if label_list is not None and example.get(label_col, None):
                    label_dict = {}
                    for i, label in enumerate(label_list):
                        label_dict[label] = i
                    if isinstance(example[label_col], list) or isinstance(
                            example[label_col], tuple):
                        # Sequence-labeling style example: map every tag
                        # in place.
                        for label_idx in range(len(example[label_col])):
                            example[label_col][label_idx] = label_dict[
                                example[label_col][label_idx]]
                    else:
                        example[label_col] = label_dict[example[label_col]]
                    yield example
                else:
                    yield example

        return IterDataset(
            generate_examples(),
            label_list=label_list,
            vocab_info=vocab_info)
    else:
        examples = self._read(
            filename,
            split) if self._read.__code__.co_argcount > 2 else self._read(
                filename)
        # Then some validation: materialize generators so we can index and
        # length-check the examples below.
        if not isinstance(examples, list):
            examples = list(examples)
        if not examples:
            raise ValueError(
                "No instances were read from the given filepath {}. "
                "Is the path correct?".format(filename))
        # We need to check if the example contains label column and confirm its name.
        # For now we only allow `label` or `labels` to be the name of label column.
        # NOTE: only the first example is inspected; assumes a homogeneous schema.
        if 'labels' in examples[0].keys():
            label_col = 'labels'
        elif 'label' in examples[0].keys():
            label_col = 'label'
        else:
            label_col = None
        # Convert class label to label ids.
        if label_list is not None and examples[0].get(label_col, None):
            label_dict = {}
            for i, label in enumerate(label_list):
                label_dict[label] = i
            for idx in range(len(examples)):
                if isinstance(examples[idx][label_col],
                              list) or isinstance(examples[idx][label_col],
                                                  tuple):
                    for label_idx in range(len(examples[idx][label_col])):
                        examples[idx][label_col][label_idx] = label_dict[
                            examples[idx][label_col][label_idx]]
                else:
                    examples[idx][label_col] = label_dict[examples[idx][
                        label_col]]
        return MapDataset(
            examples, label_list=label_list, vocab_info=vocab_info)
def _read(self, filename: str, *args):
    """
    Reads examples from the given file_path and returns them as an
    `Iterable` (which could be a list or a generator).
    This method must be implemented in self-defined `DatasetBuilder`.
    Args:
        filename (str): Path of the data file to read.
        *args: Optionally the split name, when the subclass declares it.
    """
    raise NotImplementedError
def _get_data(self, mode: str):
    """
    Downloads examples from the given URL and customized split
    informations and returns a filepath.
    This method must be implemented in self-defined `DatasetBuilder`.
    Args:
        mode (str): The split name whose data file should be obtained.
    Returns:
        str: Local path of the downloaded/cached data file.
    """
    raise NotImplementedError
def get_labels(self):
    """
    Returns list of class labels of the dataset if specified.
    Subclasses override this; the base implementation reports no labels.
    """
    return None
def get_vocab(self):
    """
    Returns vocab file path of the dataset if specified.
    Subclasses override this; the base implementation reports no vocab.
    """
    return None
class SimpleBuilder(DatasetBuilder):
    """Minimal builder that delegates all reading to a user-supplied function."""

    def __init__(self, lazy, read_func):
        self._read = read_func
        self.lazy = lazy

    def read(self, **kwargs):
        """Build a `MapDataset` (eager) or `IterDataset` (lazy) from `read_func`."""
        if not self.lazy:
            examples = self._read(**kwargs)
            # Random-access containers can be wrapped directly; anything else
            # (e.g. a generator) is materialised into a list first.
            if hasattr(examples, '__len__') and hasattr(examples,
                                                        '__getitem__'):
                return MapDataset(examples)
            return MapDataset(list(examples))

        def generate_examples():
            yield from self._read(**kwargs)

        # The generator *function* (not a generator object) is handed over,
        # matching the original behaviour.
        return IterDataset(generate_examples)
| 38.865837
| 138
| 0.574698
|
import atexit
import collections
import io
import math
import os
import warnings
import sys
import inspect
from multiprocess import Pool, RLock
import time
import paddle.distributed as dist
from paddle.io import Dataset, IterableDataset
from paddle.dataset.common import md5file
from paddle.utils.download import get_path_from_url, _get_unique_endpoints
from paddlenlp.utils.env import DATA_HOME
from typing import Iterable, Iterator, Optional, List, Any, Callable, Union
import importlib
from functools import partial
__all__ = ['MapDataset', 'DatasetBuilder', 'IterDataset', 'load_dataset']
DATASETS_MODULE_PATH = "paddlenlp.datasets."
def import_main_class(module_path):
module_path = DATASETS_MODULE_PATH + module_path
module = importlib.import_module(module_path)
main_cls_type = DatasetBuilder
module_main_cls = None
for name, obj in module.__dict__.items():
if isinstance(obj, type) and issubclass(obj, main_cls_type):
if name == 'DatasetBuilder':
continue
module_main_cls = obj
break
return module_main_cls
def load_dataset(path_or_read_func,
name=None,
data_files=None,
splits=None,
lazy=None,
**kwargs):
if inspect.isfunction(path_or_read_func):
assert lazy is not None, "lazy can not be None in custom mode."
kwargs['name'] = name
kwargs['data_files'] = data_files
kwargs['splits'] = splits
custom_kwargs = {}
for name in inspect.signature(path_or_read_func).parameters.keys():
if name in kwargs.keys():
custom_kwargs[name] = kwargs[name]
reader_instance = SimpleBuilder(lazy=lazy, read_func=path_or_read_func)
return reader_instance.read(**custom_kwargs)
else:
reader_cls = import_main_class(path_or_read_func)
reader_instance = reader_cls(lazy=lazy, name=name, **kwargs)
if hasattr(reader_instance, 'BUILDER_CONFIGS'):
if name in reader_cls.BUILDER_CONFIGS.keys():
split_names = reader_cls.BUILDER_CONFIGS[name]['splits'].keys()
else:
raise ValueError(
'Invalid name "{}". Should be one of {}.'.format(
name, list(reader_cls.BUILDER_CONFIGS.keys())))
elif hasattr(reader_instance, 'SPLITS'):
split_names = reader_instance.SPLITS.keys()
else:
raise AttributeError(
"Either 'SPLITS' or 'BUILDER_CONFIGS' must be implemented for DatasetBuilder."
)
selected_splits = []
if isinstance(splits, list) or isinstance(splits, tuple):
selected_splits.extend(splits)
else:
selected_splits += [splits]
for split_name in selected_splits:
if split_name not in split_names and split_name != None:
raise ValueError('Invalid split "{}". Should be one of {}.'.
format(split_name, list(split_names)))
datasets = reader_instance.read_datasets(
data_files=data_files, splits=splits)
return datasets
class MapDataset(Dataset):
def __init__(self, data, **kwargs):
self.data = data
self._transform_pipline = []
self.new_data = self.data
self.label_list = kwargs.pop('label_list', None)
self.vocab_info = kwargs.pop('vocab_info', None)
def _transform(self, data):
for fn in self._transform_pipline:
data = fn(data)
return data
def __getitem__(self, idx):
return self._transform(self.new_data[
idx]) if self._transform_pipline else self.new_data[idx]
def __len__(self):
return len(self.new_data)
def filter(self, fn, num_workers=0):
assert num_workers >= 0, "num_workers should be a non-negative value"
if num_workers > 0:
pool = Pool(
num_workers, initargs=(RLock(), ), maxtasksperchild=1000)
def filter_shard(num_workers, index, fn):
self.shard(
num_shards=num_workers, index=index, contiguous=True)
self._filter(fn=fn)
return self
kwds_per_shard = [
dict(
num_workers=num_workers, index=rank, fn=fn)
for rank in range(num_workers)
]
results = [
pool.apply_async(
filter_shard, kwds=kwds) for kwds in kwds_per_shard
]
transformed_shards = [r.get() for r in results]
pool.close()
pool.join()
self.new_data = []
for i in range(num_workers):
self.new_data += transformed_shards[i].new_data
return self
else:
return self._filter(fn)
def _filter(self, fn):
self.new_data = [
self.new_data[idx] for idx in range(len(self.new_data))
if fn(self.new_data[idx])
]
return self
def shard(self, num_shards=None, index=None, contiguous=False):
if num_shards is None:
num_shards = dist.get_world_size()
if index is None:
index = dist.get_rank()
if contiguous:
div = len(self) // num_shards
mod = len(self) % num_shards
start = div * index + min(index, mod)
end = start + div + (1 if index < mod else 0)
self.new_data = self.new_data[start:end]
else:
num_samples = int(math.ceil(len(self.new_data) * 1.0 / num_shards))
self.new_data = [
self.new_data[idx] for idx in range(len(self.new_data))
if idx % num_shards == index
]
return self
def map(self, fn, lazy=True, batched=False, num_workers=0):
assert num_workers >= 0, "num_workers should be a non-negative value"
if num_workers > 0:
def map_shard(num_workers, index, fn, batched):
self.shard(
num_shards=num_workers, index=index, contiguous=True)
self._map(fn=fn, lazy=False, batched=batched)
return self
kwds_per_shard = [
dict(
num_workers=num_workers,
index=rank,
fn=fn,
batched=batched) for rank in range(num_workers)
]
pool = Pool(
num_workers, initargs=(RLock(), ), maxtasksperchild=1000)
results = [
pool.apply_async(
map_shard, kwds=kwds) for kwds in kwds_per_shard
]
transformed_shards = [r.get() for r in results]
pool.close()
pool.join()
self.new_data = []
for i in range(num_workers):
self.new_data += transformed_shards[i].new_data
return self
else:
return self._map(fn, lazy=lazy, batched=batched)
def _map(self, fn, lazy=True, batched=False):
if batched:
self.new_data = fn(self.new_data)
elif lazy:
self._transform_pipline.append(fn)
else:
self.new_data = [
fn(self.new_data[idx]) for idx in range(len(self.new_data))
]
return self
class IterDataset(IterableDataset):
def __init__(self, data, **kwargs):
self.data = data
self._transform_pipline = []
self._filter_pipline = []
self.label_list = kwargs.pop('label_list', None)
self.vocab_info = kwargs.pop('vocab_info', None)
def _transform(self, data):
for fn in self._transform_pipline:
data = fn(data)
return data
def _shard_filter(self, num_samples):
return True
def _filter(self, data):
for fn in self._filter_pipline:
if not fn(data):
return False
return True
def __iter__(self):
num_samples = 0
if inspect.isfunction(self.data):
for example in self.data():
if (not self._filter_pipline or
self._filter(self._filter_pipline)
) and self._shard_filter(num_samples=num_samples):
yield self._transform(
example) if self._transform_pipline else example
num_samples += 1
else:
if inspect.isgenerator(self.data):
warnings.warn(
'Reciving generator as data source, data can only be iterated once'
)
for example in self.data:
if (not self._filter_pipline or
self._filter(self._filter_pipline)
) and self._shard_filter(num_samples=num_samples):
yield self._transform(
example) if self._transform_pipline else example
num_samples += 1
def filter(self, fn):
self._filter_pipline.append(fn)
return self
def shard(self, num_shards=None, index=None):
if num_shards is None:
num_shards = dist.get_world_size()
if index is None:
index = dist.get_rank()
def sharder(num_shards, index, num_samples):
if num_samples % num_shards == index:
return True
else:
return False
fn = partial(sharder, num_shards=num_shards, index=index)
self._shard_filter = fn
return self
def map(self, fn):
self._transform_pipline.append(fn)
return self
class DatasetBuilder:
lazy = False
def __init__(self, lazy=None, name=None, **config):
if lazy is not None:
self.lazy = lazy
self.name = name
self.config = config
def read_datasets(self, splits=None, data_files=None):
datasets = []
assert splits or data_files, "`data_files` and `splits` can not both be None."
def remove_if_exit(filepath):
if isinstance(filepath, (list, tuple)):
for file in filepath:
try:
os.remove(file)
except OSError:
pass
else:
try:
os.remove(filepath)
except OSError:
pass
if splits and data_files is None:
assert isinstance(splits, str) or (
isinstance(splits, list) and isinstance(splits[0], str)
) or (
isinstance(splits, tuple) and isinstance(splits[0], str)
), "`splits` should be a string or list of string or a tuple of string."
if isinstance(splits, str):
splits = [splits]
parallel_env = dist.ParallelEnv()
unique_endpoints = _get_unique_endpoints(
parallel_env.trainer_endpoints[:])
lock_files = []
for split in splits:
lock_file = os.path.join(DATA_HOME, self.__class__.__name__)
if self.name is not None:
lock_file = lock_file + "." + self.name
lock_file += "." + split + ".done" + "." + str(os.getppid())
lock_files.append(lock_file)
atexit.register(lambda: remove_if_exit(lock_files))
for split in splits:
filename = self._get_data(split)
lock_file = os.path.join(DATA_HOME, self.__class__.__name__)
if self.name is not None:
lock_file = lock_file + "." + self.name
lock_file += "." + split + ".done" + "." + str(os.getppid())
if parallel_env.current_endpoint in unique_endpoints:
f = open(lock_file, "w")
f.close()
else:
while not os.path.exists(lock_file):
time.sleep(1)
datasets.append(self.read(filename=filename, split=split))
if data_files:
assert isinstance(data_files, str) or isinstance(
data_files, tuple
) or isinstance(
data_files, list
), "`data_files` should be a string or tuple or list of strings."
if isinstance(data_files, str):
data_files = [data_files]
default_split = 'train'
if splits:
if isinstance(splits, str):
splits = [splits]
assert len(splits) == len(
data_files
), "Number of `splits` and number of `data_files` should be the same if you want to specify the split of loacl data file."
datasets += [
self.read(
filename=data_files[i], split=splits[i])
for i in range(len(data_files))
]
else:
datasets += [
self.read(
filename=data_files[i], split=default_split)
for i in range(len(data_files))
]
return datasets if len(datasets) > 1 else datasets[0]
def read(self, filename, split='train'):
label_list = self.get_labels()
vocab_info = self.get_vocab()
if self.lazy:
def generate_examples():
generator = self._read(
filename, split
) if self._read.__code__.co_argcount > 2 else self._read(
filename)
for example in generator:
if 'labels' in example.keys():
label_col = 'labels'
elif 'label' in example.keys():
label_col = 'label'
else:
label_col = None
if label_list is not None and example.get(label_col, None):
label_dict = {}
for i, label in enumerate(label_list):
label_dict[label] = i
if isinstance(example[label_col], list) or isinstance(
example[label_col], tuple):
for label_idx in range(len(example[label_col])):
example[label_col][label_idx] = label_dict[
example[label_col][label_idx]]
else:
example[label_col] = label_dict[example[label_col]]
yield example
else:
yield example
return IterDataset(
generate_examples(),
label_list=label_list,
vocab_info=vocab_info)
else:
examples = self._read(
filename,
split) if self._read.__code__.co_argcount > 2 else self._read(
filename)
if not isinstance(examples, list):
examples = list(examples)
if not examples:
raise ValueError(
"No instances were read from the given filepath {}. "
"Is the path correct?".format(filename))
if 'labels' in examples[0].keys():
label_col = 'labels'
elif 'label' in examples[0].keys():
label_col = 'label'
else:
label_col = None
if label_list is not None and examples[0].get(label_col, None):
label_dict = {}
for i, label in enumerate(label_list):
label_dict[label] = i
for idx in range(len(examples)):
if isinstance(examples[idx][label_col],
list) or isinstance(examples[idx][label_col],
tuple):
for label_idx in range(len(examples[idx][label_col])):
examples[idx][label_col][label_idx] = label_dict[
examples[idx][label_col][label_idx]]
else:
examples[idx][label_col] = label_dict[examples[idx][
label_col]]
return MapDataset(
examples, label_list=label_list, vocab_info=vocab_info)
def _read(self, filename: str, *args):
raise NotImplementedError
def _get_data(self, mode: str):
raise NotImplementedError
def get_labels(self):
return None
def get_vocab(self):
return None
class SimpleBuilder(DatasetBuilder):
def __init__(self, lazy, read_func):
self._read = read_func
self.lazy = lazy
def read(self, **kwargs):
if self.lazy:
def generate_examples():
generator = self._read(**kwargs)
for example in generator:
yield example
return IterDataset(generate_examples)
else:
examples = self._read(**kwargs)
if hasattr(examples, '__len__') and hasattr(examples,
'__getitem__'):
return MapDataset(examples)
else:
return MapDataset(list(examples))
| true
| true
|
1c4a5d83d36d2e8c948aeb0969a624af35d58159
| 6,949
|
py
|
Python
|
simple_python_profiler/main.py
|
jeshan/simple-python-profiler
|
a3d3a709781b5aaff38b55389c93efd132274344
|
[
"MIT"
] | null | null | null |
simple_python_profiler/main.py
|
jeshan/simple-python-profiler
|
a3d3a709781b5aaff38b55389c93efd132274344
|
[
"MIT"
] | 1
|
2021-06-02T00:57:36.000Z
|
2021-06-02T00:57:36.000Z
|
simple_python_profiler/main.py
|
jeshan/simple-python-profiler
|
a3d3a709781b5aaff38b55389c93efd132274344
|
[
"MIT"
] | null | null | null |
import functools
import inspect
import sys
from time import perf_counter_ns
from typing import List, Dict
from loguru import logger
from recursive_decorator import recursive_decorator
def fn_description(f):
    """Return a dotted, human-readable identifier for callable *f*."""
    module_name = f.__module__
    qualified = f.__qualname__
    return module_name + '.' + qualified
def sort_fn(invocation):
    """Sort key: elapsed time of a single invocation record.

    Invocation records in this module are dicts produced by
    Profiler.add_invocation ({'start', 'end', 'result', 'f'}), so item
    access is required here; the previous attribute access
    (invocation.end) raised AttributeError on those records.
    """
    return invocation['end'] - invocation['start']
def log_call(f):
    """Decorator that logs entry to and exit from *f* at DEBUG level.

    Exit is logged only when the call returns normally (exceptions
    propagate without the exit line), matching the original behaviour.
    """

    @functools.wraps(f)
    def inner(*args, **kwargs):
        logger.debug(f'Entering {f}')
        outcome = f(*args, **kwargs)
        logger.debug(f'Exiting {f}')
        return outcome

    return inner
@log_call
def sort_invocations_by_individual_time(invocations):
    """Return the invocations ordered from slowest single call to fastest."""
    return sorted(invocations, reverse=True, key=sort_fn)
def duration(invocation):
    """Elapsed time (in the timer's units) of one invocation record."""
    started = invocation['start']
    finished = invocation['end']
    return finished - started
@log_call
def sort_invocations_by_function_time(group):
    """Aggregate per-function totals and sort them slowest-first.

    *group* maps a function to its list of invocation records; the result
    is a list of (function, total_time, call_count) tuples sorted by
    total_time, descending.
    """
    totals = []
    for fn, calls in group.items():
        total_time = sum(duration(call) for call in calls)
        totals.append((fn, total_time, len(calls)))
    totals.sort(key=lambda entry: entry[1], reverse=True)
    return totals
@log_call
def group_by_function(invocations: List) -> Dict[object, List]:
    """Bucket invocation records by the function they measured."""
    grouped: Dict[object, List] = {}
    for record in invocations:
        grouped.setdefault(record['f'], []).append(record)
    return grouped
def is_site_package(module):
    """Return True when *module* was loaded from a site-packages directory.

    ``__file__`` is absent for builtins and namespace packages; fall back
    to an empty string. (The original fell back to an empty dict, which
    only returned False by accident of ``in`` testing dict keys.)
    """
    return 'site-packages' in (module.__dict__.get('__file__') or '')
def exclude_paths(module):
    """Return the module's ``__file__`` entry, or None when it has none."""
    namespace = module.__dict__
    return namespace.get('__file__')
def exclude_importers(module):
    """Return True when *module* was loaded by six's ``_SixMetaPathImporter``."""
    loader = module.__dict__.get('__loader__')
    if not loader:
        # No loader recorded: nothing to exclude.
        return False
    loader_type = type(loader)
    # Prefer the loader class name; fall back to an instance-level ``name``.
    if hasattr(loader_type, '__name__'):
        name = loader_type.__name__
    elif hasattr(loader, 'name'):
        name = loader.name
    dotted = loader_type.__module__ + '.' + name
    return dotted.endswith('._SixMetaPathImporter')
def is_system_package(module):
    """Heuristic: does *module* belong to the interpreter / standard library?"""
    from importlib._bootstrap import BuiltinImporter
    loader = module.__dict__.get('__loader__')
    if loader in [BuiltinImporter]:
        return True
    if module.__name__.startswith('typing.'):
        return True
    if not hasattr(module, '__file__'):
        return False
    # Match the stdlib path layout, e.g. "python3.9/json" inside __file__.
    package_path = (module.__package__ or '').replace('.', '/')
    marker = f"python{sys.version_info.major}.{sys.version_info.minor}/{package_path}"
    return marker in module.__file__
def get_loaded_modules():
    """Snapshot ``sys.modules`` as a list of (name, module) pairs."""
    import sys
    return [(module_name, module) for module_name, module in sys.modules.items()]
def mergeFunctionMetadata(f, g):
    # this function was copied from Twisted core, https://github.com/racker/python-twisted-core
    # licence notice in file ../LICENCE-Twisted-core
    """
    Overwrite C{g}'s name and docstring with values from C{f}. Update
    C{g}'s instance dictionary with C{f}'s.
    To use this function safely you must use the return value. In Python 2.3,
    L{mergeFunctionMetadata} will create a new function. In later versions of
    Python, C{g} will be mutated and returned.
    @return: A function that has C{g}'s behavior and metadata merged from
        C{f}.
    """
    try:
        # Normal (Python 3) path: rename g in place; the else-branch below
        # then binds `merged` to g.
        g.__name__ = f.__name__
    except TypeError:
        try:
            import types

            # NOTE(review): this fallback uses Python 2-only attributes
            # (func_code/func_globals/func_closure) and inspect.getargspec
            # (removed in Python 3.11), so on modern Python 3 it raises and
            # is swallowed — confirm whether this path is still needed.
            merged = types.FunctionType(
                g.func_code, g.func_globals, f.__name__, inspect.getargspec(g)[-1], g.func_closure
            )
        except TypeError:
            # NOTE(review): if both attempts fail, `merged` is never bound
            # and the code below raises NameError — confirm intent.
            pass
    else:
        merged = g
    try:
        merged.__doc__ = f.__doc__
    except (TypeError, AttributeError):
        pass
    try:
        merged.__dict__.update(g.__dict__)
        merged.__dict__.update(f.__dict__)
    except (TypeError, AttributeError):
        pass
    merged.__module__ = f.__module__
    return merged
def time_fn():
    """Current high-resolution monotonic timestamp, in nanoseconds."""
    now = perf_counter_ns()
    return now
def singleton(cls):
    """Class decorator: every construction yields the same shared instance."""
    instance = cls()
    # Redirect all later cls() calls to the pre-built instance.
    cls.__new__ = staticmethod(lambda cls: instance)
    # Remove __init__ so the shared instance is not re-initialised per call.
    try:
        del cls.__init__
    except AttributeError:
        pass
    return cls
@singleton
class Profiler:
    """Process-wide collector of timed function invocations.

    Used as a context manager: entering instruments loaded modules via
    bootstrap(); exiting groups, ranks, truncates and prints the
    recorded timings.
    """

    def __init__(self):
        logger.debug('creating instance of profiler')
        self.invocations = []

    def add_invocation(self, start, end, result, f):
        """Append one {'start', 'end', 'result', 'f'} record."""
        record = {'start': start, 'end': end, 'result': result, 'f': f}
        self.invocations.append(record)

    def __enter__(self):
        bootstrap()
        logger.debug('Start recording invocations')

    def __exit__(self, exc_type, exc_val, exc_tb):
        logger.debug(f'stopped recording invocations, got {len(self.invocations)} of them.')
        grouped = group_by_function(self.invocations)
        ranked = sort_invocations_by_function_time(grouped)
        ranked = limit_results(ranked)
        print_results(ranked)
@recursive_decorator
def profile_recursive(f):
    """Like profile(), but recursive_decorator also wraps the calls *f* makes."""
    return profile(f)
def profile(f):
    """Wrap *f* so every call records (start, end, result, f) in the Profiler."""
    if f in [time_fn, profile]:
        # Never instrument the timer or this factory itself.
        return f

    @functools.wraps(f)
    def timed(*args, **kwargs):
        started = time_fn()
        outcome = f(*args, **kwargs)
        finished = time_fn()
        Profiler().add_invocation(started, finished, outcome, f)
        return outcome

    return timed
def edit_functions(items, module):
    """Replace each (name, fn) member of *module* with a profiled wrapper."""
    for _, fn in items:
        if fn == edit_functions:
            # Do not instrument this function itself.
            continue
        wrapped = mergeFunctionMetadata(fn, profile(fn))
        setattr(module, fn.__name__, wrapped)
def bootstrap():
    """Instrument every eligible loaded module by wrapping its functions.

    Walks sys.modules, skips the profiler itself, site-packages, importer
    shims and stdlib/system modules, then rebinds each remaining module's
    functions to profiled wrappers via edit_functions().
    """
    for name, module in get_loaded_modules():
        try:
            items = inspect.getmembers(module, inspect.isfunction)
        except Exception as e:
            # I saw this could happen when in debug mode
            logger.warning(f'Failed getting members for module {module}, skipping')
            logger.error(e)
            continue
        # Flags kept for easy manual toggling while debugging the filters.
        exclude_site_package = True
        exclude_system_package = True
        if 'simple_python_profiler' in module.__name__:
            logger.trace('Excluding the profiler itself')
            continue
        if exclude_site_package and is_site_package(module):
            logger.trace(f'excluding site package {module}')
            continue
        if exclude_importers(module):
            logger.trace(f'excluding importer {module}')
            continue
        if exclude_system_package and is_system_package(module):
            logger.trace(f'excluding system module {module}')
            continue
        logger.debug(f'allowing module {module}')
        edit_functions(items, module)
def limit_results(groups):
    """Keep only the first (slowest) 100 entries of an already-sorted list."""
    top_n = 100
    return groups[:top_n]
@log_call
def print_results(by_time):
    """Log one line per function: qualified name, call count, total ms."""
    for fn, total_ns, call_count in by_time:
        logger.info(fn_description(fn) + f',invoked={call_count} times, total={total_ns / 1_000_000}ms')
| 27.466403
| 121
| 0.647287
|
import functools
import inspect
import sys
from time import perf_counter_ns
from typing import List, Dict
from loguru import logger
from recursive_decorator import recursive_decorator
def fn_description(f):
return f'{f.__module__}.{f.__qualname__}'
def sort_fn(invocation):
return invocation.end - invocation.start
def log_call(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
logger.debug(f'Entering {f}')
result = f(*args, **kwargs)
logger.debug(f'Exiting {f}')
return result
return wrapper
@log_call
def sort_invocations_by_individual_time(invocations):
return sorted(invocations, key=sort_fn, reverse=True)
def duration(invocation):
return invocation['end'] - invocation['start']
@log_call
def sort_invocations_by_function_time(group):
name_speed_tuple_list = []
for fn_name, invocations in group.items():
total_per_function = sum(map(lambda x: duration(x), invocations))
name_speed_tuple_list.append((fn_name, total_per_function, len(invocations)))
return sorted(name_speed_tuple_list, key=lambda x: x[1], reverse=True)
@log_call
def group_by_function(invocations: List) -> Dict[object, List]:
result = {}
for invocation in invocations:
f = invocation['f']
if f not in result:
result[f] = []
result[f].append(invocation)
return result
def is_site_package(module):
return 'site-packages' in (module.__dict__.get('__file__') or {})
def exclude_paths(module):
return module.__dict__.get('__file__')
def exclude_importers(module):
loader = module.__dict__.get('__loader__')
loader_type = type(loader)
if hasattr(loader_type, '__name__'):
name = loader_type.__name__
elif hasattr(loader, 'name'):
name = loader.name
if loader:
qualified_name = loader_type.__module__ + '.' + name
else:
qualified_name = ''
return qualified_name.endswith('._SixMetaPathImporter')
def is_system_package(module):
from importlib._bootstrap import BuiltinImporter
loader = module.__dict__.get('__loader__')
return (
loader in [BuiltinImporter]
or (
hasattr(module, '__file__')
and f"python{sys.version_info.major}.{sys.version_info.minor}/{(module.__package__ or '').replace('.', '/')}"
in module.__file__
)
or module.__name__.startswith('typing.')
)
def get_loaded_modules():
import sys
all_modules = []
for name, module in sys.modules.items():
all_modules.append((name, module))
return all_modules
def mergeFunctionMetadata(f, g):
try:
g.__name__ = f.__name__
except TypeError:
try:
import types
merged = types.FunctionType(
g.func_code, g.func_globals, f.__name__, inspect.getargspec(g)[-1], g.func_closure
)
except TypeError:
pass
else:
merged = g
try:
merged.__doc__ = f.__doc__
except (TypeError, AttributeError):
pass
try:
merged.__dict__.update(g.__dict__)
merged.__dict__.update(f.__dict__)
except (TypeError, AttributeError):
pass
merged.__module__ = f.__module__
return merged
def time_fn():
return perf_counter_ns()
def singleton(cls):
obj = cls()
cls.__new__ = staticmethod(lambda cls: obj)
try:
del cls.__init__
except AttributeError:
pass
return cls
@singleton
class Profiler:
def __init__(self):
logger.debug('creating instance of profiler')
self.invocations = []
def add_invocation(self, start, end, result, f):
i = {'start': start, 'end': end, 'result': result, 'f': f}
self.invocations.append(i)
def __enter__(self):
bootstrap()
logger.debug('Start recording invocations')
def __exit__(self, exc_type, exc_val, exc_tb):
logger.debug(f'stopped recording invocations, got {len(self.invocations)} of them.')
invocation_group = group_by_function(self.invocations)
by_time = sort_invocations_by_function_time(invocation_group)
by_time = limit_results(by_time)
print_results(by_time)
@recursive_decorator
def profile_recursive(f):
return profile(f)
def profile(f):
if f in [time_fn, profile]:
return f
@functools.wraps(f)
def wrapper(*args, **kwargs):
start = time_fn()
result = f(*args, **kwargs)
end = time_fn()
Profiler().add_invocation(start, end, result, f)
return result
return wrapper
def edit_functions(items, module):
for fn_name, fn in items:
if fn == edit_functions:
continue
new_item = mergeFunctionMetadata(fn, profile(fn))
setattr(module, fn.__name__, new_item)
def bootstrap():
for name, module in get_loaded_modules():
try:
items = inspect.getmembers(module, inspect.isfunction)
except Exception as e:
logger.warning(f'Failed getting members for module {module}, skipping')
logger.error(e)
continue
exclude_site_package = True
exclude_system_package = True
if 'simple_python_profiler' in module.__name__:
logger.trace('Excluding the profiler itself')
continue
if exclude_site_package and is_site_package(module):
logger.trace(f'excluding site package {module}')
continue
if exclude_importers(module):
logger.trace(f'excluding importer {module}')
continue
if exclude_system_package and is_system_package(module):
logger.trace(f'excluding system module {module}')
continue
logger.debug(f'allowing module {module}')
edit_functions(items, module)
def limit_results(groups):
return groups[:100]
@log_call
def print_results(by_time):
for item in by_time:
logger.info(fn_description(item[0]) + f',invoked={item[2]} times, total={item[1] / 1_000_000}ms')
| true
| true
|
1c4a610aebca605ce60f6577184339a699daaaa0
| 470
|
py
|
Python
|
tests/performance/conftest.py
|
rspadim/aiocache
|
bf675ae912173bee25cc1d8c22b77f57de34375d
|
[
"BSD-3-Clause"
] | 213
|
2020-11-02T14:29:46.000Z
|
2022-03-24T23:12:32.000Z
|
tests/performance/conftest.py
|
rspadim/aiocache
|
bf675ae912173bee25cc1d8c22b77f57de34375d
|
[
"BSD-3-Clause"
] | 48
|
2020-11-02T11:17:13.000Z
|
2022-03-24T17:55:31.000Z
|
tests/performance/conftest.py
|
rspadim/aiocache
|
bf675ae912173bee25cc1d8c22b77f57de34375d
|
[
"BSD-3-Clause"
] | 49
|
2020-11-13T07:41:37.000Z
|
2022-03-25T12:24:49.000Z
|
import pytest
from aiocache import Cache
from aiocache.backends.redis import RedisBackend
@pytest.fixture
def redis_cache(event_loop):
    # Redis-backed cache under the "test" namespace with a single pooled
    # connection, so performance tests exercise one connection at a time.
    cache = Cache(Cache.REDIS, namespace="test", pool_max_size=1)
    yield cache
    # Teardown: close every pooled connection and wait for the close to
    # complete on the test event loop.
    for _, pool in RedisBackend.pools.items():
        pool.close()
        event_loop.run_until_complete(pool.wait_closed())
@pytest.fixture
def memcached_cache():
    # Memcached-backed cache under the "test" namespace, single connection.
    # NOTE(review): unlike redis_cache there is no teardown closing the
    # pool — confirm whether that is intentional.
    cache = Cache(Cache.MEMCACHED, namespace="test", pool_size=1)
    yield cache
| 22.380952
| 65
| 0.731915
|
import pytest
from aiocache import Cache
from aiocache.backends.redis import RedisBackend
@pytest.fixture
def redis_cache(event_loop):
cache = Cache(Cache.REDIS, namespace="test", pool_max_size=1)
yield cache
for _, pool in RedisBackend.pools.items():
pool.close()
event_loop.run_until_complete(pool.wait_closed())
@pytest.fixture
def memcached_cache():
cache = Cache(Cache.MEMCACHED, namespace="test", pool_size=1)
yield cache
| true
| true
|
1c4a624f74d426cc722ed45176fcf417a5aa38db
| 2,970
|
py
|
Python
|
spacy_crfsuite/tokenizer.py
|
lusterck/spacy_crfsuite
|
21acb6431b8c3c98528d6994880ca7bb3b69f499
|
[
"MIT"
] | 12
|
2020-07-29T17:08:06.000Z
|
2022-03-28T10:39:39.000Z
|
spacy_crfsuite/tokenizer.py
|
marzi-heidari/spacy_crfsuite
|
b9f31aac9e727245791197aed4245f03a57a89ba
|
[
"MIT"
] | 5
|
2020-07-29T17:08:03.000Z
|
2022-03-28T07:16:19.000Z
|
spacy_crfsuite/tokenizer.py
|
marzi-heidari/spacy_crfsuite
|
b9f31aac9e727245791197aed4245f03a57a89ba
|
[
"MIT"
] | 7
|
2020-08-06T11:08:30.000Z
|
2022-01-20T14:25:19.000Z
|
import numpy as np
import spacy
from abc import ABCMeta, abstractmethod
from typing import Text, Optional, Any, Dict, Union
class Token:
    """A single token: its text, character span, lemma and metadata.

    Ordering and equality compare (start, end, text, lemma).
    """

    def __init__(
        self,
        text: Text,
        start: int,
        end: Optional[int] = None,
        data: Optional[Dict[Text, Any]] = None,
        lemma: Optional[Text] = None,
    ) -> None:
        self.text = text
        self.start = start
        # Bug fix: compare against None explicitly so an explicit end of 0
        # is honoured (the old truthiness test recomputed it).
        self.end = end if end is not None else start + len(text)
        self.data = data if data else {}
        self.lemma = lemma or text

    def set(self, prop: Text, info: Any) -> None:
        """Attach arbitrary metadata under key *prop*."""
        self.data[prop] = info

    def get(self, prop: Text, default: Optional[Any] = None) -> Any:
        """Read metadata stored under *prop*, or *default* when absent."""
        return self.data.get(prop, default)

    def __eq__(self, other):
        if not isinstance(other, Token):
            return NotImplemented
        return (self.start, self.end, self.text, self.lemma) == (
            other.start,
            other.end,
            other.text,
            other.lemma,
        )

    def __lt__(self, other):
        if not isinstance(other, Token):
            return NotImplemented
        return (self.start, self.end, self.text, self.lemma) < (
            other.start,
            other.end,
            other.text,
            other.lemma,
        )

    def __hash__(self):
        # Defining __eq__ alone made Token unhashable (Python sets __hash__
        # to None); keep the hash consistent with equality so tokens can be
        # stored in sets and used as dict keys.
        return hash((self.start, self.end, self.text, self.lemma))
class Tokenizer(metaclass=ABCMeta):
    """Abstract base class: concrete tokenizers must implement tokenize().

    Bug fix: the original assigned ``__metaclass__ = ABCMeta``, which is the
    Python 2 metaclass protocol and has no effect under Python 3, so
    @abstractmethod was never enforced. ``metaclass=ABCMeta`` restores the
    enforcement while keeping subclasses (e.g. SpacyTokenizer) unchanged.
    """

    @abstractmethod
    def tokenize(self, message: Dict, attribute: Text = "text") -> None:
        """Tokenize message[attribute] in place; implemented by subclasses."""
        raise NotImplementedError("should be implemented by subclass")
class SpacyTokenizer(Tokenizer):
    """Tokenizer backed by a spaCy pipeline (defaults to a blank English model)."""

    def __init__(self, nlp=None):
        self.nlp = nlp or spacy.blank("en")

    def tokenize(self, message: Dict, attribute: Text = "text") -> None:
        """Tokenize message[attribute] in place, storing Token objects under "tokens"."""
        doc = message[attribute]
        if attribute == "text":
            # Raw text must be run through the pipeline; other attributes
            # are assumed to already hold spaCy Doc objects — TODO confirm.
            doc = self.nlp(doc)
        tokens = [
            Token(
                t.text,
                t.idx,
                lemma=t.lemma_,
                data={"pos": self._tag_of_token(t), "shape": t.shape_},
            )
            for t in doc
        ]
        # Token -> Vec: attach word vectors when the model provides them.
        for token in tokens:
            vector = self.get_vector(token)
            if vector is not None:
                token.set("vector", vector)
        # Add CLS token one position past the last token.
        # NOTE(review): an empty document makes tokens[-1] raise IndexError —
        # confirm callers never pass empty text.
        idx = tokens[-1].end + 1
        tokens.append(Token("__CLS__", idx))
        message["tokens"] = tokens

    def get_vector(self, token: Union[Text, Token]) -> Optional[np.ndarray]:
        """Return the word vector for *token*, or None when no vectors are loaded."""
        word_vec = None
        if self.nlp.vocab.vectors_length > 0:
            word = token.text if isinstance(token, Token) else token
            word_hash = self.nlp.vocab.strings[word]
            if word_hash in self.nlp.vocab.vectors:
                word_vec = self.nlp.vocab.vectors[word_hash]
        return word_vec

    @staticmethod
    def _tag_of_token(token: Any) -> Text:
        import spacy

        # NOTE(review): lexicographic version comparison ("10" < "2") — this
        # branch misfires for double-digit major versions; verify against the
        # spaCy versions this project supports.
        if spacy.about.__version__ > "2" and token._.has("tag"):
            return token._.get("tag")
        else:
            return token.tag_
| 27.5
| 76
| 0.550168
|
import numpy as np
import spacy
from abc import ABCMeta, abstractmethod
from typing import Text, Optional, Any, Dict, Union
class Token:
    """A single token: its surface text, character span, lemma, and an
    arbitrary payload dict (POS tag, shape, vector, ...)."""

    def __init__(
        self,
        text: Text,
        start: int,
        end: Optional[int] = None,
        data: Optional[Dict[Text, Any]] = None,
        lemma: Optional[Text] = None,
    ) -> None:
        self.text = text
        self.start = start
        # A falsy `end` is derived from the surface text length.
        self.end = end if end else start + len(text)
        self.data = data if data else {}
        # Lemma falls back to the surface text itself.
        self.lemma = lemma or text

    def set(self, prop: Text, info: Any) -> None:
        """Store `info` under key `prop` in the payload dict."""
        self.data[prop] = info

    def get(self, prop: Text, default: Optional[Any] = None) -> Any:
        """Look up `prop` in the payload dict, falling back to `default`."""
        return self.data.get(prop, default)

    def _key(self):
        # Canonical comparison tuple shared by __eq__ and __lt__.
        return (self.start, self.end, self.text, self.lemma)

    def __eq__(self, other):
        if not isinstance(other, Token):
            return NotImplemented
        return self._key() == other._key()

    def __lt__(self, other):
        if not isinstance(other, Token):
            return NotImplemented
        return self._key() < other._key()
class Tokenizer:
__metaclass__ = ABCMeta
@abstractmethod
def tokenize(self, message: Dict, attribute: Text = "text") -> None:
raise NotImplementedError("should be implemented by subclass")
class SpacyTokenizer(Tokenizer):
def __init__(self, nlp=None):
self.nlp = nlp or spacy.blank("en")
def tokenize(self, message: Dict, attribute: Text = "text") -> None:
doc = message[attribute]
if attribute == "text":
doc = self.nlp(doc)
tokens = [
Token(
t.text,
t.idx,
lemma=t.lemma_,
data={"pos": self._tag_of_token(t), "shape": t.shape_},
)
for t in doc
]
for token in tokens:
vector = self.get_vector(token)
if vector is not None:
token.set("vector", vector)
idx = tokens[-1].end + 1
tokens.append(Token("__CLS__", idx))
message["tokens"] = tokens
def get_vector(self, token: Union[Text, Token]) -> Optional[np.ndarray]:
word_vec = None
if self.nlp.vocab.vectors_length > 0:
word = token.text if isinstance(token, Token) else token
word_hash = self.nlp.vocab.strings[word]
if word_hash in self.nlp.vocab.vectors:
word_vec = self.nlp.vocab.vectors[word_hash]
return word_vec
@staticmethod
def _tag_of_token(token: Any) -> Text:
import spacy
if spacy.about.__version__ > "2" and token._.has("tag"):
return token._.get("tag")
else:
return token.tag_
| true
| true
|
1c4a6340c5d62211f6a45bfb0967b64e8ecde7eb
| 1,355
|
py
|
Python
|
src/slim_train.py
|
limxuanyu127/seesawfacenet_pytorch
|
d975f12cb48b53508ca7ea42c5b0b459eb73a2bc
|
[
"MIT"
] | null | null | null |
src/slim_train.py
|
limxuanyu127/seesawfacenet_pytorch
|
d975f12cb48b53508ca7ea42c5b0b459eb73a2bc
|
[
"MIT"
] | null | null | null |
src/slim_train.py
|
limxuanyu127/seesawfacenet_pytorch
|
d975f12cb48b53508ca7ea42c5b0b459eb73a2bc
|
[
"MIT"
] | null | null | null |
from config import get_config
from slim_Learner import face_learner
import argparse
# Command-line entry point: parse training hyperparameters, patch them into
# the shared config object, then launch face-recognition training.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='for face verification')
    parser.add_argument("-e", "--epochs", help="training epochs", default=16, type=int)
    parser.add_argument("-net", "--net_mode", help="which network, [ir, ir_se, mobilefacenet,seesawFaceNet]",default='seesawFaceNet', type=str)
    parser.add_argument("-depth", "--net_depth", help="how many layers [50,100,152]", default=50, type=int)
    parser.add_argument('-lr','--lr',help='learning rate',default=1e-1, type=float)
    parser.add_argument("-b", "--batch_size", help="batch_size", default=196, type=int)
    parser.add_argument("-w", "--num_workers", help="workers number", default=8, type=int)
    parser.add_argument("-d", "--data_mode", help="use which database, [vgg, ms1m, emore, concat]",default='emore', type=str)
    args = parser.parse_args()
    conf = get_config()
    # seesawFaceNet is selected via a boolean flag on the config; every other
    # architecture is selected by name and depth instead (note: net_depth is
    # ignored when seesawFaceNet is chosen).
    if args.net_mode == 'seesawFaceNet':
        conf.seesawFaceNet = True
    else:
        conf.net_mode = args.net_mode
        conf.net_depth = args.net_depth
    conf.lr = args.lr
    conf.batch_size = args.batch_size
    conf.num_workers = args.num_workers
    conf.data_mode = args.data_mode
    # face_learner builds the model/optimizer/dataloaders from conf and
    # runs the training loop for the requested number of epochs.
    learner = face_learner(conf)
    learner.train(conf, args.epochs)
| 43.709677
| 143
| 0.687823
|
from config import get_config
from slim_Learner import face_learner
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='for face verification')
parser.add_argument("-e", "--epochs", help="training epochs", default=16, type=int)
parser.add_argument("-net", "--net_mode", help="which network, [ir, ir_se, mobilefacenet,seesawFaceNet]",default='seesawFaceNet', type=str)
parser.add_argument("-depth", "--net_depth", help="how many layers [50,100,152]", default=50, type=int)
parser.add_argument('-lr','--lr',help='learning rate',default=1e-1, type=float)
parser.add_argument("-b", "--batch_size", help="batch_size", default=196, type=int)
parser.add_argument("-w", "--num_workers", help="workers number", default=8, type=int)
parser.add_argument("-d", "--data_mode", help="use which database, [vgg, ms1m, emore, concat]",default='emore', type=str)
args = parser.parse_args()
conf = get_config()
if args.net_mode == 'seesawFaceNet':
conf.seesawFaceNet = True
else:
conf.net_mode = args.net_mode
conf.net_depth = args.net_depth
conf.lr = args.lr
conf.batch_size = args.batch_size
conf.num_workers = args.num_workers
conf.data_mode = args.data_mode
learner = face_learner(conf)
learner.train(conf, args.epochs)
| true
| true
|
1c4a64081f8217ebdf60f72541839ad38a6f2848
| 890
|
py
|
Python
|
release/scripts/modules/bl_keymap_utils/__init__.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 365
|
2015-02-10T15:10:55.000Z
|
2022-03-03T15:50:51.000Z
|
release/scripts/modules/bl_keymap_utils/__init__.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 45
|
2015-01-09T15:34:20.000Z
|
2021-10-05T14:44:23.000Z
|
release/scripts/modules/bl_keymap_utils/__init__.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 172
|
2015-01-25T15:16:53.000Z
|
2022-01-31T08:25:36.000Z
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
__all__ = (
"io",
"keymap_from_toolbar",
"keymap_hierarchy",
)
| 34.230769
| 74
| 0.716854
| true
| true
|
|
1c4a65bb65df12ad239c522acfad3662a7231d1a
| 15,278
|
py
|
Python
|
cortex/dataset/views.py
|
mvdoc/pycortex
|
bc8a93cac9518e3c1cd89650c703f9f3814e805b
|
[
"BSD-2-Clause"
] | 423
|
2015-01-06T02:46:46.000Z
|
2022-03-23T17:20:38.000Z
|
cortex/dataset/views.py
|
mvdoc/pycortex
|
bc8a93cac9518e3c1cd89650c703f9f3814e805b
|
[
"BSD-2-Clause"
] | 243
|
2015-01-03T02:10:03.000Z
|
2022-03-31T19:29:48.000Z
|
cortex/dataset/views.py
|
mvdoc/pycortex
|
bc8a93cac9518e3c1cd89650c703f9f3814e805b
|
[
"BSD-2-Clause"
] | 136
|
2015-03-23T20:35:59.000Z
|
2022-03-09T13:39:10.000Z
|
import json
import h5py
import numpy as np
from six import string_types
from .. import options
from .braindata import BrainData, VolumeData, VertexData
default_cmap = options.config.get("basic", "default_cmap")
def normalize(data):
    """Coerce `data` into a Dataview.

    Accepted inputs:
      * a 3-tuple ``(volume, subject, xfmname)`` -> Volume (or VolumeRGB
        when the array is packed uint8 RGB channels),
      * a 2-tuple ``(vertices, subject)``        -> Vertex,
      * an existing Dataview, returned unchanged.

    Raises TypeError for anything else.
    """
    if isinstance(data, tuple):
        size = len(data)
        if size == 3:
            arr = data[0]
            if arr.dtype == np.uint8:
                # uint8 arrays are packed RGB; split the channels out.
                return VolumeRGB(arr[..., 0], arr[..., 1], arr[..., 2], *data[1:])
            return Volume(*data)
        if size == 2:
            return Vertex(*data)
        raise TypeError("Invalid input for Dataview")
    if isinstance(data, Dataview):
        return data
    raise TypeError("Invalid input for Dataview")
def _from_hdf_data(h5, name, xfmname=None, **kwargs):
    """Decodes a __hash named node from an HDF file into the
    constituent Vertex or Volume object.

    Looks the dataset up under ``/data/<name>`` (falling back to `name`
    as an absolute path), then dispatches on dtype/shape: packed uint8
    RGB(A) arrays become VertexRGB/VolumeRGB, everything else becomes
    Vertex (no transform) or Volume (with transform).
    """
    dnode = h5.get("/data/%s"%name)
    if dnode is None:
        # Fall back to treating `name` as a full path within the file.
        dnode = h5.get(name)
    attrs = {k: u(v) for (k, v) in dnode.attrs.items()}
    subj = attrs['subject']
    #support old style xfmname saving as attribute
    if xfmname is None and 'xfmname' in attrs:
        xfmname = attrs['xfmname']
    mask = None
    if 'mask' in attrs:
        if attrs['mask'].startswith("__"):
            # "__"-prefixed masks are stored inline under the subject's
            # transform node rather than referenced by database name.
            # NOTE(review): Dataset.value was removed in h5py >= 3.0 —
            # confirm the pinned h5py version, else this needs `[()]`.
            mask = h5['/subjects/%s/transforms/%s/masks/%s'%(attrs['subject'], xfmname, attrs['mask'])].value
        else:
            mask = attrs['mask']
    #support old style RGB volumes: uint8 with 3 (RGB) or 4 (RGBA) channels
    if dnode.dtype == np.uint8 and dnode.shape[-1] in (3, 4):
        alpha = None
        if dnode.shape[-1] == 4:
            alpha = dnode[..., 3]
        if xfmname is None:
            return VertexRGB(dnode[...,0], dnode[...,1], dnode[...,2], subj,
                alpha=alpha, **kwargs)
        return VolumeRGB(dnode[...,0], dnode[...,1], dnode[...,2], subj, xfmname,
            alpha=alpha, mask=mask, **kwargs)
    if xfmname is None:
        return Vertex(dnode, subj, **kwargs)
    return Volume(dnode, subj, xfmname, mask=mask, **kwargs)
def _from_hdf_view(h5, data, xfmname=None, vmin=None, vmax=None, **kwargs):
    """Rebuild a view object from HDF view fields.

    `data` is either a single dataset name (plain Vertex/Volume), a pair
    of names (bivariate 2D view, with vmin/vmax/xfmname given as pairs),
    or four names (red/green/blue channels plus optional alpha).
    """
    if isinstance(data, string_types):
        return _from_hdf_data(h5, data, xfmname=xfmname, vmin=vmin, vmax=vmax, **kwargs)
    if len(data) == 2:
        # Two datasets -> 2D (bivariate) view.
        dim1 = _from_hdf_data(h5, data[0], xfmname=xfmname[0])
        dim2 = _from_hdf_data(h5, data[1], xfmname=xfmname[1])
        cls = Vertex2D if isinstance(dim1, Vertex) else Volume2D
        return cls(dim1, dim2, vmin=vmin[0], vmin2=vmin[1],
            vmax=vmax[0], vmax2=vmax[1], **kwargs)
    elif len(data) == 4:
        # Four datasets -> RGB channels, last entry is an optional alpha.
        red, green, blue = [_from_hdf_data(h5, d, xfmname=xfmname) for d in data[:3]]
        alpha = None
        if data[3] is not None:
            alpha = _from_hdf_data(h5, data[3], xfmname=xfmname)
        cls = VertexRGB if isinstance(red, Vertex) else VolumeRGB
        return cls(red, green, blue, alpha=alpha, **kwargs)
    else:
        raise ValueError("Invalid Dataview specification")
class Dataview(object):
    """Mixin carrying display parameters (colormap, color limits, viewer
    state) for all viewable data classes.

    Cannot be instantiated directly; concrete subclasses (Volume, Vertex,
    ...) combine it with a data container.

    Parameters
    ----------
    cmap : str or matplotlib colormap, optional
        Colormap (or colormap name) used for display. Falls back to the
        pycortex config default.
    vmin : float, optional
        Minimum of the color scale.
    vmax : float, optional
        Maximum of the color scale.
    description : str, optional
        String describing this dataset. Displayed in webgl viewer.
    state : optional
        Viewer state stored alongside the view.
    **kwargs
        Arbitrary extra attributes kept in ``self.attrs``; ``priority``
        defaults to 1 when absent.
    """
    def __init__(self, cmap=None, vmin=None, vmax=None, description="", state=None, **kwargs):
        if self.__class__ == Dataview:
            raise TypeError('Cannot directly instantiate Dataview objects')
        self.cmap = cmap if cmap is not None else default_cmap
        self.vmin = vmin
        self.vmax = vmax
        self.state = state
        self.attrs = kwargs
        if 'priority' not in self.attrs:
            self.attrs['priority'] = 1
        self.description = description

    def copy(self, *args, **kwargs):
        """Construct a new instance of this class, carrying over the
        display settings (cmap/vmin/vmax/description/state/attrs)."""
        kwargs.update(self.attrs)
        return self.__class__(*args,
            cmap=self.cmap,
            vmin=self.vmin,
            vmax=self.vmax,
            description=self.description,
            state=self.state,
            **kwargs)

    @property
    def priority(self):
        """Display priority of this view (stored in ``attrs``)."""
        return self.attrs['priority']

    @priority.setter
    def priority(self, value):
        self.attrs['priority'] = value

    def to_json(self, simple=False):
        """Serialize display parameters to a JSON-compatible dict.

        Unset vmin/vmax are derived from the 1st/99th percentiles of the
        data; subclasses without cmap/vmin/vmax (e.g. RGB views) simply
        omit those entries via the AttributeError branch.
        """
        if simple:
            return dict()
        desc = self.description
        if hasattr(desc, 'decode'):
            desc = desc.decode()
        sdict = dict(
            state=self.state,
            attrs=self.attrs.copy(),
            desc=desc)
        try:
            sdict.update(dict(
                cmap=[self.cmap],
                vmin=[self.vmin if self.vmin is not None else np.percentile(np.nan_to_num(self.data), 1)],
                vmax=[self.vmax if self.vmax is not None else np.percentile(np.nan_to_num(self.data), 99)]
            ))
        except AttributeError:
            # No data/cmap on this subclass; leave those keys out.
            pass
        return sdict

    @staticmethod
    def from_hdf(node):
        """Reconstruct a view from the 8-slot HDF node written by _write_hdf.

        Slot layout: 0=data names, 1=description, 2=cmap, 3=vmin, 4=vmax,
        5=state, 6=attrs, 7=xfmname (all JSON-encoded strings).
        """
        data = json.loads(u(node[0]))
        desc = node[1]
        try:
            cmap = json.loads(u(node[2]))
        except Exception:
            # Older files stored the cmap as a bare (non-JSON) string.
            cmap = u(node[2])
        vmin = json.loads(u(node[3]))
        vmax = json.loads(u(node[4]))
        state = json.loads(u(node[5]))
        attrs = json.loads(u(node[6]))
        try:
            xfmname = json.loads(u(node[7]))
        except ValueError:
            xfmname = None
        # Normalize scalars to one-element lists for uniform indexing below.
        if not isinstance(vmin, list):
            vmin = [vmin]
        if not isinstance(vmax, list):
            vmax = [vmax]
        if not isinstance(cmap, list):
            cmap = [cmap]
        if len(data) == 1:
            xfm = None if xfmname is None else xfmname[0]
            return _from_hdf_view(node.file, data[0], xfmname=xfm, cmap=cmap[0], description=desc,
                vmin=vmin[0], vmax=vmax[0], state=state, **attrs)
        else:
            # Multi-data views are not supported yet.
            # Fixed: this previously referenced the undefined name `xfname`,
            # raising NameError instead of reaching the NotImplementedError.
            views = [_from_hdf_view(node.file, d, xfmname=x) for d, x in zip(data, xfmname)]
            raise NotImplementedError

    def _write_hdf(self, h5, name="data", data=None, xfmname=None):
        """Write the 8-slot view node (see from_hdf for the slot layout)."""
        views = h5.require_group("/views")
        view = views.require_dataset(name, (8,), h5py.special_dtype(vlen=str))
        view[0] = json.dumps(data)
        view[1] = self.description
        try:
            view[2] = json.dumps([self.cmap])
            view[3] = json.dumps([self.vmin])
            view[4] = json.dumps([self.vmax])
        except AttributeError:
            # For VolumeRGB/VertexRGB, there is no cmap/vmin/vmax.
            view[2] = "null"
            view[3:5] = "null"
        view[5] = json.dumps(self.state)
        view[6] = json.dumps(self.attrs)
        view[7] = json.dumps(xfmname)
        return view

    @property
    def raw(self):
        """Colormap the data, returning a uint8 array with the RGBA channel
        axis first, so callers can unpack ``r, g, b, a = view.raw``."""
        from matplotlib import colors, cm, pyplot as plt
        import glob, os
        # Resolve the colormap from matplotlib or pycortex's colormap dir.
        ## -- redundant code, here and in cortex/quicklflat.py -- ##
        if isinstance(self.cmap, string_types):
            if not self.cmap in cm.__dict__:
                # Unknown to matplotlib; look for a pycortex PNG colormap.
                cmapdir = options.config.get('webgl', 'colormaps')
                colormaps = glob.glob(os.path.join(cmapdir, "*.png"))
                colormaps = dict(((os.path.split(c)[1][:-4],c) for c in colormaps))
                if not self.cmap in colormaps:
                    raise Exception('Unkown color map!')
                I = plt.imread(colormaps[self.cmap])
                cmap = colors.ListedColormap(np.squeeze(I))
                # Register so subsequent lookups hit matplotlib directly.
                cm.register_cmap(self.cmap,cmap)
            else:
                cmap = cm.get_cmap(self.cmap)
        elif isinstance(self.cmap, colors.Colormap):
            cmap = self.cmap
        else:
            # Fixed: previously fell through and raised UnboundLocalError.
            raise TypeError("cmap must be a colormap name or a matplotlib Colormap")
        # Normalize colors according to vmin, vmax.
        norm = colors.Normalize(self.vmin, self.vmax)
        cmapper = cm.ScalarMappable(norm=norm, cmap=cmap)
        color_data = cmapper.to_rgba(self.data.flatten()).reshape(self.data.shape+(4,))
        color_data = (np.clip(color_data, 0, 1) * 255).astype(np.uint8)
        # rollaxis puts the color channel first: r,g,b,a = dataset.raw
        return np.rollaxis(color_data, -1)
class Multiview(Dataview):
    """Unfinished container for displaying several views at once.

    NOTE(review): __init__ raises NotImplementedError unconditionally, so
    the ``self.views`` assignment after the raise is unreachable and
    instances can never be created.
    """
    def __init__(self, views, description=""):
        # Validate the inputs before declaring the class unimplemented.
        for view in views:
            if not isinstance(view, Dataview):
                raise TypeError("Must be a View object!")
        raise NotImplementedError
        self.views = views
    def uniques(self, collapse=False):
        # Yield the underlying data objects across all contained views.
        for view in self.views:
            for sv in view.uniques(collapse=collapse):
                yield sv
class Volume(VolumeData, Dataview):
    """
    Encapsulates a 3D volume or 4D volumetric movie. Includes information on how
    the volume should be colormapped for display purposes.
    Parameters
    ----------
    data : ndarray
        The data. Can be 3D with shape (z,y,x), 1D with shape (v,) for masked data,
        4D with shape (t,z,y,x), or 2D with shape (t,v). For masked data, if the
        size of the given array matches any of the existing masks in the database,
        that mask will automatically be loaded. If it does not, an error will be
        raised.
    subject : str
        Subject identifier. Must exist in the pycortex database.
    xfmname : str
        Transform name. Must exist in the pycortex database.
    mask : ndarray, optional
        Binary 3D array with shape (z,y,x) showing which voxels are selected.
        If masked data is given, the mask will automatically be loaded if it
        exists in the pycortex database.
    cmap : str or matplotlib colormap, optional
        Colormap (or colormap name) to use. If not given defaults to matplotlib
        default colormap.
    vmin : float, optional
        Minimum value in colormap. If not given, defaults to the 1st percentile
        of the data.
    vmax : float, optional
        Maximum value in colormap. If not given defaults to the 99th percentile
        of the data.
    description : str, optional
        String describing this dataset. Displayed in webgl viewer.
    **kwargs
        All additional arguments in kwargs are passed to the VolumeData and Dataview
    """
    def __init__(self, data, subject, xfmname, mask=None,
                 cmap=None, vmin=None, vmax=None, description="", **kwargs):
        super(Volume, self).__init__(data, subject, xfmname, mask=mask,
                                     cmap=cmap, vmin=vmin, vmax=vmax,
                                     description=description, **kwargs)
        # Default unset color limits to robust (1st/99th) percentiles,
        # with NaNs zeroed so percentile stays finite.
        self.vmin = self.vmin if self.vmin is not None else \
            np.percentile(np.nan_to_num(self.data), 1)
        self.vmax = self.vmax if self.vmax is not None else \
            np.percentile(np.nan_to_num(self.data), 99)
    def _write_hdf(self, h5, name="data"):
        # Write the raw data node first, then the view node referencing it.
        datanode = VolumeData._write_hdf(self, h5)
        viewnode = Dataview._write_hdf(self, h5, name=name,
                                       data=[self.name],
                                       xfmname=[self.xfmname])
        return viewnode
    @property
    def raw(self):
        """Colormapped version of this volume, wrapped as a VolumeRGB view."""
        r, g, b, a = super(Volume, self).raw
        return VolumeRGB(r, g, b, self.subject, self.xfmname, a,
                         description=self.description, state=self.state,
                         **self.attrs)
class Vertex(VertexData, Dataview):
    """
    Encapsulates a 1D vertex map or 2D vertex movie. Includes information on how
    the data should be colormapped for display purposes.
    Parameters
    ----------
    data : ndarray
        The data. Can be 1D with shape (v,), or 2D with shape (t,v). Here, v can
        be the number of vertices in both hemispheres, or the number of vertices
        in either one of the hemispheres. In that case, the data for the other
        hemisphere will be filled with zeros.
    subject : str
        Subject identifier. Must exist in the pycortex database.
    cmap : str or matplotlib colormap, optional
        Colormap (or colormap name) to use. If not given defaults to matplotlib
        default colormap.
    vmin : float, optional
        Minimum value in colormap. If not given, defaults to the 1st percentile
        of the data.
    vmax : float, optional
        Maximum value in colormap. If not given defaults to the 99th percentile
        of the data.
    description : str, optional
        String describing this dataset. Displayed in webgl viewer.
    **kwargs
        All additional arguments in kwargs are passed to the VolumeData and Dataview
    """
    def __init__(self, data, subject, cmap=None, vmin=None, vmax=None, description="", **kwargs):
        super(Vertex, self).__init__(data, subject, cmap=cmap, vmin=vmin, vmax=vmax,
                                     description=description, **kwargs)
        # Default unset color limits to robust (1st/99th) percentiles,
        # with NaNs zeroed so percentile stays finite.
        self.vmin = self.vmin if self.vmin is not None else \
            np.percentile(np.nan_to_num(self.data), 1)
        self.vmax = self.vmax if self.vmax is not None else \
            np.percentile(np.nan_to_num(self.data), 99)
    def _write_hdf(self, h5, name="data"):
        # Write the raw data node first, then the view node referencing it.
        datanode = VertexData._write_hdf(self, h5)
        viewnode = Dataview._write_hdf(self, h5, name=name, data=[self.name])
        return viewnode
    @property
    def raw(self):
        """Colormapped version of this map, wrapped as a VertexRGB view."""
        r, g, b, a = super(Vertex, self).raw
        return VertexRGB(r, g, b, self.subject, a,
                         description=self.description, state=self.state,
                         **self.attrs)
    def map(self, target_subj, surface_type='fiducial',
            hemi='both', fs_subj=None, **kwargs):
        """Map this data from this surface to another surface
        Calls `cortex.freesurfer.vertex_to_vertex()` with this
        vertex object as the first argument.
        NOTE: Requires either previous computation of mapping matrices
        (with `cortex.db.get_mri_surf2surf_matrix`) or active
        freesurfer environment.
        Parameters
        ----------
        target_subj : str
            freesurfer subject to which to map
        surface_type : str, optional
            surface on which the mapping is computed (default 'fiducial')
        hemi : str, optional
            'lh', 'rh', or 'both'; the unmapped hemisphere is NaN-filled
        fs_subj : str, optional
            freesurfer subject name, if different from the pycortex one
        Other Parameters
        ----------------
        kwargs map to `cortex.freesurfer.vertex_to_vertex()`
        """
        # Input check
        if hemi not in ['lh', 'rh', 'both']:
            raise ValueError("`hemi` kwarg must be 'lh', 'rh', or 'both'")
        # lazy load to avoid a circular import at module load time
        from ..database import db
        mats = db.get_mri_surf2surf_matrix(self.subject, surface_type,
            hemi='both', target_subj=target_subj, fs_subj=fs_subj,
            **kwargs)
        # Apply the per-hemisphere surf2surf matrices, then fill the
        # unrequested hemisphere with NaNs so it renders as empty.
        new_data = [mats[0].dot(self.left), mats[1].dot(self.right)]
        if hemi == 'both':
            new_data = np.hstack(new_data)
        elif hemi == 'lh':
            new_data = np.hstack([new_data[0], np.nan * np.zeros(new_data[1].shape)])
        elif hemi == 'rh':
            new_data = np.hstack([np.nan * np.zeros(new_data[0].shape), new_data[1]])
        vx = Vertex(new_data, target_subj, vmin=self.vmin, vmax=self.vmax, cmap=self.cmap)
        return vx
def u(s, encoding='utf8'):
    """Return `s` as text: bytes are decoded with `encoding`, anything
    without a ``decode`` method passes through unchanged."""
    try:
        decode = s.decode
    except AttributeError:
        return s
    return decode(encoding)
from .viewRGB import VolumeRGB, VertexRGB, Colors
from .view2D import Volume2D, Vertex2D
| 39.074169
| 115
| 0.582668
|
import json
import h5py
import numpy as np
from six import string_types
from .. import options
from .braindata import BrainData, VolumeData, VertexData
default_cmap = options.config.get("basic", "default_cmap")
def normalize(data):
if isinstance(data, tuple):
if len(data) == 3:
if data[0].dtype == np.uint8:
return VolumeRGB(data[0][...,0], data[0][...,1], data[0][...,2], *data[1:])
return Volume(*data)
elif len(data) == 2:
return Vertex(*data)
else:
raise TypeError("Invalid input for Dataview")
elif isinstance(data, Dataview):
return data
else:
raise TypeError("Invalid input for Dataview")
def _from_hdf_data(h5, name, xfmname=None, **kwargs):
dnode = h5.get("/data/%s"%name)
if dnode is None:
dnode = h5.get(name)
attrs = {k: u(v) for (k, v) in dnode.attrs.items()}
subj = attrs['subject']
if xfmname is None and 'xfmname' in attrs:
xfmname = attrs['xfmname']
mask = None
if 'mask' in attrs:
if attrs['mask'].startswith("__"):
mask = h5['/subjects/%s/transforms/%s/masks/%s'%(attrs['subject'], xfmname, attrs['mask'])].value
else:
mask = attrs['mask']
if dnode.dtype == np.uint8 and dnode.shape[-1] in (3, 4):
alpha = None
if dnode.shape[-1] == 4:
alpha = dnode[..., 3]
if xfmname is None:
return VertexRGB(dnode[...,0], dnode[...,1], dnode[...,2], subj,
alpha=alpha, **kwargs)
return VolumeRGB(dnode[...,0], dnode[...,1], dnode[...,2], subj, xfmname,
alpha=alpha, mask=mask, **kwargs)
if xfmname is None:
return Vertex(dnode, subj, **kwargs)
return Volume(dnode, subj, xfmname, mask=mask, **kwargs)
def _from_hdf_view(h5, data, xfmname=None, vmin=None, vmax=None, **kwargs):
if isinstance(data, string_types):
return _from_hdf_data(h5, data, xfmname=xfmname, vmin=vmin, vmax=vmax, **kwargs)
if len(data) == 2:
dim1 = _from_hdf_data(h5, data[0], xfmname=xfmname[0])
dim2 = _from_hdf_data(h5, data[1], xfmname=xfmname[1])
cls = Vertex2D if isinstance(dim1, Vertex) else Volume2D
return cls(dim1, dim2, vmin=vmin[0], vmin2=vmin[1],
vmax=vmax[0], vmax2=vmax[1], **kwargs)
elif len(data) == 4:
red, green, blue = [_from_hdf_data(h5, d, xfmname=xfmname) for d in data[:3]]
alpha = None
if data[3] is not None:
alpha = _from_hdf_data(h5, data[3], xfmname=xfmname)
cls = VertexRGB if isinstance(red, Vertex) else VolumeRGB
return cls(red, green, blue, alpha=alpha, **kwargs)
else:
raise ValueError("Invalid Dataview specification")
class Dataview(object):
def __init__(self, cmap=None, vmin=None, vmax=None, description="", state=None, **kwargs):
if self.__class__ == Dataview:
raise TypeError('Cannot directly instantiate Dataview objects')
self.cmap = cmap if cmap is not None else default_cmap
self.vmin = vmin
self.vmax = vmax
self.state = state
self.attrs = kwargs
if 'priority' not in self.attrs:
self.attrs['priority'] = 1
self.description = description
def copy(self, *args, **kwargs):
kwargs.update(self.attrs)
return self.__class__(*args,
cmap=self.cmap,
vmin=self.vmin,
vmax=self.vmax,
description=self.description,
state=self.state,
**kwargs)
@property
def priority(self):
return self.attrs['priority']
@priority.setter
def priority(self, value):
self.attrs['priority'] = value
def to_json(self, simple=False):
if simple:
return dict()
desc = self.description
if hasattr(desc, 'decode'):
desc = desc.decode()
sdict = dict(
state=self.state,
attrs=self.attrs.copy(),
desc=desc)
try:
sdict.update(dict(
cmap=[self.cmap],
vmin=[self.vmin if self.vmin is not None else np.percentile(np.nan_to_num(self.data), 1)],
vmax=[self.vmax if self.vmax is not None else np.percentile(np.nan_to_num(self.data), 99)]
))
except AttributeError:
pass
return sdict
@staticmethod
def from_hdf(node):
data = json.loads(u(node[0]))
desc = node[1]
try:
cmap = json.loads(u(node[2]))
except:
cmap = u(node[2])
vmin = json.loads(u(node[3]))
vmax = json.loads(u(node[4]))
state = json.loads(u(node[5]))
attrs = json.loads(u(node[6]))
try:
xfmname = json.loads(u(node[7]))
except ValueError:
xfmname = None
if not isinstance(vmin, list):
vmin = [vmin]
if not isinstance(vmax, list):
vmax = [vmax]
if not isinstance(cmap, list):
cmap = [cmap]
if len(data) == 1:
xfm = None if xfmname is None else xfmname[0]
return _from_hdf_view(node.file, data[0], xfmname=xfm, cmap=cmap[0], description=desc,
vmin=vmin[0], vmax=vmax[0], state=state, **attrs)
else:
views = [_from_hdf_view(node.file, d, xfmname=x) for d, x in zip(data, xfname)]
raise NotImplementedError
def _write_hdf(self, h5, name="data", data=None, xfmname=None):
views = h5.require_group("/views")
view = views.require_dataset(name, (8,), h5py.special_dtype(vlen=str))
view[0] = json.dumps(data)
view[1] = self.description
try:
view[2] = json.dumps([self.cmap])
view[3] = json.dumps([self.vmin])
view[4] = json.dumps([self.vmax])
except AttributeError:
view[2] = "null"
view[3:5] = "null"
view[5] = json.dumps(self.state)
view[6] = json.dumps(self.attrs)
view[7] = json.dumps(xfmname)
return view
@property
def raw(self):
from matplotlib import colors, cm, pyplot as plt
import glob, os
        if isinstance(self.cmap, string_types):
            if not self.cmap in cm.__dict__:
cmapdir = options.config.get('webgl', 'colormaps')
colormaps = glob.glob(os.path.join(cmapdir, "*.png"))
colormaps = dict(((os.path.split(c)[1][:-4],c) for c in colormaps))
if not self.cmap in colormaps:
raise Exception('Unkown color map!')
I = plt.imread(colormaps[self.cmap])
cmap = colors.ListedColormap(np.squeeze(I))
# Register colormap while we're at it
cm.register_cmap(self.cmap,cmap)
else:
cmap = cm.get_cmap(self.cmap)
elif isinstance(self.cmap, colors.Colormap):
cmap = self.cmap
norm = colors.Normalize(self.vmin, self.vmax)
cmapper = cm.ScalarMappable(norm=norm, cmap=cmap)
color_data = cmapper.to_rgba(self.data.flatten()).reshape(self.data.shape+(4,))
color_data = (np.clip(color_data, 0, 1) * 255).astype(np.uint8)
return np.rollaxis(color_data, -1)
class Multiview(Dataview):
def __init__(self, views, description=""):
for view in views:
if not isinstance(view, Dataview):
raise TypeError("Must be a View object!")
raise NotImplementedError
self.views = views
def uniques(self, collapse=False):
for view in self.views:
for sv in view.uniques(collapse=collapse):
yield sv
class Volume(VolumeData, Dataview):
def __init__(self, data, subject, xfmname, mask=None,
cmap=None, vmin=None, vmax=None, description="", **kwargs):
super(Volume, self).__init__(data, subject, xfmname, mask=mask,
cmap=cmap, vmin=vmin, vmax=vmax,
description=description, **kwargs)
self.vmin = self.vmin if self.vmin is not None else \
np.percentile(np.nan_to_num(self.data), 1)
self.vmax = self.vmax if self.vmax is not None else \
np.percentile(np.nan_to_num(self.data), 99)
def _write_hdf(self, h5, name="data"):
datanode = VolumeData._write_hdf(self, h5)
viewnode = Dataview._write_hdf(self, h5, name=name,
data=[self.name],
xfmname=[self.xfmname])
return viewnode
@property
def raw(self):
r, g, b, a = super(Volume, self).raw
return VolumeRGB(r, g, b, self.subject, self.xfmname, a,
description=self.description, state=self.state,
**self.attrs)
class Vertex(VertexData, Dataview):
def __init__(self, data, subject, cmap=None, vmin=None, vmax=None, description="", **kwargs):
super(Vertex, self).__init__(data, subject, cmap=cmap, vmin=vmin, vmax=vmax,
description=description, **kwargs)
self.vmin = self.vmin if self.vmin is not None else \
np.percentile(np.nan_to_num(self.data), 1)
self.vmax = self.vmax if self.vmax is not None else \
np.percentile(np.nan_to_num(self.data), 99)
def _write_hdf(self, h5, name="data"):
datanode = VertexData._write_hdf(self, h5)
viewnode = Dataview._write_hdf(self, h5, name=name, data=[self.name])
return viewnode
@property
def raw(self):
r, g, b, a = super(Vertex, self).raw
return VertexRGB(r, g, b, self.subject, a,
description=self.description, state=self.state,
**self.attrs)
def map(self, target_subj, surface_type='fiducial',
hemi='both', fs_subj=None, **kwargs):
if hemi not in ['lh', 'rh', 'both']:
raise ValueError("`hemi` kwarg must be 'lh', 'rh', or 'both'")
from ..database import db
mats = db.get_mri_surf2surf_matrix(self.subject, surface_type,
hemi='both', target_subj=target_subj, fs_subj=fs_subj,
**kwargs)
new_data = [mats[0].dot(self.left), mats[1].dot(self.right)]
if hemi == 'both':
new_data = np.hstack(new_data)
elif hemi == 'lh':
new_data = np.hstack([new_data[0], np.nan * np.zeros(new_data[1].shape)])
elif hemi == 'rh':
new_data = np.hstack([np.nan * np.zeros(new_data[0].shape), new_data[1]])
vx = Vertex(new_data, target_subj, vmin=self.vmin, vmax=self.vmax, cmap=self.cmap)
return vx
def u(s, encoding='utf8'):
try:
return s.decode(encoding)
except AttributeError:
return s
from .viewRGB import VolumeRGB, VertexRGB, Colors
from .view2D import Volume2D, Vertex2D
| true
| true
|
1c4a676f27ed3a73245add251cc9aa1a30f369df
| 374
|
py
|
Python
|
_ctfs/wargamesmy-19/babypwn/xpl.py
|
daniellimws/daniellimws.github.io
|
464548e058ca423548cbe95c4ee38f856f9185c2
|
[
"MIT"
] | 1
|
2019-01-31T16:50:12.000Z
|
2019-01-31T16:50:12.000Z
|
_ctfs/wargamesmy-19/babypwn/xpl.py
|
daniellimws/daniellimws.github.io
|
464548e058ca423548cbe95c4ee38f856f9185c2
|
[
"MIT"
] | null | null | null |
_ctfs/wargamesmy-19/babypwn/xpl.py
|
daniellimws/daniellimws.github.io
|
464548e058ca423548cbe95c4ee38f856f9185c2
|
[
"MIT"
] | 3
|
2019-03-04T12:46:18.000Z
|
2021-05-18T16:10:44.000Z
|
from pwn import *
# Exploit for the WargamesMY'19 "babypwn" CTF challenge.
# Local testing target (kept for reference):
# r = process("./babypwn")
r = remote("45.76.161.20", 19509)
# The service sends a 4-byte libc pointer on connect; subtract the fixed
# offsets to recover the libc base.  NOTE(review): offsets are specific to
# the remote libc build.
leak = u32(r.recv(4))
libc_base = leak - 0xd80 - 0x1d8000
log.info("Leaked: " + hex(libc_base))
# Target address inside libc (shell gadget at offset 0x3d0d5).
sh = libc_base + 0x3d0d5
log.info("Shell: " + hex(sh))
# The binary reads the value as a signed 32-bit integer, so addresses above
# 0x7fffffff must be sent in negative two's-complement form.
sh = -(2**32 - sh) if sh > 0x7fffffff else sh
# pause()
r.sendline(str(sh))
# pause()
r.interactive()
# wgmy{b20208102bc4242bb10197edec8f3bb9}
| 23.375
| 45
| 0.660428
|
from pwn import *
r = remote("45.76.161.20", 19509)
leak = u32(r.recv(4))
libc_base = leak - 0xd80 - 0x1d8000
log.info("Leaked: " + hex(libc_base))
sh = libc_base + 0x3d0d5
log.info("Shell: " + hex(sh))
sh = -(2**32 - sh) if sh > 0x7fffffff else sh
r.sendline(str(sh))
r.interactive()
| true
| true
|
1c4a6843d62ed680a7b0c55c9e80f25fddaa013a
| 47
|
py
|
Python
|
Taekwon/Python/baseGrammar/codeup040.py
|
sonnysorry/codingtest
|
478e0168e3209eb97b6b16910027bf12ccc3ccd0
|
[
"MIT"
] | 2
|
2021-09-27T19:10:36.000Z
|
2021-11-09T05:40:39.000Z
|
Taekwon/Python/baseGrammar/codeup040.py
|
sonnysorry/codingtest
|
478e0168e3209eb97b6b16910027bf12ccc3ccd0
|
[
"MIT"
] | 1
|
2021-11-15T14:56:54.000Z
|
2021-11-15T14:56:54.000Z
|
Taekwon/Python/baseGrammar/codeup040.py
|
sonnysorry/codingtest
|
478e0168e3209eb97b6b16910027bf12ccc3ccd0
|
[
"MIT"
] | null | null | null |
# Read two integers from a single stdin line and print their floor quotient.
dividend, divisor = map(int, input().split())
print(dividend // divisor)
| 15.666667
| 23
| 0.531915
|
a, b = input().split()
print(int(a) // int(b))
| true
| true
|
1c4a6896ce6f6954c7f299bc77e2b3cfe6d96de7
| 2,983
|
py
|
Python
|
huaweicloud-sdk-iotda/huaweicloudsdkiotda/v5/model/untag_device_response.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-iotda/huaweicloudsdkiotda/v5/model/untag_device_response.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-iotda/huaweicloudsdkiotda/v5/model/untag_device_response.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UntagDeviceResponse(SdkResponse):
    """Auto-generated response model for the IoTDA "untag device" API.

    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    # Attribute names whose values are masked as "****" in to_dict().
    sensitive_list = []
    openapi_types = {
        'body': 'str'
    }
    attribute_map = {
        'body': 'body'
    }
    def __init__(self, body=None):
        """UntagDeviceResponse - a model defined in huaweicloud sdk"""
        super(UntagDeviceResponse, self).__init__()
        self._body = None
        self.discriminator = None
        if body is not None:
            self.body = body
    @property
    def body(self):
        """Gets the body of this UntagDeviceResponse.
        :return: The body of this UntagDeviceResponse.
        :rtype: str
        """
        return self._body
    @body.setter
    def body(self, body):
        """Sets the body of this UntagDeviceResponse.
        :param body: The body of this UntagDeviceResponse.
        :type: str
        """
        self._body = body
    def to_dict(self):
        """Returns the model properties as a dict, recursing into nested
        models and masking attributes listed in sensitive_list."""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the JSON string representation of the model."""
        import simplejson as json
        if six.PY2:
            # Python 2 only: force UTF-8 as the process default encoding.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, UntagDeviceResponse):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 26.39823
| 79
| 0.546095
|
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UntagDeviceResponse(SdkResponse):
    """Response model for the untag-device API call."""
    # Attribute names whose values must be masked in to_dict().
    sensitive_list = []
    # attribute name -> attribute type
    openapi_types = {
        'body': 'str'
    }
    # attribute name -> JSON key in the API definition
    attribute_map = {
        'body': 'body'
    }
    def __init__(self, body=None):
        """UntagDeviceResponse - a model defined in huaweicloud sdk"""
        super(UntagDeviceResponse, self).__init__()
        self._body = None
        self.discriminator = None
        if body is not None:
            self.body = body
    @property
    def body(self):
        """Gets the body of this UntagDeviceResponse.

        :rtype: str
        """
        return self._body
    @body.setter
    def body(self, body):
        """Sets the body of this UntagDeviceResponse.

        :type: str
        """
        self._body = body
    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Convert each list element that is itself a model.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Convert dict values that are themselves models.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    # Mask sensitive attributes.
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model."""
        import simplejson as json
        if six.PY2:
            # Python 2 only: force UTF-8 default encoding before dumping.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`."""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, UntagDeviceResponse):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| true
| true
|
1c4a68ab2bfe23ff9552b1c0fd938fc479058e3b
| 3,557
|
py
|
Python
|
blocks/find_delta/pyspark/lib.py
|
datayoga-io/datayoga
|
d88b23d21d49d60d14b3c72d54e11e5034f6c1f3
|
[
"Apache-2.0"
] | 16
|
2022-01-24T07:45:16.000Z
|
2022-03-07T08:27:13.000Z
|
blocks/find_delta/pyspark/lib.py
|
datayoga-io/datayoga
|
d88b23d21d49d60d14b3c72d54e11e5034f6c1f3
|
[
"Apache-2.0"
] | null | null | null |
blocks/find_delta/pyspark/lib.py
|
datayoga-io/datayoga
|
d88b23d21d49d60d14b3c72d54e11e5034f6c1f3
|
[
"Apache-2.0"
] | 2
|
2022-01-19T07:46:16.000Z
|
2022-01-24T12:25:09.000Z
|
import pyspark.sql
import pyspark.sql.functions as F
import pyspark.sql.types as T
import logging
import collections
from typing import List
import functools
import operator
logger = logging.getLogger("dy_runner")
Delta = collections.namedtuple('Delta', 'insert update delete exclude')
def find_delta(
        df_existing,
        df_incoming,
        business_keys: List[str],
        columns: List[str],
        include_deletes: bool = False,
        exclusion_condition: str = ""
):
    """Classify incoming rows against existing rows into a ``Delta``.

    Rows are matched on ``business_keys`` and split into:
      * insert  - incoming rows with no matching existing row
      * update  - matched rows whose compared columns changed
      * delete  - existing rows absent from incoming (only when
                  ``include_deletes`` is True; otherwise None)
      * exclude - rows matching ``exclusion_condition`` (None when no
                  condition was supplied)

    ``columns`` restricts which shared columns participate in change
    detection; an empty list means every column shared by both frames.

    Returns:
        Delta namedtuple of the four (possibly None) DataFrames.
    """
    join_condition = functools.reduce(
        operator.and_,
        [
            F.col(f"incoming.`{colname}`") == F.col(f"existing.`{colname}`") for colname in business_keys
        ]
    )

    # Deletes require scanning the entire existing set, so only pay for a
    # full outer join when they were requested.
    join_type = "leftouter"
    if include_deletes:
        join_type = "fullouter"
    _all_rows = df_incoming.alias("incoming").join(df_existing.alias("existing"), join_condition, join_type)

    #
    # inserts - incoming rows that matched no existing business keys
    #
    df_insert = _all_rows.filter(
        functools.reduce(
            operator.and_,
            [
                F.col(f"existing.`{colname}`").isNull() for colname in business_keys
            ]
        )
    ).select("incoming.*")

    #
    # deletes - existing rows that matched no incoming business keys
    #
    if include_deletes:
        df_delete = _all_rows.filter(
            functools.reduce(
                operator.and_,
                [
                    F.col(f"incoming.`{colname}`").isNull() for colname in business_keys
                ]
            )
        ).select("existing.*")
    else:
        df_delete = None

    #
    # updates - rows that matched all business keys
    #
    if exclusion_condition and exclusion_condition != "":
        # BUG FIX: capture the excluded rows BEFORE filtering them out.
        # The previous code removed matching rows first and then filtered
        # the remainder for the same condition, so df_exclude was always
        # empty.
        df_exclude = _all_rows.filter(F.expr(exclusion_condition))
        _all_rows = _all_rows.filter(~F.expr(exclusion_condition))
    else:
        df_exclude = None
    df_update = _all_rows.filter(join_condition)

    # Compare only the requested columns, or all shared columns when no
    # explicit list was supplied.
    if columns and columns != []:
        shared_cols = list(set(map(str.lower, df_incoming.columns)).intersection(
            map(str.lower, df_existing.columns)).intersection(map(str.lower, columns)))
    else:
        shared_cols = list(set(map(str.lower, df_incoming.columns)).intersection(map(str.lower, df_existing.columns)))

    # Find the matched rows whose compared columns actually differ, then
    # join back to recover the full incoming records.
    original_columns = list(set(map(str.lower, shared_cols + business_keys)))
    df_update_ids = df_update.select(
        [F.col(f"incoming.{colname}") for colname in original_columns]
    ).exceptAll(df_update.select(
        [F.col(f"existing.{colname}") for colname in original_columns]
    ))
    join_condition_ids = functools.reduce(
        operator.and_,
        [
            F.col(f"incoming.`{colname}`") == F.col(f"update_ids.`{colname}`") for colname in business_keys
        ]
    )
    df_update = _all_rows.join(df_update_ids.alias("update_ids"), join_condition_ids, "inner").select("incoming.*")

    return Delta(
        insert=df_insert,
        update=df_update,
        delete=df_delete,
        exclude=df_exclude
    )
| 33.556604
| 126
| 0.648299
|
import pyspark.sql
import pyspark.sql.functions as F
import pyspark.sql.types as T
import logging
import collections
from typing import List
import functools
import operator
logger = logging.getLogger("dy_runner")
Delta = collections.namedtuple('Delta', 'insert update delete exclude')
def find_delta(
        df_existing,
        df_incoming,
        business_keys: List[str],
        columns: List[str],
        include_deletes: bool = False,
        exclusion_condition: str = ""
):
    """Split incoming vs existing rows into insert/update/delete/exclude
    DataFrames, matching rows on ``business_keys``. ``delete`` and
    ``exclude`` are None unless requested/configured."""
    join_condition = functools.reduce(
        operator.and_,
        [
            F.col(f"incoming.`{colname}`") == F.col(f"existing.`{colname}`") for colname in business_keys
        ]
    )
    # Full outer join only when deletes are needed (costlier scan).
    join_type = "leftouter"
    if include_deletes:
        join_type = "fullouter"
    _all_rows = df_incoming.alias("incoming").join(df_existing.alias("existing"), join_condition, join_type)
    # Inserts: incoming rows with no matching existing business keys.
    df_insert = _all_rows.filter(
        functools.reduce(
            operator.and_,
            [
                F.col(f"existing.`{colname}`").isNull() for colname in business_keys
            ]
        )
    ).select("incoming.*")
    if include_deletes:
        # Deletes: existing rows with no matching incoming business keys.
        df_delete = _all_rows.filter(
            functools.reduce(
                operator.and_,
                [
                    F.col(f"incoming.`{colname}`").isNull() for colname in business_keys
                ]
            )
        ).select("existing.*")
    else:
        df_delete = None
    if exclusion_condition and exclusion_condition != "":
        # BUG FIX: compute df_exclude before removing those rows;
        # the previous order left df_exclude always empty.
        df_exclude = _all_rows.filter(F.expr(exclusion_condition))
        _all_rows = _all_rows.filter(~F.expr(exclusion_condition))
    else:
        df_exclude = None
    df_update = _all_rows.filter(join_condition)
    # Columns participating in change detection.
    if columns and columns != []:
        shared_cols = list(set(map(str.lower, df_incoming.columns)).intersection(
            map(str.lower, df_existing.columns)).intersection(map(str.lower, columns)))
    else:
        shared_cols = list(set(map(str.lower, df_incoming.columns)).intersection(map(str.lower, df_existing.columns)))
    original_columns = list(set(map(str.lower, shared_cols + business_keys)))
    df_update_ids = df_update.select(
        [F.col(f"incoming.{colname}") for colname in original_columns]
    ).exceptAll(df_update.select(
        [F.col(f"existing.{colname}") for colname in original_columns]
    ))
    join_condition_ids = functools.reduce(
        operator.and_,
        [
            F.col(f"incoming.`{colname}`") == F.col(f"update_ids.`{colname}`") for colname in business_keys
        ]
    )
    # Join back to recover the complete incoming rows that changed.
    df_update = _all_rows.join(df_update_ids.alias("update_ids"), join_condition_ids, "inner").select("incoming.*")
    return Delta(
        insert=df_insert,
        update=df_update,
        delete=df_delete,
        exclude=df_exclude
    )
| true
| true
|
1c4a68f1c5b6a23e89d1c5fd877e88f0987b2ccf
| 2,069
|
py
|
Python
|
common/utils.py
|
Spearis666/SARA
|
99f4d70053faaf15c89bdb5a6ef6b624853b1f9f
|
[
"MIT"
] | null | null | null |
common/utils.py
|
Spearis666/SARA
|
99f4d70053faaf15c89bdb5a6ef6b624853b1f9f
|
[
"MIT"
] | null | null | null |
common/utils.py
|
Spearis666/SARA
|
99f4d70053faaf15c89bdb5a6ef6b624853b1f9f
|
[
"MIT"
] | null | null | null |
import os, sys
# Check if folder exist, and try to create it if not
def checkFolder(path):
    """Ensure *path* exists; return True on success, False if it could
    not be created (OSError from os.makedirs)."""
    if not os.path.exists(path):
        try:
            os.makedirs(path)
        except OSError:
            return False
    return True
# List files in directory with specific extensions
def getFilesIn(directory, extensions, subdir=False):
    """Return paths of files under *directory* whose names end with one
    of *extensions*; descend into subdirectories only when *subdir*."""
    matches = []
    for root, dirs, names in os.walk(directory):
        if not subdir:
            # Emptying dirs in place stops os.walk from descending.
            del dirs[:]
        for name in names:
            if name.endswith(tuple(extensions)):
                matches.append(os.path.join(root, name))
    return matches
# Get script directory
def getScriptPath():
    """Absolute directory containing the invoked script (sys.argv[0])."""
    script = os.path.realpath(sys.argv[0])
    return os.path.dirname(script)
# Return size of one or multiples files
def getSize(filesList):
    """Total size in bytes of every file in *filesList*."""
    return sum(os.path.getsize(filePath) for filePath in filesList)
# Make path relative (need to be relative for archiver retain
# directory structure, may be useful when it's a bunch of files/folder,
# like blueray/dvd)
def makeRelativePath(rootPath, filesList):
    """Strip every 'rootPath/' occurrence from each entry, mutating
    *filesList* in place and returning it."""
    prefix = rootPath + "/"
    for idx, entry in enumerate(filesList):
        filesList[idx] = entry.replace(prefix, "")
    return filesList
# The million dollar question ? YES or NO ? :p
def queryYesNo(question, default="no"):
    """Interactively ask *question* until a yes/no answer is given.

    Returns True for yes, False for no. An empty answer selects
    *default* (which may be None to force an explicit choice).
    Raises ValueError for an unrecognised *default*.
    """
    valid = {"yes": True, "y": True,
             "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        print(question + prompt)
        answer = input("> ").lower()
        if answer == '' and default is not None:
            return valid[default]
        if answer in valid:
            return valid[answer]
        print("Please respond with 'yes' or 'no' "
              "(or 'y' or 'n').\n")
| 26.87013
| 71
| 0.61189
|
import os, sys
def checkFolder(path):
    """Return True if *path* exists or could be created, else False."""
    if os.path.exists(path):
        return True
    else:
        try:
            os.makedirs(path)
            return True
        except OSError:
            # Creation failed (permissions, race, bad path, ...).
            return False
def getFilesIn(directory, extensions, subdir=False):
    """List files under *directory* ending with one of *extensions*;
    recurse into subdirectories only when *subdir* is True."""
    filePaths = []
    for root, directories, files in os.walk(directory):
        if not subdir:
            # Emptying the list in place stops os.walk from descending.
            del(directories[:])
        for filename in files:
            for extension in extensions:
                if filename.endswith(extension):
                    filepath = os.path.join(root, filename)
                    filePaths.append(filepath)
                    break
    return filePaths
def getScriptPath():
    """Absolute directory containing the invoked script (sys.argv[0])."""
    return os.path.dirname(os.path.realpath(sys.argv[0]))
def getSize(filesList):
    """Total size in bytes of every file in *filesList*."""
    totalSizeBytes = 0
    for filePath in filesList:
        totalSizeBytes += os.path.getsize(filePath)
    return totalSizeBytes
# Make paths relative so an archiver retains directory structure
# (e.g. for a bundle of files/folders like a blu-ray/DVD rip).
def makeRelativePath(rootPath, filesList):
    """Strip 'rootPath/' from each entry, mutating and returning *filesList*."""
    for i, _ in enumerate(filesList):
        filesList[i] = filesList[i].replace(rootPath + "/", "")
    return filesList
# The million dollar question ? YES or NO ? :p
def queryYesNo(question, default="no"):
    """Ask *question* on stdin until a yes/no answer is given.

    Returns True for yes, False for no; empty input selects *default*
    (None forces an explicit answer). Raises ValueError for an
    unrecognised *default*.
    """
    valid = {"yes": True, "y": True,
             "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        print(question + prompt)
        choice = input("> ").lower()
        if default is not None and choice == '':
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            print("Please respond with 'yes' or 'no' "
                  "(or 'y' or 'n').\n")
| true
| true
|
1c4a6920c0679fbb93d994ae2d9852889fe7d1aa
| 2,118
|
py
|
Python
|
tests/test_read_ltxt.py
|
l-johnston/toolbag
|
1bd6ca61bfaf5856e5de320926d5593291e39e9c
|
[
"MIT"
] | null | null | null |
tests/test_read_ltxt.py
|
l-johnston/toolbag
|
1bd6ca61bfaf5856e5de320926d5593291e39e9c
|
[
"MIT"
] | null | null | null |
tests/test_read_ltxt.py
|
l-johnston/toolbag
|
1bd6ca61bfaf5856e5de320926d5593291e39e9c
|
[
"MIT"
] | null | null | null |
"""Test read_ltxt"""
from tempfile import TemporaryFile
from unyt import unyt_array
from unyt.testing import allclose_units
from toolbag import read_ltxt
# pylint: disable=missing-function-docstring
# pylint: disable=invalid-name
def test_time():
    """read_ltxt on a tab-separated time/V(out) export: header, legends,
    units, and all three column access styles."""
    with TemporaryFile(mode="w+t", encoding="utf-8") as fp:
        fp.write("time\tV(out)\n0.0e+0\t1.0e+0\n1.0e+0\t2.0e+0")
        fp.seek(0)
        data = read_ltxt(fp)
        assert data.header == "time\tV(out)"
        assert data.legends == ["time", "V(out)"]
        seconds = unyt_array([0.0, 1.0], "s")
        for column in (data.time, data["time"], data[0]):
            assert allclose_units(column, seconds)
        volts = unyt_array([1.0, 2.0], "V")
        for column in (data.V_out, data["V(out)"], data[1]):
            assert allclose_units(column, volts)
def test_frequency_dBdeg():
    """read_ltxt on an AC export with (dB, degree) pairs: the complex
    column is split into magnitude [0] and phase [1] components."""
    with TemporaryFile(mode="w+t", encoding="utf-8") as file:
        file.write(
            """Freq.\tV(out)
0.0e+0\t(0.0e+0dB,0.0e+0°)
1.0e+0\t(-3.0e+0dB,-9.0e+1°)"""
        )
        file.seek(0)
        data = read_ltxt(file)
        # "Freq." is normalised to "frequency" in the parsed header.
        assert data.header == "frequency\tV(out)"
        expected = unyt_array([0.0, 1.0], "Hz")
        assert allclose_units(data.frequency, expected)
        expected = unyt_array([0.0, -3.0], "dB")
        assert allclose_units(data.V_out[0], expected)
        expected = unyt_array([0.0, -90.0], "degree")
        assert allclose_units(data.V_out[1], expected)
def test_frequency_reim():
    """read_ltxt on an AC export with real,imag pairs: the complex
    column is split into real [0] and imaginary [1] parts in volts."""
    with TemporaryFile(mode="w+t", encoding="utf-8") as file:
        file.write(
            """Freq.\tV(out)
0.0e+0\t0.0e+0,0.0e+0
1.0e+0\t2.0e+0,3.0e+0"""
        )
        file.seek(0)
        data = read_ltxt(file)
        # "Freq." is normalised to "frequency" in the parsed header.
        assert data.header == "frequency\tV(out)"
        expected = unyt_array([0.0, 1.0], "Hz")
        assert allclose_units(data.frequency, expected)
        expected = unyt_array([0.0, 2.0], "V")
        assert allclose_units(data.V_out[0], expected)
        expected = unyt_array([0.0, 3.0], "V")
        assert allclose_units(data.V_out[1], expected)
| 34.721311
| 66
| 0.626062
|
from tempfile import TemporaryFile
from unyt import unyt_array
from unyt.testing import allclose_units
from toolbag import read_ltxt
def test_time():
    """Time-column export: header, legends, units, and the three
    access styles (attribute, key, index) per column."""
    with TemporaryFile(mode="w+t", encoding="utf-8") as file:
        file.write("time\tV(out)\n0.0e+0\t1.0e+0\n1.0e+0\t2.0e+0")
        file.seek(0)
        data = read_ltxt(file)
        assert data.header == "time\tV(out)"
        assert data.legends == ["time", "V(out)"]
        expected = unyt_array([0.0, 1.0], "s")
        assert allclose_units(data.time, expected)
        assert allclose_units(data["time"], expected)
        assert allclose_units(data[0], expected)
        expected = unyt_array([1.0, 2.0], "V")
        assert allclose_units(data.V_out, expected)
        assert allclose_units(data["V(out)"], expected)
        assert allclose_units(data[1], expected)
def test_frequency_dBdeg():
    """AC export with (dB, degree) pairs split into magnitude/phase."""
    with TemporaryFile(mode="w+t", encoding="utf-8") as file:
        file.write(
            """Freq.\tV(out)
0.0e+0\t(0.0e+0dB,0.0e+0°)
1.0e+0\t(-3.0e+0dB,-9.0e+1°)"""
        )
        file.seek(0)
        data = read_ltxt(file)
        assert data.header == "frequency\tV(out)"
        expected = unyt_array([0.0, 1.0], "Hz")
        assert allclose_units(data.frequency, expected)
        expected = unyt_array([0.0, -3.0], "dB")
        assert allclose_units(data.V_out[0], expected)
        expected = unyt_array([0.0, -90.0], "degree")
        assert allclose_units(data.V_out[1], expected)
def test_frequency_reim():
    """AC export with real,imag pairs split into real/imaginary parts."""
    with TemporaryFile(mode="w+t", encoding="utf-8") as file:
        file.write(
            """Freq.\tV(out)
0.0e+0\t0.0e+0,0.0e+0
1.0e+0\t2.0e+0,3.0e+0"""
        )
        file.seek(0)
        data = read_ltxt(file)
        assert data.header == "frequency\tV(out)"
        expected = unyt_array([0.0, 1.0], "Hz")
        assert allclose_units(data.frequency, expected)
        expected = unyt_array([0.0, 2.0], "V")
        assert allclose_units(data.V_out[0], expected)
        expected = unyt_array([0.0, 3.0], "V")
        assert allclose_units(data.V_out[1], expected)
| true
| true
|
1c4a6a7fe13f1d334ea7b780e192d677da6eed5f
| 570
|
py
|
Python
|
textflow/model/__init__.py
|
ysenarath/textflow
|
ebb86cbedaf6ba7ed62a9f811a7d7d1818d938ac
|
[
"MIT"
] | 4
|
2020-12-10T19:38:15.000Z
|
2021-08-02T02:00:46.000Z
|
textflow/model/__init__.py
|
ysenarath/textflow
|
ebb86cbedaf6ba7ed62a9f811a7d7d1818d938ac
|
[
"MIT"
] | 2
|
2021-01-08T18:35:04.000Z
|
2021-02-07T04:25:56.000Z
|
textflow/model/__init__.py
|
ysenarath/textflow
|
ebb86cbedaf6ba7ed62a9f811a7d7d1818d938ac
|
[
"MIT"
] | 1
|
2021-04-04T19:21:40.000Z
|
2021-04-04T19:21:40.000Z
|
""" Model """
from textflow.model.annotation import AnnotationSet, Annotation, AnnotationSpan
from textflow.model.dataset import Dataset, datasets
from textflow.model.document import Document
from textflow.model.label import Label
from textflow.model.estimator import estimators
from textflow.model.project import Project
from textflow.model.user import Assignment, User
__all__ = [
'Annotation',
'AnnotationSet',
'AnnotationSpan',
'Document',
'Project',
'Label',
'Assignment',
'User',
'Dataset',
'datasets',
'estimators',
]
| 24.782609
| 79
| 0.72807
|
from textflow.model.annotation import AnnotationSet, Annotation, AnnotationSpan
from textflow.model.dataset import Dataset, datasets
from textflow.model.document import Document
from textflow.model.label import Label
from textflow.model.estimator import estimators
from textflow.model.project import Project
from textflow.model.user import Assignment, User
__all__ = [
'Annotation',
'AnnotationSet',
'AnnotationSpan',
'Document',
'Project',
'Label',
'Assignment',
'User',
'Dataset',
'datasets',
'estimators',
]
| true
| true
|
1c4a6aa6a00384fc6746c01b52b7972d18f38e00
| 679
|
py
|
Python
|
src/leetcode/Coding_Interviews/number_of_1.py
|
highing666/leaving
|
c121ee2f61e45472bb71e2770d0697e902279a64
|
[
"MIT"
] | null | null | null |
src/leetcode/Coding_Interviews/number_of_1.py
|
highing666/leaving
|
c121ee2f61e45472bb71e2770d0697e902279a64
|
[
"MIT"
] | null | null | null |
src/leetcode/Coding_Interviews/number_of_1.py
|
highing666/leaving
|
c121ee2f61e45472bb71e2770d0697e902279a64
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class Solution:
    def countDigitOne(self, n: int) -> int:
        """Count occurrences of the digit 1 in all integers from 1 to n.

        Walks n digit-by-digit from the least significant place; for each
        place, the count of 1s contributed depends on whether the current
        digit is 0, 1, or >1.
        """
        place, total = 1, 0
        higher, current, lower = n // 10, n % 10, 0
        while higher != 0 or current != 0:
            if current == 0:
                # Place digit never reaches 1 within the 'higher' prefix alone.
                total += higher * place
            elif current == 1:
                # Partial block: 1s up to the current lower suffix, inclusive.
                total += higher * place + lower + 1
            else:
                # Full block of 1s for every prefix value up to 'higher'.
                total += (higher + 1) * place
            lower += current * place
            current = higher % 10
            higher //= 10
            place *= 10
        return total
if __name__ == "__main__":
    # Ad-hoc smoke run: print the digit-1 count for a sample value.
    solution = Solution()
    test_num = 12344
    print(solution.countDigitOne(test_num))
| 23.413793
| 48
| 0.427099
|
class Solution:
    def countDigitOne(self, n: int) -> int:
        """Count occurrences of the digit 1 in all integers from 1 to n,
        processing one decimal place per loop iteration."""
        digit, result = 1, 0
        high, cur, low = n // 10, n % 10, 0
        while high != 0 or cur != 0:
            if cur == 0:
                result += high * digit
            elif cur == 1:
                # Partial block: include the current lower suffix.
                result += high * digit + low + 1
            else:
                result += (high + 1) * digit
            low += cur * digit
            cur = high % 10
            high //= 10
            digit *= 10
        return result
if __name__ == "__main__":
    # Ad-hoc smoke run: print the digit-1 count for a sample value.
    solution = Solution()
    test_num = 12344
    print(solution.countDigitOne(test_num))
| true
| true
|
1c4a6b229d3ab0ade8a6a29420ac282ec48469e8
| 2,616
|
py
|
Python
|
analytics/urls.py
|
fearless0307/zulip
|
378d14af7ea73a9a83c7245706cd918bec5a37bf
|
[
"Apache-2.0"
] | 2
|
2019-04-24T15:22:52.000Z
|
2020-01-18T11:01:31.000Z
|
analytics/urls.py
|
fearless0307/zulip
|
378d14af7ea73a9a83c7245706cd918bec5a37bf
|
[
"Apache-2.0"
] | 10
|
2019-02-26T11:10:42.000Z
|
2019-02-26T14:30:24.000Z
|
analytics/urls.py
|
fearless0307/zulip
|
378d14af7ea73a9a83c7245706cd918bec5a37bf
|
[
"Apache-2.0"
] | 1
|
2020-01-07T15:49:54.000Z
|
2020-01-07T15:49:54.000Z
|
from django.conf.urls import include, url

import analytics.views
from zerver.lib.rest import rest_dispatch

i18n_urlpatterns = [
    # Server admin (user_profile.is_staff) visible stats pages
    url(r'^activity$', analytics.views.get_activity,
        name='analytics.views.get_activity'),
    url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
        name='analytics.views.get_realm_activity'),
    url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
        name='analytics.views.get_user_activity'),
    url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm,
        name='analytics.views.stats_for_realm'),
    url(r'^stats/installation$', analytics.views.stats_for_installation,
        name='analytics.views.stats_for_installation'),
    # Remote-server variants serve stats for hosted (Zulip-services) installs.
    url(r'^stats/remote/(?P<remote_server_id>[\S]+)/installation$',
        analytics.views.stats_for_remote_installation,
        name='analytics.views.stats_for_remote_installation'),
    url(r'^stats/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)/$',
        analytics.views.stats_for_remote_realm,
        name='analytics.views.stats_for_remote_realm'),

    # User-visible stats page
    url(r'^stats$', analytics.views.stats,
        name='analytics.views.stats'),
]

# These endpoints are a part of the API (V1), which uses:
# * REST verbs
# * Basic auth (username:password is email:apiKey)
# * Takes and returns json-formatted data
#
# See rest_dispatch in zerver.lib.rest for an explanation of auth methods used
#
# All of these paths are accessed by either a /json or /api prefix
v1_api_and_json_patterns = [
    # get data for the graphs at /stats
    url(r'^analytics/chart_data$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data'}),
    url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_realm'}),
    url(r'^analytics/chart_data/installation$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_installation'}),
    url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/installation$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_remote_installation'}),
    url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)$',
        rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_remote_realm'}),
]

# Expose the API patterns under both session-auth (/json) and API-auth (/api/v1) prefixes.
i18n_urlpatterns += [
    url(r'^api/v1/', include(v1_api_and_json_patterns)),
    url(r'^json/', include(v1_api_and_json_patterns)),
]

urlpatterns = i18n_urlpatterns
| 43.6
| 102
| 0.706804
|
from django.conf.urls import include, url
import analytics.views
from zerver.lib.rest import rest_dispatch

# Server-admin (is_staff) visible stats pages, plus the user-visible /stats page.
i18n_urlpatterns = [
    url(r'^activity$', analytics.views.get_activity,
        name='analytics.views.get_activity'),
    url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
        name='analytics.views.get_realm_activity'),
    url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
        name='analytics.views.get_user_activity'),
    url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm,
        name='analytics.views.stats_for_realm'),
    url(r'^stats/installation$', analytics.views.stats_for_installation,
        name='analytics.views.stats_for_installation'),
    url(r'^stats/remote/(?P<remote_server_id>[\S]+)/installation$',
        analytics.views.stats_for_remote_installation,
        name='analytics.views.stats_for_remote_installation'),
    url(r'^stats/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)/$',
        analytics.views.stats_for_remote_realm,
        name='analytics.views.stats_for_remote_realm'),
    url(r'^stats$', analytics.views.stats,
        name='analytics.views.stats'),
]

# JSON API endpoints (chart data for /stats), dispatched via rest_dispatch.
v1_api_and_json_patterns = [
    url(r'^analytics/chart_data$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data'}),
    url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_realm'}),
    url(r'^analytics/chart_data/installation$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_installation'}),
    url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/installation$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_remote_installation'}),
    url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)$',
        rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_remote_realm'}),
]

# Expose the API patterns under both /api/v1 and /json prefixes.
i18n_urlpatterns += [
    url(r'^api/v1/', include(v1_api_and_json_patterns)),
    url(r'^json/', include(v1_api_and_json_patterns)),
]

urlpatterns = i18n_urlpatterns
| true
| true
|
1c4a6b3c9898b1cd500e1c9204c8490401f0ca8f
| 18,701
|
py
|
Python
|
python_modules/dagster-graphql/dagster_graphql/schema/roots/query.py
|
ibelikov/dagster
|
6781eaadd33ecfb0b48d7c2c7d8e193efbda4209
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster-graphql/dagster_graphql/schema/roots/query.py
|
ibelikov/dagster
|
6781eaadd33ecfb0b48d7c2c7d8e193efbda4209
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster-graphql/dagster_graphql/schema/roots/query.py
|
ibelikov/dagster
|
6781eaadd33ecfb0b48d7c2c7d8e193efbda4209
|
[
"Apache-2.0"
] | 1
|
2021-11-25T11:06:39.000Z
|
2021-11-25T11:06:39.000Z
|
import graphene
from dagster import check
from dagster.core.definitions.events import AssetKey
from dagster.core.execution.backfill import BulkActionStatus
from dagster.core.host_representation import (
InstigationSelector,
RepositorySelector,
ScheduleSelector,
SensorSelector,
)
from dagster.core.scheduler.instigation import InstigatorType
from ...implementation.external import fetch_repositories, fetch_repository, fetch_workspace
from ...implementation.fetch_assets import get_asset, get_asset_node, get_asset_nodes, get_assets
from ...implementation.fetch_backfills import get_backfill, get_backfills
from ...implementation.fetch_jobs import get_job_state_or_error, get_unloadable_job_states_or_error
from ...implementation.fetch_partition_sets import get_partition_set, get_partition_sets_or_error
from ...implementation.fetch_pipelines import (
get_pipeline_or_error,
get_pipeline_snapshot_or_error_from_pipeline_selector,
get_pipeline_snapshot_or_error_from_snapshot_id,
)
from ...implementation.fetch_runs import (
get_execution_plan,
get_run_by_id,
get_run_group,
get_run_groups,
get_run_tags,
validate_pipeline_config,
)
from ...implementation.fetch_schedules import (
get_schedule_or_error,
get_scheduler_or_error,
get_schedules_or_error,
)
from ...implementation.fetch_sensors import get_sensor_or_error, get_sensors_or_error
from ...implementation.fetch_solids import get_graph_or_error
from ...implementation.loader import BatchMaterializationLoader
from ...implementation.run_config_schema import resolve_run_config_schema_or_error
from ...implementation.utils import graph_selector_from_graphql, pipeline_selector_from_graphql
from ..asset_graph import GrapheneAssetNode, GrapheneAssetNodeOrError
from ..backfill import (
GrapheneBulkActionStatus,
GraphenePartitionBackfillOrError,
GraphenePartitionBackfillsOrError,
)
from ..external import (
GrapheneRepositoriesOrError,
GrapheneRepositoryOrError,
GrapheneWorkspaceOrError,
)
from ..inputs import (
GrapheneAssetKeyInput,
GrapheneGraphSelector,
GrapheneInstigationSelector,
GraphenePipelineSelector,
GrapheneRepositorySelector,
GrapheneRunsFilter,
GrapheneScheduleSelector,
GrapheneSensorSelector,
)
from ..instance import GrapheneInstance
from ..instigation import (
GrapheneInstigationStateOrError,
GrapheneInstigationStatesOrError,
GrapheneInstigationType,
)
from ..partition_sets import GraphenePartitionSetOrError, GraphenePartitionSetsOrError
from ..permissions import GraphenePermission
from ..pipelines.config_result import GraphenePipelineConfigValidationResult
from ..pipelines.pipeline import GrapheneRunOrError
from ..pipelines.snapshot import GraphenePipelineSnapshotOrError
from ..run_config import GrapheneRunConfigSchemaOrError
from ..runs import (
GrapheneRunConfigData,
GrapheneRunGroupOrError,
GrapheneRunGroupsOrError,
GrapheneRuns,
GrapheneRunsOrError,
parse_run_config_input,
)
from ..schedules import GrapheneScheduleOrError, GrapheneSchedulerOrError, GrapheneSchedulesOrError
from ..sensors import GrapheneSensorOrError, GrapheneSensorsOrError
from ..tags import GraphenePipelineTagAndValues
from ..util import non_null_list
from .assets import GrapheneAssetOrError, GrapheneAssetsOrError
from .execution_plan import GrapheneExecutionPlanOrError
from .pipeline import GrapheneGraphOrError, GraphenePipelineOrError
class GrapheneDagitQuery(graphene.ObjectType):
class Meta:
name = "DagitQuery"
version = graphene.NonNull(graphene.String)
repositoriesOrError = graphene.NonNull(GrapheneRepositoriesOrError)
repositoryOrError = graphene.Field(
graphene.NonNull(GrapheneRepositoryOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
workspaceOrError = graphene.NonNull(GrapheneWorkspaceOrError)
pipelineOrError = graphene.Field(
graphene.NonNull(GraphenePipelineOrError), params=graphene.NonNull(GraphenePipelineSelector)
)
pipelineSnapshotOrError = graphene.Field(
graphene.NonNull(GraphenePipelineSnapshotOrError),
snapshotId=graphene.String(),
activePipelineSelector=graphene.Argument(GraphenePipelineSelector),
)
graphOrError = graphene.Field(
graphene.NonNull(GrapheneGraphOrError),
selector=graphene.Argument(GrapheneGraphSelector),
)
scheduler = graphene.Field(graphene.NonNull(GrapheneSchedulerOrError))
scheduleOrError = graphene.Field(
graphene.NonNull(GrapheneScheduleOrError),
schedule_selector=graphene.NonNull(GrapheneScheduleSelector),
)
schedulesOrError = graphene.Field(
graphene.NonNull(GrapheneSchedulesOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
sensorOrError = graphene.Field(
graphene.NonNull(GrapheneSensorOrError),
sensorSelector=graphene.NonNull(GrapheneSensorSelector),
)
sensorsOrError = graphene.Field(
graphene.NonNull(GrapheneSensorsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
instigationStateOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStateOrError),
instigationSelector=graphene.NonNull(GrapheneInstigationSelector),
)
unloadableInstigationStatesOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStatesOrError),
instigationType=graphene.Argument(GrapheneInstigationType),
)
partitionSetsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
pipelineName=graphene.NonNull(graphene.String),
)
partitionSetOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
partitionSetName=graphene.String(),
)
pipelineRunsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
pipelineRunOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
runsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
runOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
pipelineRunTags = non_null_list(GraphenePipelineTagAndValues)
runGroupOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupOrError), runId=graphene.NonNull(graphene.ID)
)
runGroupsOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
isPipelineConfigValid = graphene.Field(
graphene.NonNull(GraphenePipelineConfigValidationResult),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
executionPlanOrError = graphene.Field(
graphene.NonNull(GrapheneExecutionPlanOrError),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
runConfigSchemaOrError = graphene.Field(
graphene.NonNull(GrapheneRunConfigSchemaOrError),
args={
"selector": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"mode": graphene.Argument(graphene.String),
},
description="""Fetch an environment schema given an execution selection and a mode.
See the descripton on RunConfigSchema for more information.""",
)
instance = graphene.NonNull(GrapheneInstance)
assetsOrError = graphene.Field(
graphene.NonNull(GrapheneAssetsOrError),
prefix=graphene.List(graphene.NonNull(graphene.String)),
cursor=graphene.String(),
limit=graphene.Int(),
)
assetOrError = graphene.Field(
graphene.NonNull(GrapheneAssetOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
assetNodes = graphene.Field(
non_null_list(GrapheneAssetNode),
pipeline=graphene.Argument(GraphenePipelineSelector),
assetKeys=graphene.Argument(graphene.List(graphene.NonNull(GrapheneAssetKeyInput))),
loadMaterializations=graphene.Boolean(default_value=False),
)
assetNodeOrError = graphene.Field(
graphene.NonNull(GrapheneAssetNodeOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
partitionBackfillOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillOrError),
backfillId=graphene.Argument(graphene.NonNull(graphene.String)),
)
partitionBackfillsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillsOrError),
status=graphene.Argument(GrapheneBulkActionStatus),
cursor=graphene.String(),
limit=graphene.Int(),
)
permissions = graphene.Field(non_null_list(GraphenePermission))
def resolve_repositoriesOrError(self, graphene_info):
return fetch_repositories(graphene_info)
def resolve_repositoryOrError(self, graphene_info, **kwargs):
return fetch_repository(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_workspaceOrError(self, graphene_info):
return fetch_workspace(graphene_info.context)
def resolve_pipelineSnapshotOrError(self, graphene_info, **kwargs):
snapshot_id_arg = kwargs.get("snapshotId")
pipeline_selector_arg = kwargs.get("activePipelineSelector")
check.invariant(
not (snapshot_id_arg and pipeline_selector_arg),
"Must only pass one of snapshotId or activePipelineSelector",
)
check.invariant(
snapshot_id_arg or pipeline_selector_arg,
"Must set one of snapshotId or activePipelineSelector",
)
if pipeline_selector_arg:
pipeline_selector = pipeline_selector_from_graphql(kwargs["activePipelineSelector"])
return get_pipeline_snapshot_or_error_from_pipeline_selector(
graphene_info, pipeline_selector
)
else:
return get_pipeline_snapshot_or_error_from_snapshot_id(graphene_info, snapshot_id_arg)
def resolve_graphOrError(self, graphene_info, **kwargs):
graph_selector = graph_selector_from_graphql(kwargs["selector"])
return get_graph_or_error(graphene_info, graph_selector)
def resolve_version(self, graphene_info):
return graphene_info.context.version
def resolve_scheduler(self, graphene_info):
return get_scheduler_or_error(graphene_info)
def resolve_scheduleOrError(self, graphene_info, schedule_selector):
return get_schedule_or_error(
graphene_info, ScheduleSelector.from_graphql_input(schedule_selector)
)
def resolve_schedulesOrError(self, graphene_info, **kwargs):
return get_schedules_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_sensorOrError(self, graphene_info, sensorSelector):
return get_sensor_or_error(graphene_info, SensorSelector.from_graphql_input(sensorSelector))
def resolve_sensorsOrError(self, graphene_info, **kwargs):
return get_sensors_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_instigationStateOrError(self, graphene_info, instigationSelector):
return get_job_state_or_error(
graphene_info, InstigationSelector.from_graphql_input(instigationSelector)
)
def resolve_unloadableInstigationStatesOrError(self, graphene_info, **kwargs):
job_type = (
InstigatorType(kwargs["instigationType"]) if "instigationType" in kwargs else None
)
return get_unloadable_job_states_or_error(graphene_info, job_type)
def resolve_pipelineOrError(self, graphene_info, **kwargs):
return get_pipeline_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["params"]),
)
def resolve_pipelineRunsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_pipelineRunOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
def resolve_runsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_runOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
def resolve_runGroupsOrError(self, graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRunGroupsOrError(
results=get_run_groups(
graphene_info, filters, kwargs.get("cursor"), kwargs.get("limit")
)
)
def resolve_partitionSetsOrError(self, graphene_info, **kwargs):
return get_partition_sets_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("pipelineName"),
)
def resolve_partitionSetOrError(self, graphene_info, **kwargs):
return get_partition_set(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("partitionSetName"),
)
def resolve_pipelineRunTags(self, graphene_info):
return get_run_tags(graphene_info)
def resolve_runGroupOrError(self, graphene_info, runId):
return get_run_group(graphene_info, runId)
def resolve_isPipelineConfigValid(self, graphene_info, pipeline, **kwargs):
return validate_pipeline_config(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_executionPlanOrError(self, graphene_info, pipeline, **kwargs):
return get_execution_plan(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_runConfigSchemaOrError(self, graphene_info, **kwargs):
return resolve_run_config_schema_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["selector"]),
kwargs.get("mode"),
)
def resolve_instance(self, graphene_info):
return GrapheneInstance(graphene_info.context.instance)
def resolve_assetNodes(self, graphene_info, **kwargs):
asset_keys = set(
AssetKey.from_graphql_input(asset_key) for asset_key in kwargs.get("assetKeys", [])
)
if "pipeline" in kwargs:
pipeline_name = kwargs.get("pipeline").get("pipelineName")
repo_sel = RepositorySelector.from_graphql_input(kwargs.get("pipeline"))
repo_loc = graphene_info.context.get_repository_location(repo_sel.location_name)
repo = repo_loc.get_repository(repo_sel.repository_name)
external_asset_nodes = repo.get_external_asset_nodes(pipeline_name)
results = (
[GrapheneAssetNode(repo, asset_node) for asset_node in external_asset_nodes]
if external_asset_nodes
else []
)
else:
results = get_asset_nodes(graphene_info)
# Filter down to requested asset keys
results = [node for node in results if not asset_keys or node.assetKey in asset_keys]
if not results:
return []
materialization_loader = BatchMaterializationLoader(
instance=graphene_info.context.instance, asset_keys=[node.assetKey for node in results]
)
return [
GrapheneAssetNode(
node.get_external_repository(),
node.get_external_asset_node(),
materialization_loader=materialization_loader,
)
for node in results
]
def resolve_assetNodeOrError(self, graphene_info, **kwargs):
return get_asset_node(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_assetsOrError(self, graphene_info, **kwargs):
return get_assets(
graphene_info,
prefix=kwargs.get("prefix"),
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_assetOrError(self, graphene_info, **kwargs):
return get_asset(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_partitionBackfillOrError(self, graphene_info, backfillId):
return get_backfill(graphene_info, backfillId)
def resolve_partitionBackfillsOrError(self, graphene_info, **kwargs):
status = kwargs.get("status")
return get_backfills(
graphene_info,
status=BulkActionStatus.from_graphql_input(status) if status else None,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_permissions(self, graphene_info):
permissions = graphene_info.context.permissions
return [GraphenePermission(permission, value) for permission, value in permissions.items()]
| 38.087576
| 100
| 0.71825
|
import graphene
from dagster import check
from dagster.core.definitions.events import AssetKey
from dagster.core.execution.backfill import BulkActionStatus
from dagster.core.host_representation import (
InstigationSelector,
RepositorySelector,
ScheduleSelector,
SensorSelector,
)
from dagster.core.scheduler.instigation import InstigatorType
from ...implementation.external import fetch_repositories, fetch_repository, fetch_workspace
from ...implementation.fetch_assets import get_asset, get_asset_node, get_asset_nodes, get_assets
from ...implementation.fetch_backfills import get_backfill, get_backfills
from ...implementation.fetch_jobs import get_job_state_or_error, get_unloadable_job_states_or_error
from ...implementation.fetch_partition_sets import get_partition_set, get_partition_sets_or_error
from ...implementation.fetch_pipelines import (
get_pipeline_or_error,
get_pipeline_snapshot_or_error_from_pipeline_selector,
get_pipeline_snapshot_or_error_from_snapshot_id,
)
from ...implementation.fetch_runs import (
get_execution_plan,
get_run_by_id,
get_run_group,
get_run_groups,
get_run_tags,
validate_pipeline_config,
)
from ...implementation.fetch_schedules import (
get_schedule_or_error,
get_scheduler_or_error,
get_schedules_or_error,
)
from ...implementation.fetch_sensors import get_sensor_or_error, get_sensors_or_error
from ...implementation.fetch_solids import get_graph_or_error
from ...implementation.loader import BatchMaterializationLoader
from ...implementation.run_config_schema import resolve_run_config_schema_or_error
from ...implementation.utils import graph_selector_from_graphql, pipeline_selector_from_graphql
from ..asset_graph import GrapheneAssetNode, GrapheneAssetNodeOrError
from ..backfill import (
GrapheneBulkActionStatus,
GraphenePartitionBackfillOrError,
GraphenePartitionBackfillsOrError,
)
from ..external import (
GrapheneRepositoriesOrError,
GrapheneRepositoryOrError,
GrapheneWorkspaceOrError,
)
from ..inputs import (
GrapheneAssetKeyInput,
GrapheneGraphSelector,
GrapheneInstigationSelector,
GraphenePipelineSelector,
GrapheneRepositorySelector,
GrapheneRunsFilter,
GrapheneScheduleSelector,
GrapheneSensorSelector,
)
from ..instance import GrapheneInstance
from ..instigation import (
GrapheneInstigationStateOrError,
GrapheneInstigationStatesOrError,
GrapheneInstigationType,
)
from ..partition_sets import GraphenePartitionSetOrError, GraphenePartitionSetsOrError
from ..permissions import GraphenePermission
from ..pipelines.config_result import GraphenePipelineConfigValidationResult
from ..pipelines.pipeline import GrapheneRunOrError
from ..pipelines.snapshot import GraphenePipelineSnapshotOrError
from ..run_config import GrapheneRunConfigSchemaOrError
from ..runs import (
GrapheneRunConfigData,
GrapheneRunGroupOrError,
GrapheneRunGroupsOrError,
GrapheneRuns,
GrapheneRunsOrError,
parse_run_config_input,
)
from ..schedules import GrapheneScheduleOrError, GrapheneSchedulerOrError, GrapheneSchedulesOrError
from ..sensors import GrapheneSensorOrError, GrapheneSensorsOrError
from ..tags import GraphenePipelineTagAndValues
from ..util import non_null_list
from .assets import GrapheneAssetOrError, GrapheneAssetsOrError
from .execution_plan import GrapheneExecutionPlanOrError
from .pipeline import GrapheneGraphOrError, GraphenePipelineOrError
class GrapheneDagitQuery(graphene.ObjectType):
class Meta:
name = "DagitQuery"
version = graphene.NonNull(graphene.String)
repositoriesOrError = graphene.NonNull(GrapheneRepositoriesOrError)
repositoryOrError = graphene.Field(
graphene.NonNull(GrapheneRepositoryOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
workspaceOrError = graphene.NonNull(GrapheneWorkspaceOrError)
pipelineOrError = graphene.Field(
graphene.NonNull(GraphenePipelineOrError), params=graphene.NonNull(GraphenePipelineSelector)
)
pipelineSnapshotOrError = graphene.Field(
graphene.NonNull(GraphenePipelineSnapshotOrError),
snapshotId=graphene.String(),
activePipelineSelector=graphene.Argument(GraphenePipelineSelector),
)
graphOrError = graphene.Field(
graphene.NonNull(GrapheneGraphOrError),
selector=graphene.Argument(GrapheneGraphSelector),
)
scheduler = graphene.Field(graphene.NonNull(GrapheneSchedulerOrError))
scheduleOrError = graphene.Field(
graphene.NonNull(GrapheneScheduleOrError),
schedule_selector=graphene.NonNull(GrapheneScheduleSelector),
)
schedulesOrError = graphene.Field(
graphene.NonNull(GrapheneSchedulesOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
sensorOrError = graphene.Field(
graphene.NonNull(GrapheneSensorOrError),
sensorSelector=graphene.NonNull(GrapheneSensorSelector),
)
sensorsOrError = graphene.Field(
graphene.NonNull(GrapheneSensorsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
instigationStateOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStateOrError),
instigationSelector=graphene.NonNull(GrapheneInstigationSelector),
)
unloadableInstigationStatesOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStatesOrError),
instigationType=graphene.Argument(GrapheneInstigationType),
)
partitionSetsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
pipelineName=graphene.NonNull(graphene.String),
)
partitionSetOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
partitionSetName=graphene.String(),
)
pipelineRunsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
pipelineRunOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
runsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
runOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
pipelineRunTags = non_null_list(GraphenePipelineTagAndValues)
runGroupOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupOrError), runId=graphene.NonNull(graphene.ID)
)
runGroupsOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
isPipelineConfigValid = graphene.Field(
graphene.NonNull(GraphenePipelineConfigValidationResult),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
executionPlanOrError = graphene.Field(
graphene.NonNull(GrapheneExecutionPlanOrError),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
runConfigSchemaOrError = graphene.Field(
graphene.NonNull(GrapheneRunConfigSchemaOrError),
args={
"selector": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"mode": graphene.Argument(graphene.String),
},
description="""Fetch an environment schema given an execution selection and a mode.
See the descripton on RunConfigSchema for more information.""",
)
instance = graphene.NonNull(GrapheneInstance)
assetsOrError = graphene.Field(
graphene.NonNull(GrapheneAssetsOrError),
prefix=graphene.List(graphene.NonNull(graphene.String)),
cursor=graphene.String(),
limit=graphene.Int(),
)
assetOrError = graphene.Field(
graphene.NonNull(GrapheneAssetOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
assetNodes = graphene.Field(
non_null_list(GrapheneAssetNode),
pipeline=graphene.Argument(GraphenePipelineSelector),
assetKeys=graphene.Argument(graphene.List(graphene.NonNull(GrapheneAssetKeyInput))),
loadMaterializations=graphene.Boolean(default_value=False),
)
assetNodeOrError = graphene.Field(
graphene.NonNull(GrapheneAssetNodeOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
partitionBackfillOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillOrError),
backfillId=graphene.Argument(graphene.NonNull(graphene.String)),
)
partitionBackfillsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillsOrError),
status=graphene.Argument(GrapheneBulkActionStatus),
cursor=graphene.String(),
limit=graphene.Int(),
)
permissions = graphene.Field(non_null_list(GraphenePermission))
def resolve_repositoriesOrError(self, graphene_info):
return fetch_repositories(graphene_info)
def resolve_repositoryOrError(self, graphene_info, **kwargs):
return fetch_repository(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_workspaceOrError(self, graphene_info):
return fetch_workspace(graphene_info.context)
def resolve_pipelineSnapshotOrError(self, graphene_info, **kwargs):
snapshot_id_arg = kwargs.get("snapshotId")
pipeline_selector_arg = kwargs.get("activePipelineSelector")
check.invariant(
not (snapshot_id_arg and pipeline_selector_arg),
"Must only pass one of snapshotId or activePipelineSelector",
)
check.invariant(
snapshot_id_arg or pipeline_selector_arg,
"Must set one of snapshotId or activePipelineSelector",
)
if pipeline_selector_arg:
pipeline_selector = pipeline_selector_from_graphql(kwargs["activePipelineSelector"])
return get_pipeline_snapshot_or_error_from_pipeline_selector(
graphene_info, pipeline_selector
)
else:
return get_pipeline_snapshot_or_error_from_snapshot_id(graphene_info, snapshot_id_arg)
def resolve_graphOrError(self, graphene_info, **kwargs):
graph_selector = graph_selector_from_graphql(kwargs["selector"])
return get_graph_or_error(graphene_info, graph_selector)
def resolve_version(self, graphene_info):
return graphene_info.context.version
def resolve_scheduler(self, graphene_info):
return get_scheduler_or_error(graphene_info)
def resolve_scheduleOrError(self, graphene_info, schedule_selector):
return get_schedule_or_error(
graphene_info, ScheduleSelector.from_graphql_input(schedule_selector)
)
def resolve_schedulesOrError(self, graphene_info, **kwargs):
return get_schedules_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_sensorOrError(self, graphene_info, sensorSelector):
return get_sensor_or_error(graphene_info, SensorSelector.from_graphql_input(sensorSelector))
def resolve_sensorsOrError(self, graphene_info, **kwargs):
return get_sensors_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_instigationStateOrError(self, graphene_info, instigationSelector):
return get_job_state_or_error(
graphene_info, InstigationSelector.from_graphql_input(instigationSelector)
)
def resolve_unloadableInstigationStatesOrError(self, graphene_info, **kwargs):
job_type = (
InstigatorType(kwargs["instigationType"]) if "instigationType" in kwargs else None
)
return get_unloadable_job_states_or_error(graphene_info, job_type)
def resolve_pipelineOrError(self, graphene_info, **kwargs):
return get_pipeline_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["params"]),
)
def resolve_pipelineRunsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_pipelineRunOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
def resolve_runsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_runOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
def resolve_runGroupsOrError(self, graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRunGroupsOrError(
results=get_run_groups(
graphene_info, filters, kwargs.get("cursor"), kwargs.get("limit")
)
)
def resolve_partitionSetsOrError(self, graphene_info, **kwargs):
return get_partition_sets_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("pipelineName"),
)
def resolve_partitionSetOrError(self, graphene_info, **kwargs):
return get_partition_set(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("partitionSetName"),
)
def resolve_pipelineRunTags(self, graphene_info):
return get_run_tags(graphene_info)
def resolve_runGroupOrError(self, graphene_info, runId):
return get_run_group(graphene_info, runId)
def resolve_isPipelineConfigValid(self, graphene_info, pipeline, **kwargs):
return validate_pipeline_config(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_executionPlanOrError(self, graphene_info, pipeline, **kwargs):
return get_execution_plan(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_runConfigSchemaOrError(self, graphene_info, **kwargs):
return resolve_run_config_schema_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["selector"]),
kwargs.get("mode"),
)
def resolve_instance(self, graphene_info):
return GrapheneInstance(graphene_info.context.instance)
def resolve_assetNodes(self, graphene_info, **kwargs):
asset_keys = set(
AssetKey.from_graphql_input(asset_key) for asset_key in kwargs.get("assetKeys", [])
)
if "pipeline" in kwargs:
pipeline_name = kwargs.get("pipeline").get("pipelineName")
repo_sel = RepositorySelector.from_graphql_input(kwargs.get("pipeline"))
repo_loc = graphene_info.context.get_repository_location(repo_sel.location_name)
repo = repo_loc.get_repository(repo_sel.repository_name)
external_asset_nodes = repo.get_external_asset_nodes(pipeline_name)
results = (
[GrapheneAssetNode(repo, asset_node) for asset_node in external_asset_nodes]
if external_asset_nodes
else []
)
else:
results = get_asset_nodes(graphene_info)
results = [node for node in results if not asset_keys or node.assetKey in asset_keys]
if not results:
return []
materialization_loader = BatchMaterializationLoader(
instance=graphene_info.context.instance, asset_keys=[node.assetKey for node in results]
)
return [
GrapheneAssetNode(
node.get_external_repository(),
node.get_external_asset_node(),
materialization_loader=materialization_loader,
)
for node in results
]
def resolve_assetNodeOrError(self, graphene_info, **kwargs):
return get_asset_node(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_assetsOrError(self, graphene_info, **kwargs):
return get_assets(
graphene_info,
prefix=kwargs.get("prefix"),
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_assetOrError(self, graphene_info, **kwargs):
return get_asset(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_partitionBackfillOrError(self, graphene_info, backfillId):
return get_backfill(graphene_info, backfillId)
def resolve_partitionBackfillsOrError(self, graphene_info, **kwargs):
status = kwargs.get("status")
return get_backfills(
graphene_info,
status=BulkActionStatus.from_graphql_input(status) if status else None,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_permissions(self, graphene_info):
permissions = graphene_info.context.permissions
return [GraphenePermission(permission, value) for permission, value in permissions.items()]
| true
| true
|
1c4a6b7f9a2cd7b57e29cb5f2d14e1917bfc20b7
| 3,448
|
py
|
Python
|
tutorial/settings.py
|
cyndi088/tutorial
|
4a1e373554b827fce1719fe49e1e49412eaa7af5
|
[
"MIT"
] | null | null | null |
tutorial/settings.py
|
cyndi088/tutorial
|
4a1e373554b827fce1719fe49e1e49412eaa7af5
|
[
"MIT"
] | 2
|
2020-01-09T07:58:53.000Z
|
2020-02-12T14:57:46.000Z
|
tutorial/settings.py
|
cyndi088/tutorial
|
4a1e373554b827fce1719fe49e1e49412eaa7af5
|
[
"MIT"
] | null | null | null |
"""
Django settings for tutorial project.
Generated by 'django-admin startproject' using Django 2.2.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import sys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_DIR, 'tutorial'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '5nrnq$kj)nxh9@u-v#n83(9d4@x1r-vk%+%_5)!6$1smwy2-y&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'tutorial.snippets',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tutorial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# LOGIN_REDIRECT_URL = '/admin' # 登录后重定向页面
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
# LANGUAGE_CODE = 'zh-Hans'
# TIME_ZONE = 'UTC'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Pagination
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10
}
| 25.540741
| 91
| 0.697506
|
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_DIR, 'tutorial'))
SECRET_KEY = '5nrnq$kj)nxh9@u-v#n83(9d4@x1r-vk%+%_5)!6$1smwy2-y&'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'tutorial.snippets',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tutorial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# LOGIN_REDIRECT_URL = '/admin' # 登录后重定向页面
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
# LANGUAGE_CODE = 'zh-Hans'
# TIME_ZONE = 'UTC'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Pagination
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10
}
| true
| true
|
1c4a6d92ab4a6582c87d304e6fe65c32e55535a7
| 17,478
|
py
|
Python
|
google/cloud/iap/v1/iap-v1-py/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 7
|
2021-02-21T10:39:41.000Z
|
2021-12-07T07:31:28.000Z
|
google/cloud/iap/v1/iap-v1-py/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 6
|
2021-02-02T23:46:11.000Z
|
2021-11-15T01:46:02.000Z
|
google/cloud/iap/v1/iap-v1-py/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 4
|
2021-01-28T23:25:45.000Z
|
2021-08-30T01:55:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.iap_v1.types import service
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from .base import IdentityAwareProxyAdminServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import IdentityAwareProxyAdminServiceGrpcTransport
class IdentityAwareProxyAdminServiceGrpcAsyncIOTransport(IdentityAwareProxyAdminServiceTransport):
"""gRPC AsyncIO backend transport for IdentityAwareProxyAdminService.
APIs for Identity-Aware Proxy Admin configurations.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def set_iam_policy(self) -> Callable[
[iam_policy_pb2.SetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the access control policy for an Identity-Aware Proxy
protected resource. Replaces any existing policy. More
information about managing access via IAP can be found at:
https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
Returns:
Callable[[~.SetIamPolicyRequest],
Awaitable[~.Policy]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'set_iam_policy' not in self._stubs:
self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/SetIamPolicy',
request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['set_iam_policy']
@property
def get_iam_policy(self) -> Callable[
[iam_policy_pb2.GetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
r"""Return a callable for the get iam policy method over gRPC.
Gets the access control policy for an Identity-Aware Proxy
protected resource. More information about managing access via
IAP can be found at:
https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
Returns:
Callable[[~.GetIamPolicyRequest],
Awaitable[~.Policy]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_iam_policy' not in self._stubs:
self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIamPolicy',
request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['get_iam_policy']
@property
def test_iam_permissions(self) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]:
r"""Return a callable for the test iam permissions method over gRPC.
Returns permissions that a caller has on the Identity-Aware
Proxy protected resource. More information about managing access
via IAP can be found at:
https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
Returns:
Callable[[~.TestIamPermissionsRequest],
Awaitable[~.TestIamPermissionsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'test_iam_permissions' not in self._stubs:
self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/TestIamPermissions',
request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs['test_iam_permissions']
@property
def get_iap_settings(self) -> Callable[
[service.GetIapSettingsRequest],
Awaitable[service.IapSettings]]:
r"""Return a callable for the get iap settings method over gRPC.
Gets the IAP settings on a particular IAP protected
resource.
Returns:
Callable[[~.GetIapSettingsRequest],
Awaitable[~.IapSettings]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_iap_settings' not in self._stubs:
self._stubs['get_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIapSettings',
request_serializer=service.GetIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['get_iap_settings']
@property
def update_iap_settings(self) -> Callable[
[service.UpdateIapSettingsRequest],
Awaitable[service.IapSettings]]:
r"""Return a callable for the update iap settings method over gRPC.
Updates the IAP settings on a particular IAP protected resource.
It replaces all fields unless the ``update_mask`` is set.
Returns:
Callable[[~.UpdateIapSettingsRequest],
Awaitable[~.IapSettings]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_iap_settings' not in self._stubs:
self._stubs['update_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateIapSettings',
request_serializer=service.UpdateIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['update_iap_settings']
def close(self):
return self.grpc_channel.close()
__all__ = (
'IdentityAwareProxyAdminServiceGrpcAsyncIOTransport',
)
| 46.360743
| 98
| 0.639318
|
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.auth.transport.grpc import SslCredentials
import packaging.version
import grpc
from grpc.experimental import aio
from google.cloud.iap_v1.types import service
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from .base import IdentityAwareProxyAdminServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import IdentityAwareProxyAdminServiceGrpcTransport
class IdentityAwareProxyAdminServiceGrpcAsyncIOTransport(IdentityAwareProxyAdminServiceTransport):
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
credentials = False
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
return self._grpc_channel
@property
def set_iam_policy(self) -> Callable[
[iam_policy_pb2.SetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
if 'set_iam_policy' not in self._stubs:
self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/SetIamPolicy',
request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['set_iam_policy']
@property
def get_iam_policy(self) -> Callable[
[iam_policy_pb2.GetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
if 'get_iam_policy' not in self._stubs:
self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIamPolicy',
request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['get_iam_policy']
@property
def test_iam_permissions(self) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]:
if 'test_iam_permissions' not in self._stubs:
self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/TestIamPermissions',
request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs['test_iam_permissions']
@property
def get_iap_settings(self) -> Callable[
[service.GetIapSettingsRequest],
Awaitable[service.IapSettings]]:
if 'get_iap_settings' not in self._stubs:
self._stubs['get_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIapSettings',
request_serializer=service.GetIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['get_iap_settings']
@property
def update_iap_settings(self) -> Callable[
[service.UpdateIapSettingsRequest],
Awaitable[service.IapSettings]]:
if 'update_iap_settings' not in self._stubs:
self._stubs['update_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateIapSettings',
request_serializer=service.UpdateIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['update_iap_settings']
def close(self):
return self.grpc_channel.close()
__all__ = (
'IdentityAwareProxyAdminServiceGrpcAsyncIOTransport',
)
| true
| true
|
1c4a6da9e32f2fa4fbbdec55c0b23a5795f8a579
| 9,407
|
py
|
Python
|
tools/telemetry/telemetry/timeline/thread.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2019-11-28T10:46:52.000Z
|
2019-11-28T10:46:52.000Z
|
tools/telemetry/telemetry/timeline/thread.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/telemetry/telemetry/timeline/thread.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2015-03-27T11:15:39.000Z
|
2016-08-17T14:19:56.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import telemetry.timeline.async_slice as async_slice_module
import telemetry.timeline.event_container as event_container
import telemetry.timeline.flow_event as flow_event_module
import telemetry.timeline.sample as sample_module
import telemetry.timeline.slice as slice_module
class Thread(event_container.TimelineEventContainer):
''' A Thread stores all the trace events collected for a particular
thread. We organize the synchronous slices on a thread by "subrows," where
subrow 0 has all the root slices, subrow 1 those nested 1 deep, and so on.
The asynchronous slices are stored in an AsyncSliceGroup object.
'''
def __init__(self, process, tid):
super(Thread, self).__init__('thread %s' % tid, parent=process)
self.tid = tid
self._async_slices = []
self._flow_events = []
self._samples = []
self._toplevel_slices = []
self._all_slices = []
# State only valid during import.
self._open_slices = []
self._newly_added_slices = []
@property
def toplevel_slices(self):
return self._toplevel_slices
@property
def all_slices(self):
return self._all_slices
@property
def samples(self):
return self._samples
@property
def async_slices(self):
return self._async_slices
@property
def open_slice_count(self):
return len(self._open_slices)
def IterChildContainers(self):
return
yield # pylint: disable=W0101
def IterEventsInThisContainer(self, event_type_predicate, event_predicate):
if event_type_predicate(slice_module.Slice):
for s in self._newly_added_slices:
if event_predicate(s):
yield s
for s in self._all_slices:
if event_predicate(s):
yield s
if event_type_predicate(async_slice_module.AsyncSlice):
for async_slice in self._async_slices:
if event_predicate(async_slice):
yield async_slice
for sub_slice in async_slice.IterEventsInThisContainerRecrusively():
if event_predicate(sub_slice):
yield sub_slice
if event_type_predicate(flow_event_module.FlowEvent):
for flow_event in self._flow_events:
if event_predicate(flow_event):
yield flow_event
if event_type_predicate(sample_module.Sample):
for sample in self._samples:
if event_predicate(sample):
yield sample
def AddSample(self, category, name, timestamp, args=None):
if len(self._samples) and timestamp < self._samples[-1].start:
raise ValueError(
'Samples must be added in increasing timestamp order')
sample = sample_module.Sample(self,
category, name, timestamp, args=args)
self._samples.append(sample)
def AddAsyncSlice(self, async_slice):
self._async_slices.append(async_slice)
def AddFlowEvent(self, flow_event):
self._flow_events.append(flow_event)
def BeginSlice(self, category, name, timestamp, thread_timestamp=None,
args=None):
"""Opens a new slice for the thread.
Calls to beginSlice and endSlice must be made with
non-monotonically-decreasing timestamps.
* category: Category to which the slice belongs.
* name: Name of the slice to add.
* timestamp: The timetsamp of the slice, in milliseconds.
* thread_timestamp: Thread specific clock (scheduled) timestamp of the
slice, in milliseconds.
* args: Arguments associated with
Returns newly opened slice
"""
if len(self._open_slices) > 0 and timestamp < self._open_slices[-1].start:
raise ValueError(
'Slices must be added in increasing timestamp order')
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
self._open_slices.append(new_slice)
new_slice.did_not_finish = True
self.PushSlice(new_slice)
return new_slice
def EndSlice(self, end_timestamp, end_thread_timestamp=None):
""" Ends the last begun slice in this group and pushes it onto the slice
array.
* end_timestamp: Timestamp when the slice ended in milliseconds
* end_thread_timestamp: Timestamp when the scheduled time of the slice ended
in milliseconds
returns completed slice.
"""
if not len(self._open_slices):
raise ValueError(
'EndSlice called without an open slice')
curr_slice = self._open_slices.pop()
if end_timestamp < curr_slice.start:
raise ValueError(
'Slice %s end time is before its start.' % curr_slice.name)
curr_slice.duration = end_timestamp - curr_slice.start
if end_thread_timestamp != None:
if curr_slice.thread_start == None:
raise ValueError(
'EndSlice with thread_timestamp called on open slice without ' +
'thread_timestamp')
curr_slice.thread_duration = (end_thread_timestamp -
curr_slice.thread_start)
curr_slice.did_not_finish = False
return curr_slice
def PushCompleteSlice(self, category, name, timestamp, duration,
thread_timestamp, thread_duration, args=None):
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
if duration == None:
new_slice.did_not_finish = True
else:
new_slice.duration = duration
new_slice.thread_duration = thread_duration
self.PushSlice(new_slice)
return new_slice
def PushSlice(self, new_slice):
self._newly_added_slices.append(new_slice)
return new_slice
def AutoCloseOpenSlices(self, max_timestamp, max_thread_timestamp):
for s in self._newly_added_slices:
if s.did_not_finish:
s.duration = max_timestamp - s.start
assert s.duration >= 0
if s.thread_start != None:
s.thread_duration = max_thread_timestamp - s.thread_start
assert s.thread_duration >= 0
self._open_slices = []
def IsTimestampValidForBeginOrEnd(self, timestamp):
if not len(self._open_slices):
return True
return timestamp >= self._open_slices[-1].start
def FinalizeImport(self):
self._BuildSliceSubRows()
def _BuildSliceSubRows(self):
'''This function works by walking through slices by start time.
The basic idea here is to insert each slice as deep into the subrow
list as it can go such that every subslice is fully contained by its
parent slice.
Visually, if we start with this:
0: [ a ]
1: [ b ]
2: [c][d]
To place this slice:
[e]
We first check row 2's last item, [d]. [e] wont fit into [d] (they dont
even intersect). So we go to row 1. That gives us [b], and [d] wont fit
into that either. So, we go to row 0 and its last slice, [a]. That can
completely contain [e], so that means we should add [e] as a subslice
of [a]. That puts it on row 1, yielding:
0: [ a ]
1: [ b ][e]
2: [c][d]
If we then get this slice:
[f]
We do the same deepest-to-shallowest walk of the subrows trying to fit
it. This time, it doesn't fit in any open slice. So, we simply append
it to row 0 (a root slice):
0: [ a ] [f]
1: [ b ][e]
'''
def CompareSlices(s1, s2):
if s1.start == s2.start:
# Break ties by having the slice with the greatest
# end timestamp come first.
return cmp(s2.end, s1.end)
return cmp(s1.start, s2.start)
assert len(self._toplevel_slices) == 0
assert len(self._all_slices) == 0
if not len(self._newly_added_slices):
return
self._all_slices.extend(self._newly_added_slices)
sorted_slices = sorted(self._newly_added_slices, cmp=CompareSlices)
root_slice = sorted_slices[0]
self._toplevel_slices.append(root_slice)
for s in sorted_slices[1:]:
if not self._AddSliceIfBounds(root_slice, s):
root_slice = s
self._toplevel_slices.append(root_slice)
self._newly_added_slices = []
def _AddSliceIfBounds(self, root, child):
''' Adds a child slice to a root slice its proper row.
Return False if the child slice is not in the bounds
of the root slice.
Because we know that the start time of child is >= the start time
of all other slices seen so far, we can just check the last slice
of each row for bounding.
'''
# The source trace data is in microseconds but we store it as milliseconds
# in floating-point. Since we can't represent micros as millis perfectly,
# two end=start+duration combos that should be the same will be slightly
# different. Round back to micros to ensure equality below.
child_end_micros = round(child.end * 1000)
root_end_micros = round(root.end * 1000)
if child.start >= root.start and child_end_micros <= root_end_micros:
if len(root.sub_slices) > 0:
if self._AddSliceIfBounds(root.sub_slices[-1], child):
return True
child.parent_slice = root
root.AddSubSlice(child)
return True
return False
| 35.90458
| 80
| 0.672691
|
import telemetry.timeline.async_slice as async_slice_module
import telemetry.timeline.event_container as event_container
import telemetry.timeline.flow_event as flow_event_module
import telemetry.timeline.sample as sample_module
import telemetry.timeline.slice as slice_module
class Thread(event_container.TimelineEventContainer):
def __init__(self, process, tid):
super(Thread, self).__init__('thread %s' % tid, parent=process)
self.tid = tid
self._async_slices = []
self._flow_events = []
self._samples = []
self._toplevel_slices = []
self._all_slices = []
self._open_slices = []
self._newly_added_slices = []
@property
def toplevel_slices(self):
return self._toplevel_slices
@property
def all_slices(self):
return self._all_slices
@property
def samples(self):
return self._samples
@property
def async_slices(self):
return self._async_slices
@property
def open_slice_count(self):
return len(self._open_slices)
def IterChildContainers(self):
return
yield
def IterEventsInThisContainer(self, event_type_predicate, event_predicate):
if event_type_predicate(slice_module.Slice):
for s in self._newly_added_slices:
if event_predicate(s):
yield s
for s in self._all_slices:
if event_predicate(s):
yield s
if event_type_predicate(async_slice_module.AsyncSlice):
for async_slice in self._async_slices:
if event_predicate(async_slice):
yield async_slice
for sub_slice in async_slice.IterEventsInThisContainerRecrusively():
if event_predicate(sub_slice):
yield sub_slice
if event_type_predicate(flow_event_module.FlowEvent):
for flow_event in self._flow_events:
if event_predicate(flow_event):
yield flow_event
if event_type_predicate(sample_module.Sample):
for sample in self._samples:
if event_predicate(sample):
yield sample
def AddSample(self, category, name, timestamp, args=None):
if len(self._samples) and timestamp < self._samples[-1].start:
raise ValueError(
'Samples must be added in increasing timestamp order')
sample = sample_module.Sample(self,
category, name, timestamp, args=args)
self._samples.append(sample)
def AddAsyncSlice(self, async_slice):
self._async_slices.append(async_slice)
def AddFlowEvent(self, flow_event):
self._flow_events.append(flow_event)
def BeginSlice(self, category, name, timestamp, thread_timestamp=None,
args=None):
if len(self._open_slices) > 0 and timestamp < self._open_slices[-1].start:
raise ValueError(
'Slices must be added in increasing timestamp order')
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
self._open_slices.append(new_slice)
new_slice.did_not_finish = True
self.PushSlice(new_slice)
return new_slice
def EndSlice(self, end_timestamp, end_thread_timestamp=None):
if not len(self._open_slices):
raise ValueError(
'EndSlice called without an open slice')
curr_slice = self._open_slices.pop()
if end_timestamp < curr_slice.start:
raise ValueError(
'Slice %s end time is before its start.' % curr_slice.name)
curr_slice.duration = end_timestamp - curr_slice.start
if end_thread_timestamp != None:
if curr_slice.thread_start == None:
raise ValueError(
'EndSlice with thread_timestamp called on open slice without ' +
'thread_timestamp')
curr_slice.thread_duration = (end_thread_timestamp -
curr_slice.thread_start)
curr_slice.did_not_finish = False
return curr_slice
def PushCompleteSlice(self, category, name, timestamp, duration,
thread_timestamp, thread_duration, args=None):
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
if duration == None:
new_slice.did_not_finish = True
else:
new_slice.duration = duration
new_slice.thread_duration = thread_duration
self.PushSlice(new_slice)
return new_slice
def PushSlice(self, new_slice):
self._newly_added_slices.append(new_slice)
return new_slice
def AutoCloseOpenSlices(self, max_timestamp, max_thread_timestamp):
for s in self._newly_added_slices:
if s.did_not_finish:
s.duration = max_timestamp - s.start
assert s.duration >= 0
if s.thread_start != None:
s.thread_duration = max_thread_timestamp - s.thread_start
assert s.thread_duration >= 0
self._open_slices = []
def IsTimestampValidForBeginOrEnd(self, timestamp):
if not len(self._open_slices):
return True
return timestamp >= self._open_slices[-1].start
def FinalizeImport(self):
self._BuildSliceSubRows()
def _BuildSliceSubRows(self):
def CompareSlices(s1, s2):
if s1.start == s2.start:
return cmp(s2.end, s1.end)
return cmp(s1.start, s2.start)
assert len(self._toplevel_slices) == 0
assert len(self._all_slices) == 0
if not len(self._newly_added_slices):
return
self._all_slices.extend(self._newly_added_slices)
sorted_slices = sorted(self._newly_added_slices, cmp=CompareSlices)
root_slice = sorted_slices[0]
self._toplevel_slices.append(root_slice)
for s in sorted_slices[1:]:
if not self._AddSliceIfBounds(root_slice, s):
root_slice = s
self._toplevel_slices.append(root_slice)
self._newly_added_slices = []
def _AddSliceIfBounds(self, root, child):
# two end=start+duration combos that should be the same will be slightly
# different. Round back to micros to ensure equality below.
child_end_micros = round(child.end * 1000)
root_end_micros = round(root.end * 1000)
if child.start >= root.start and child_end_micros <= root_end_micros:
if len(root.sub_slices) > 0:
if self._AddSliceIfBounds(root.sub_slices[-1], child):
return True
child.parent_slice = root
root.AddSubSlice(child)
return True
return False
| true
| true
|
1c4a6de9d76149804cfadbfe3dc687ed9ec8343a
| 1,570
|
py
|
Python
|
src/runners/utils.py
|
timfletch/SnowAlert
|
7736ae4e7e1c8d3d4be34f8e360eddea53c49d2c
|
[
"Apache-2.0"
] | null | null | null |
src/runners/utils.py
|
timfletch/SnowAlert
|
7736ae4e7e1c8d3d4be34f8e360eddea53c49d2c
|
[
"Apache-2.0"
] | null | null | null |
src/runners/utils.py
|
timfletch/SnowAlert
|
7736ae4e7e1c8d3d4be34f8e360eddea53c49d2c
|
[
"Apache-2.0"
] | null | null | null |
from datetime import date, datetime
import inspect
from itertools import zip_longest
import json
import traceback
from types import GeneratorType
# Sentinel marking "no fill value supplied" so that None remains usable.
NO_FILL = object()
def groups_of(n, iterable, fillvalue=NO_FILL):
    """Lazily yield tuples of up to *n* consecutive items from *iterable*.

    The final tuple is short when no *fillvalue* is given; otherwise it is
    padded to length *n* with *fillvalue*.
    """
    chunks = zip_longest(*([iter(iterable)] * n), fillvalue=fillvalue)
    return (tuple(item for item in chunk if item is not NO_FILL)
            for chunk in chunks)
def format_exception(e):
    """Return the full traceback of exception *e* as one string."""
    parts = traceback.format_exception(type(e), e, e.__traceback__)
    return ''.join(parts)
def format_exception_only(e):
    """Return just the "Type: message" line for exception *e*, stripped."""
    lines = traceback.format_exception_only(type(e), e)
    return ''.join(lines).strip()
def json_dumps(obj):
    """Serialize *obj* to JSON, tolerating values json can't natively encode.

    Exceptions become a structured dict, dates/datetimes use ISO-8601,
    objects with a ``raw`` attribute are unwrapped, objects with a
    ``to_json`` method are re-parsed from their own JSON, generators are
    materialized, and anything else falls back to ``repr``.
    """
    def default_json_dumps(x):
        if isinstance(x, Exception):
            return {
                "traceback": format_exception(x),
                "exception": format_exception_only(x),
                "exceptionName": x.__class__.__name__,
                "exceptionArgs": x.args,
            }
        if isinstance(x, (date, datetime)):
            return x.isoformat()
        if hasattr(x, 'raw'):
            return default_json_dumps(x.raw)
        if callable(getattr(x, 'to_json', None)):
            # BUG FIX: the json module has no ``parse`` function — calling
            # it raised AttributeError; ``loads`` is the parser.
            return json.loads(x.to_json())
        if type(x) is GeneratorType:
            return list(x)
        return repr(x)
    return json.dumps(obj, default=default_json_dumps)
def apply_some(f, **kwargs):
    """Call *f* with only those keyword arguments its signature accepts.

    Declared defaults of *f* are used for parameters absent from *kwargs*;
    extra keys in *kwargs* are silently ignored.
    """
    spec = inspect.getfullargspec(f)
    # Pair trailing parameters with their defaults (defaults align from
    # the right of the argument list).
    call_args = dict(zip(reversed(spec.args), reversed(spec.defaults or ())))
    call_args.update({name: kwargs[name] for name in spec.args if name in kwargs})
    return f(**call_args)
| 27.068966
| 76
| 0.642675
|
from datetime import date, datetime
import inspect
from itertools import zip_longest
import json
import traceback
from types import GeneratorType
NO_FILL = object()
def groups_of(n, iterable, fillvalue=NO_FILL):
args = [iter(iterable)] * n
rets = zip_longest(*args, fillvalue=fillvalue)
return (tuple(l for l in ret if l is not NO_FILL) for ret in rets)
def format_exception(e):
return ''.join(traceback.format_exception(type(e), e, e.__traceback__))
def format_exception_only(e):
return ''.join(traceback.format_exception_only(type(e), e)).strip()
def json_dumps(obj):
def default_json_dumps(x):
if isinstance(x, Exception):
return {
"traceback": format_exception(x),
"exception": format_exception_only(x),
"exceptionName": x.__class__.__name__,
"exceptionArgs": x.args,
}
if isinstance(x, (date, datetime)):
return x.isoformat()
if hasattr(x, 'raw'):
return default_json_dumps(x.raw)
if callable(getattr(x, 'to_json', None)):
return json.parse(x.to_json())
if type(x) is GeneratorType:
return list(x)
return repr(x)
return json.dumps(obj, default=default_json_dumps)
def apply_some(f, **kwargs):
spec = inspect.getfullargspec(f)
defaults = dict(zip(reversed(spec.args), reversed(spec.defaults or ())))
passed_in = {arg: kwargs[arg] for arg in spec.args if arg in kwargs}
defaults.update(passed_in)
return f(**defaults)
| true
| true
|
1c4a7040f025b55e78f539572f49045fceb53255
| 11,790
|
py
|
Python
|
pyleecan/Classes/CondType12.py
|
jgdedamas/pyleecan
|
52ca00b36bbf1a1ba24ae722cf72c5e8e8e16395
|
[
"Apache-2.0"
] | null | null | null |
pyleecan/Classes/CondType12.py
|
jgdedamas/pyleecan
|
52ca00b36bbf1a1ba24ae722cf72c5e8e8e16395
|
[
"Apache-2.0"
] | null | null | null |
pyleecan/Classes/CondType12.py
|
jgdedamas/pyleecan
|
52ca00b36bbf1a1ba24ae722cf72c5e8e8e16395
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# File generated according to Generator/ClassesRef/Machine/CondType12.csv
# WARNING! All changes made in this file will be lost!
"""Method code available at https://github.com/Eomys/pyleecan/tree/master/pyleecan/Methods/Machine/CondType12
"""
from os import linesep
from sys import getsizeof
from logging import getLogger
from ._check import check_var, raise_
from ..Functions.get_logger import get_logger
from ..Functions.save import save
from ..Functions.copy import copy
from ..Functions.load import load_init_dict
from ..Functions.Load.import_class import import_class
from .Conductor import Conductor
# Import all class method
# Try/catch to remove unnecessary dependencies in unused method
try:
from ..Methods.Machine.CondType12.check import check
except ImportError as error:
check = error
try:
from ..Methods.Machine.CondType12.comp_surface_active import comp_surface_active
except ImportError as error:
comp_surface_active = error
try:
from ..Methods.Machine.CondType12.comp_height import comp_height
except ImportError as error:
comp_height = error
try:
from ..Methods.Machine.CondType12.comp_surface import comp_surface
except ImportError as error:
comp_surface = error
try:
from ..Methods.Machine.CondType12.comp_width import comp_width
except ImportError as error:
comp_width = error
try:
from ..Methods.Machine.CondType12.plot import plot
except ImportError as error:
plot = error
try:
from ..Methods.Machine.CondType12.plot_schematics import plot_schematics
except ImportError as error:
plot_schematics = error
from ._check import InitUnKnowClassError
from .Material import Material
class CondType12(Conductor):
    """Parallel stranded conductor consisting of at least a single round wire."""
    # Schema version of this generated class (see the generator CSV).
    VERSION = 1
    # Check ImportError to remove unnecessary dependencies in unused method
    # cf Methods.Machine.CondType12.check
    if isinstance(check, ImportError):
        check = property(
            fget=lambda x: raise_(
                ImportError("Can't use CondType12 method check: " + str(check))
            )
        )
    else:
        check = check
    # cf Methods.Machine.CondType12.comp_surface_active
    if isinstance(comp_surface_active, ImportError):
        comp_surface_active = property(
            fget=lambda x: raise_(
                ImportError(
                    "Can't use CondType12 method comp_surface_active: "
                    + str(comp_surface_active)
                )
            )
        )
    else:
        comp_surface_active = comp_surface_active
    # cf Methods.Machine.CondType12.comp_height
    if isinstance(comp_height, ImportError):
        comp_height = property(
            fget=lambda x: raise_(
                ImportError(
                    "Can't use CondType12 method comp_height: " + str(comp_height)
                )
            )
        )
    else:
        comp_height = comp_height
    # cf Methods.Machine.CondType12.comp_surface
    if isinstance(comp_surface, ImportError):
        comp_surface = property(
            fget=lambda x: raise_(
                ImportError(
                    "Can't use CondType12 method comp_surface: " + str(comp_surface)
                )
            )
        )
    else:
        comp_surface = comp_surface
    # cf Methods.Machine.CondType12.comp_width
    if isinstance(comp_width, ImportError):
        comp_width = property(
            fget=lambda x: raise_(
                ImportError(
                    "Can't use CondType12 method comp_width: " + str(comp_width)
                )
            )
        )
    else:
        comp_width = comp_width
    # cf Methods.Machine.CondType12.plot
    if isinstance(plot, ImportError):
        plot = property(
            fget=lambda x: raise_(
                ImportError("Can't use CondType12 method plot: " + str(plot))
            )
        )
    else:
        plot = plot
    # cf Methods.Machine.CondType12.plot_schematics
    if isinstance(plot_schematics, ImportError):
        plot_schematics = property(
            fget=lambda x: raise_(
                ImportError(
                    "Can't use CondType12 method plot_schematics: "
                    + str(plot_schematics)
                )
            )
        )
    else:
        plot_schematics = plot_schematics
    # save and copy methods are available in all object
    save = save
    copy = copy
    # get_logger method is available in all object
    get_logger = get_logger
    def __init__(
        self,
        Wwire=0.015,
        Wins_cond=0.015,
        Nwppc=1,
        Wins_wire=0,
        Kwoh=0.5,
        cond_mat=-1,
        ins_mat=-1,
        init_dict=None,
        init_str=None,
    ):
        """Constructor of the class. Can be used in three ways :
        - __init__ (arg1 = 1, arg3 = 5) every parameter has a name and default value
          for pyleecan type, -1 will call the default constructor
        - __init__ (init_dict = d) d must be a dictionary with property names as keys
        - __init__ (init_str = s) s must be a string
          s is the file path to load
        ndarray or list can be given for Vector and Matrix
        object or dict can be given for pyleecan Object"""
        if init_str is not None:  # Load from a file
            init_dict = load_init_dict(init_str)[1]
        if init_dict is not None:  # Initialisation by dict
            assert type(init_dict) is dict
            # Overwrite default value with init_dict content
            if "Wwire" in list(init_dict.keys()):
                Wwire = init_dict["Wwire"]
            if "Wins_cond" in list(init_dict.keys()):
                Wins_cond = init_dict["Wins_cond"]
            if "Nwppc" in list(init_dict.keys()):
                Nwppc = init_dict["Nwppc"]
            if "Wins_wire" in list(init_dict.keys()):
                Wins_wire = init_dict["Wins_wire"]
            if "Kwoh" in list(init_dict.keys()):
                Kwoh = init_dict["Kwoh"]
            if "cond_mat" in list(init_dict.keys()):
                cond_mat = init_dict["cond_mat"]
            if "ins_mat" in list(init_dict.keys()):
                ins_mat = init_dict["ins_mat"]
        # Set the properties (value check and convertion are done in setter)
        self.Wwire = Wwire
        self.Wins_cond = Wins_cond
        self.Nwppc = Nwppc
        self.Wins_wire = Wins_wire
        self.Kwoh = Kwoh
        # Call Conductor init
        super(CondType12, self).__init__(cond_mat=cond_mat, ins_mat=ins_mat)
        # The class is frozen (in Conductor init), for now it's impossible to
        # add new properties
    def __str__(self):
        """Convert this object in a readable string (for print)"""
        CondType12_str = ""
        # Get the properties inherited from Conductor
        CondType12_str += super(CondType12, self).__str__()
        CondType12_str += "Wwire = " + str(self.Wwire) + linesep
        CondType12_str += "Wins_cond = " + str(self.Wins_cond) + linesep
        CondType12_str += "Nwppc = " + str(self.Nwppc) + linesep
        CondType12_str += "Wins_wire = " + str(self.Wins_wire) + linesep
        CondType12_str += "Kwoh = " + str(self.Kwoh) + linesep
        return CondType12_str
    def __eq__(self, other):
        """Compare two objects (skip parent)"""
        if type(other) != type(self):
            return False
        # Check the properties inherited from Conductor
        if not super(CondType12, self).__eq__(other):
            return False
        if other.Wwire != self.Wwire:
            return False
        if other.Wins_cond != self.Wins_cond:
            return False
        if other.Nwppc != self.Nwppc:
            return False
        if other.Wins_wire != self.Wins_wire:
            return False
        if other.Kwoh != self.Kwoh:
            return False
        return True
    def __sizeof__(self):
        """Return the size in memory of the object (including all subobject)"""
        S = 0  # Full size of the object
        # Get size of the properties inherited from Conductor
        S += super(CondType12, self).__sizeof__()
        S += getsizeof(self.Wwire)
        S += getsizeof(self.Wins_cond)
        S += getsizeof(self.Nwppc)
        S += getsizeof(self.Wins_wire)
        S += getsizeof(self.Kwoh)
        return S
    def as_dict(self):
        """Convert this object in a json serializable dict (can be used in __init__)"""
        # Get the properties inherited from Conductor
        CondType12_dict = super(CondType12, self).as_dict()
        CondType12_dict["Wwire"] = self.Wwire
        CondType12_dict["Wins_cond"] = self.Wins_cond
        CondType12_dict["Nwppc"] = self.Nwppc
        CondType12_dict["Wins_wire"] = self.Wins_wire
        CondType12_dict["Kwoh"] = self.Kwoh
        # The class name is added to the dict for deserialisation purpose
        # Overwrite the mother class name
        CondType12_dict["__class__"] = "CondType12"
        return CondType12_dict
    def _set_None(self):
        """Set all the properties to None (except pyleecan object)"""
        self.Wwire = None
        self.Wins_cond = None
        self.Nwppc = None
        self.Wins_wire = None
        self.Kwoh = None
        # Set to None the properties inherited from Conductor
        super(CondType12, self)._set_None()
    def _get_Wwire(self):
        """getter of Wwire"""
        return self._Wwire
    def _set_Wwire(self, value):
        """setter of Wwire"""
        check_var("Wwire", value, "float", Vmin=0)
        self._Wwire = value
    Wwire = property(
        fget=_get_Wwire,
        fset=_set_Wwire,
        doc=u"""cf schematics, single wire diameter without insulation [m]
        :Type: float
        :min: 0
        """,
    )
    def _get_Wins_cond(self):
        """getter of Wins_cond"""
        return self._Wins_cond
    def _set_Wins_cond(self, value):
        """setter of Wins_cond"""
        check_var("Wins_cond", value, "float", Vmin=0)
        self._Wins_cond = value
    Wins_cond = property(
        fget=_get_Wins_cond,
        fset=_set_Wins_cond,
        doc=u"""(advanced) cf schematics, winding coil insulation diameter [m]
        :Type: float
        :min: 0
        """,
    )
    def _get_Nwppc(self):
        """getter of Nwppc"""
        return self._Nwppc
    def _set_Nwppc(self, value):
        """setter of Nwppc"""
        check_var("Nwppc", value, "int", Vmin=1)
        self._Nwppc = value
    Nwppc = property(
        fget=_get_Nwppc,
        fset=_set_Nwppc,
        doc=u"""cf schematics, winding number of random wires (strands) in parallel per coil
        :Type: int
        :min: 1
        """,
    )
    def _get_Wins_wire(self):
        """getter of Wins_wire"""
        return self._Wins_wire
    def _set_Wins_wire(self, value):
        """setter of Wins_wire"""
        check_var("Wins_wire", value, "float", Vmin=0)
        self._Wins_wire = value
    Wins_wire = property(
        fget=_get_Wins_wire,
        fset=_set_Wins_wire,
        doc=u"""(advanced) cf schematics, winding strand insulation thickness [m]
        :Type: float
        :min: 0
        """,
    )
    def _get_Kwoh(self):
        """getter of Kwoh"""
        return self._Kwoh
    def _set_Kwoh(self, value):
        """setter of Kwoh"""
        check_var("Kwoh", value, "float", Vmin=0)
        self._Kwoh = value
    Kwoh = property(
        fget=_get_Kwoh,
        fset=_set_Kwoh,
        doc=u"""winding overhang factor which describes the fact that random round wire end-windings can be more or less compressed (0.5 for small motors, 0.8 for large motors) - can be used to tune the average turn length (relevant if type_cond==1)
        :Type: float
        :min: 0
        """,
    )
| 32.125341
| 249
| 0.609669
|
from os import linesep
from sys import getsizeof
from logging import getLogger
from ._check import check_var, raise_
from ..Functions.get_logger import get_logger
from ..Functions.save import save
from ..Functions.copy import copy
from ..Functions.load import load_init_dict
from ..Functions.Load.import_class import import_class
from .Conductor import Conductor
try:
from ..Methods.Machine.CondType12.check import check
except ImportError as error:
check = error
try:
from ..Methods.Machine.CondType12.comp_surface_active import comp_surface_active
except ImportError as error:
comp_surface_active = error
try:
from ..Methods.Machine.CondType12.comp_height import comp_height
except ImportError as error:
comp_height = error
try:
from ..Methods.Machine.CondType12.comp_surface import comp_surface
except ImportError as error:
comp_surface = error
try:
from ..Methods.Machine.CondType12.comp_width import comp_width
except ImportError as error:
comp_width = error
try:
from ..Methods.Machine.CondType12.plot import plot
except ImportError as error:
plot = error
try:
from ..Methods.Machine.CondType12.plot_schematics import plot_schematics
except ImportError as error:
plot_schematics = error
from ._check import InitUnKnowClassError
from .Material import Material
class CondType12(Conductor):
VERSION = 1
if isinstance(check, ImportError):
check = property(
fget=lambda x: raise_(
ImportError("Can't use CondType12 method check: " + str(check))
)
)
else:
check = check
# cf Methods.Machine.CondType12.comp_surface_active
if isinstance(comp_surface_active, ImportError):
comp_surface_active = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_surface_active: "
+ str(comp_surface_active)
)
)
)
else:
comp_surface_active = comp_surface_active
if isinstance(comp_height, ImportError):
comp_height = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_height: " + str(comp_height)
)
)
)
else:
comp_height = comp_height
# cf Methods.Machine.CondType12.comp_surface
if isinstance(comp_surface, ImportError):
comp_surface = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_surface: " + str(comp_surface)
)
)
)
else:
comp_surface = comp_surface
if isinstance(comp_width, ImportError):
comp_width = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_width: " + str(comp_width)
)
)
)
else:
comp_width = comp_width
# cf Methods.Machine.CondType12.plot
if isinstance(plot, ImportError):
plot = property(
fget=lambda x: raise_(
ImportError("Can't use CondType12 method plot: " + str(plot))
)
)
else:
plot = plot
if isinstance(plot_schematics, ImportError):
plot_schematics = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method plot_schematics: "
+ str(plot_schematics)
)
)
)
else:
plot_schematics = plot_schematics
# save and copy methods are available in all object
save = save
copy = copy
# get_logger method is available in all object
get_logger = get_logger
def __init__(
self,
Wwire=0.015,
Wins_cond=0.015,
Nwppc=1,
Wins_wire=0,
Kwoh=0.5,
cond_mat=-1,
ins_mat=-1,
init_dict=None,
init_str=None,
):
if init_str is not None: # Load from a file
init_dict = load_init_dict(init_str)[1]
if init_dict is not None: # Initialisation by dict
assert type(init_dict) is dict
# Overwrite default value with init_dict content
if "Wwire" in list(init_dict.keys()):
Wwire = init_dict["Wwire"]
if "Wins_cond" in list(init_dict.keys()):
Wins_cond = init_dict["Wins_cond"]
if "Nwppc" in list(init_dict.keys()):
Nwppc = init_dict["Nwppc"]
if "Wins_wire" in list(init_dict.keys()):
Wins_wire = init_dict["Wins_wire"]
if "Kwoh" in list(init_dict.keys()):
Kwoh = init_dict["Kwoh"]
if "cond_mat" in list(init_dict.keys()):
cond_mat = init_dict["cond_mat"]
if "ins_mat" in list(init_dict.keys()):
ins_mat = init_dict["ins_mat"]
# Set the properties (value check and convertion are done in setter)
self.Wwire = Wwire
self.Wins_cond = Wins_cond
self.Nwppc = Nwppc
self.Wins_wire = Wins_wire
self.Kwoh = Kwoh
# Call Conductor init
super(CondType12, self).__init__(cond_mat=cond_mat, ins_mat=ins_mat)
# The class is frozen (in Conductor init), for now it's impossible to
def __str__(self):
CondType12_str = ""
CondType12_str += super(CondType12, self).__str__()
CondType12_str += "Wwire = " + str(self.Wwire) + linesep
CondType12_str += "Wins_cond = " + str(self.Wins_cond) + linesep
CondType12_str += "Nwppc = " + str(self.Nwppc) + linesep
CondType12_str += "Wins_wire = " + str(self.Wins_wire) + linesep
CondType12_str += "Kwoh = " + str(self.Kwoh) + linesep
return CondType12_str
def __eq__(self, other):
if type(other) != type(self):
return False
if not super(CondType12, self).__eq__(other):
return False
if other.Wwire != self.Wwire:
return False
if other.Wins_cond != self.Wins_cond:
return False
if other.Nwppc != self.Nwppc:
return False
if other.Wins_wire != self.Wins_wire:
return False
if other.Kwoh != self.Kwoh:
return False
return True
def __sizeof__(self):
S = 0
S += super(CondType12, self).__sizeof__()
S += getsizeof(self.Wwire)
S += getsizeof(self.Wins_cond)
S += getsizeof(self.Nwppc)
S += getsizeof(self.Wins_wire)
S += getsizeof(self.Kwoh)
return S
def as_dict(self):
CondType12_dict = super(CondType12, self).as_dict()
CondType12_dict["Wwire"] = self.Wwire
CondType12_dict["Wins_cond"] = self.Wins_cond
CondType12_dict["Nwppc"] = self.Nwppc
CondType12_dict["Wins_wire"] = self.Wins_wire
CondType12_dict["Kwoh"] = self.Kwoh
CondType12_dict["__class__"] = "CondType12"
return CondType12_dict
def _set_None(self):
self.Wwire = None
self.Wins_cond = None
self.Nwppc = None
self.Wins_wire = None
self.Kwoh = None
super(CondType12, self)._set_None()
def _get_Wwire(self):
return self._Wwire
def _set_Wwire(self, value):
check_var("Wwire", value, "float", Vmin=0)
self._Wwire = value
Wwire = property(
fget=_get_Wwire,
fset=_set_Wwire,
doc=u"""cf schematics, single wire diameter without insulation [m]
:Type: float
:min: 0
""",
)
def _get_Wins_cond(self):
return self._Wins_cond
def _set_Wins_cond(self, value):
check_var("Wins_cond", value, "float", Vmin=0)
self._Wins_cond = value
Wins_cond = property(
fget=_get_Wins_cond,
fset=_set_Wins_cond,
doc=u"""(advanced) cf schematics, winding coil insulation diameter [m]
:Type: float
:min: 0
""",
)
def _get_Nwppc(self):
return self._Nwppc
def _set_Nwppc(self, value):
check_var("Nwppc", value, "int", Vmin=1)
self._Nwppc = value
Nwppc = property(
fget=_get_Nwppc,
fset=_set_Nwppc,
doc=u"""cf schematics, winding number of random wires (strands) in parallel per coil
:Type: int
:min: 1
""",
)
def _get_Wins_wire(self):
return self._Wins_wire
def _set_Wins_wire(self, value):
check_var("Wins_wire", value, "float", Vmin=0)
self._Wins_wire = value
Wins_wire = property(
fget=_get_Wins_wire,
fset=_set_Wins_wire,
doc=u"""(advanced) cf schematics, winding strand insulation thickness [m]
:Type: float
:min: 0
""",
)
def _get_Kwoh(self):
return self._Kwoh
def _set_Kwoh(self, value):
check_var("Kwoh", value, "float", Vmin=0)
self._Kwoh = value
Kwoh = property(
fget=_get_Kwoh,
fset=_set_Kwoh,
doc=u"""winding overhang factor which describes the fact that random round wire end-windings can be more or less compressed (0.5 for small motors, 0.8 for large motors) - can be used to tune the average turn length (relevant if type_cond==1)
:Type: float
:min: 0
""",
)
| true
| true
|
1c4a704defde2f07302fed79149db9bb0af194e7
| 4,779
|
py
|
Python
|
test/functional/interface_http.py
|
kyancoin/KYAN
|
39174bd5add8a41a82ca53e5f1372e4c0a58f447
|
[
"MIT"
] | 6
|
2020-09-17T04:29:33.000Z
|
2021-08-08T16:39:10.000Z
|
test/functional/interface_http.py
|
sapphire-pt/KYAN
|
0c534d9a10a8d07d3707c74e6ea93477857b5ec7
|
[
"MIT"
] | 22
|
2020-07-31T20:01:16.000Z
|
2020-08-13T09:58:21.000Z
|
test/functional/interface_http.py
|
kyancoin/KYAN
|
39174bd5add8a41a82ca53e5f1372e4c0a58f447
|
[
"MIT"
] | 4
|
2020-09-17T22:32:25.000Z
|
2022-01-12T20:49:24.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RPC HTTP basics."""
from test_framework.test_framework import PivxTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (PivxTestFramework):
    """Exercise the RPC HTTP layer: persistent connections, Connection
    headers, keep-alive node options, and oversized request handling."""
    def set_test_params(self):
        """Three nodes: default, keep-alive disabled, keep-alive default."""
        self.num_nodes = 3
    def setup_network(self):
        """No P2P topology needed; just start the nodes."""
        self.setup_nodes()
    def run_test(self):
        """Send raw HTTP requests and check connection reuse behaviour."""
        #################################################
        # lowlevel check for http persistent connection #
        #################################################
        url = urllib.parse.urlparse(self.nodes[0].url)
        authpair = url.username + ':' + url.password
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock!=None)  # per HTTP/1.1 the connection must still be open!
        # send a 2nd request without closing the connection
        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)  # must also respond with a correct JSON-RPC message
        assert(conn.sock!=None)  # per HTTP/1.1 the connection must still be open!
        conn.close()
        # same result expected with an explicit keep-alive header, since
        # keep-alive is the standard behaviour
        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock!=None)  # per HTTP/1.1 the connection must still be open!
        # send a 2nd request without closing the connection
        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)  # must also respond with a correct JSON-RPC message
        assert(conn.sock!=None)  # per HTTP/1.1 the connection must still be open!
        conn.close()
        # now do the same with "Connection: close"
        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock==None)  # now the connection must be closed after the response
        # node1 (2nd node) is running with disabled keep-alive option
        urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
        authpair = urlNode1.username + ':' + urlNode1.password
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        # node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
        urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
        authpair = urlNode2.username + ':' + urlNode2.password
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock!=None)  # connection must stay OPEN because the node uses keep-alive by default (original comment said "closed", contradicting the assert)
        # Check excessive request size
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('GET', '/' + ('x'*1000), '', headers)
        out1 = conn.getresponse()
        assert_equal(out1.status, http.client.NOT_FOUND)
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('GET', '/' + ('x'*10000), '', headers)
        out1 = conn.getresponse()
        assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| 43.844037
| 108
| 0.632768
|
from test_framework.test_framework import PivxTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (PivxTestFramework):
def set_test_params(self):
self.num_nodes = 3
def setup_network(self):
self.setup_nodes()
def run_test(self):
thpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| true
| true
|
1c4a70cf289dd3128b34f541efdf029286cbd0d2
| 1,070
|
py
|
Python
|
data/migrations/0012_auto_20191005_0731.py
|
SIXMON/peps
|
48c09a951a0193ada7b91c8bb6efc4b1232c3520
|
[
"MIT"
] | 5
|
2019-08-29T13:55:47.000Z
|
2021-11-15T08:30:33.000Z
|
data/migrations/0012_auto_20191005_0731.py
|
SIXMON/peps
|
48c09a951a0193ada7b91c8bb6efc4b1232c3520
|
[
"MIT"
] | 295
|
2019-08-19T12:40:29.000Z
|
2022-01-24T14:03:20.000Z
|
data/migrations/0012_auto_20191005_0731.py
|
SIXMON/peps
|
48c09a951a0193ada7b91c8bb6efc4b1232c3520
|
[
"MIT"
] | 7
|
2020-05-27T06:28:48.000Z
|
2021-11-17T10:00:54.000Z
|
# Generated by Django 2.2.4 on 2019-10-05 07:31
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
    """Convert Practice.pest_multipliers / weed_multipliers to ArrayField(JSONField).

    Each field is removed and re-added (rather than altered), so any values
    stored in the previous column type are dropped by this migration.
    """
    dependencies = [
        ('data', '0011_auto_20191004_1505'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='practice',
            name='pest_multipliers',
        ),
        migrations.AddField(
            model_name='practice',
            name='pest_multipliers',
            field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
        ),
        migrations.RemoveField(
            model_name='practice',
            name='weed_multipliers',
        ),
        migrations.AddField(
            model_name='practice',
            name='weed_multipliers',
            field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
        ),
    ]
| 31.470588
| 155
| 0.636449
|
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data', '0011_auto_20191004_1505'),
]
operations = [
migrations.RemoveField(
model_name='practice',
name='pest_multipliers',
),
migrations.AddField(
model_name='practice',
name='pest_multipliers',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
),
migrations.RemoveField(
model_name='practice',
name='weed_multipliers',
),
migrations.AddField(
model_name='practice',
name='weed_multipliers',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
),
]
| true
| true
|
1c4a70d44d91648b29f794c7eba76c2e8bd1c7fa
| 7,104
|
py
|
Python
|
bsp_tool/branches/valve/orange_box.py
|
snake-biscuits/bsp_tool
|
f2a1ab3ff502d0ca4ade97ff6e44823d2f8f5773
|
[
"MIT"
] | 44
|
2018-07-06T04:44:02.000Z
|
2022-03-27T02:18:37.000Z
|
bsp_tool/branches/valve/orange_box.py
|
snake-biscuits/bsp_tool
|
f2a1ab3ff502d0ca4ade97ff6e44823d2f8f5773
|
[
"MIT"
] | 18
|
2018-07-25T23:19:17.000Z
|
2022-03-16T23:59:36.000Z
|
bsp_tool/branches/valve/orange_box.py
|
snake-biscuits/bsp_tool
|
f2a1ab3ff502d0ca4ade97ff6e44823d2f8f5773
|
[
"MIT"
] | 12
|
2019-09-13T21:52:51.000Z
|
2022-03-22T18:04:21.000Z
|
# https://github.com/ValveSoftware/source-sdk-2013/blob/master/sp/src/public/bspfile.h
import enum
import io
import struct
from typing import List
from .. import base
from . import source
FILE_MAGIC = b"VBSP"
BSP_VERSION = 20 # NOTE: v20 Source BSPs differ widely, since many forks are of this version
GAME_PATHS = ["Day of Defeat: Source", # TODO: full path
"G String",
"Garry's Mod",
"Half-Life 2: Episode 2",
"Half-Life 2 Update",
"NEOTOKYO",
"Portal",
"Team Fortress 2"]
GAME_VERSIONS = {GAME: BSP_VERSION for GAME in GAME_PATHS}
class LUMP(enum.Enum):
    """Lump indices of the 64-entry v20 (Orange Box) Source BSP header."""
    ENTITIES = 0
    PLANES = 1
    TEXTURE_DATA = 2
    VERTICES = 3
    VISIBILITY = 4
    NODES = 5
    TEXTURE_INFO = 6
    FACES = 7  # version 1
    LIGHTING = 8  # version 1
    OCCLUSION = 9  # version 2
    LEAVES = 10  # version 1
    FACE_IDS = 11  # TF2 branch, for mapping debug & detail prop seed
    EDGES = 12
    SURFEDGES = 13
    MODELS = 14
    WORLD_LIGHTS = 15
    LEAF_FACES = 16
    LEAF_BRUSHES = 17
    BRUSHES = 18
    BRUSH_SIDES = 19
    AREAS = 20
    AREA_PORTALS = 21
    UNUSED_22 = 22
    UNUSED_23 = 23
    UNUSED_24 = 24
    UNUSED_25 = 25
    DISPLACEMENT_INFO = 26
    ORIGINAL_FACES = 27
    PHYSICS_DISPLACEMENT = 28
    PHYSICS_COLLIDE = 29
    VERTEX_NORMALS = 30
    VERTEX_NORMAL_INDICES = 31
    DISPLACEMENT_LIGHTMAP_ALPHAS = 32  # deprecated / X360 ?
    DISPLACEMENT_VERTICES = 33
    DISPLACEMENT_LIGHTMAP_SAMPLE_POSITIONS = 34
    GAME_LUMP = 35
    LEAF_WATER_DATA = 36
    PRIMITIVES = 37
    PRIMITIVE_VERTICES = 38  # deprecated / X360 ?
    PRIMITIVE_INDICES = 39
    PAKFILE = 40
    CLIP_PORTAL_VERTICES = 41
    CUBEMAPS = 42
    TEXTURE_DATA_STRING_DATA = 43
    TEXTURE_DATA_STRING_TABLE = 44
    OVERLAYS = 45
    LEAF_MIN_DIST_TO_WATER = 46
    FACE_MACRO_TEXTURE_INFO = 47
    DISPLACEMENT_TRIS = 48
    PHYSICS_COLLIDE_SURFACE = 49  # deprecated / X360 ?
    WATER_OVERLAYS = 50  # deprecated / X360 ?
    LEAF_AMBIENT_INDEX_HDR = 51
    LEAF_AMBIENT_INDEX = 52
    LIGHTING_HDR = 53  # version 1
    WORLD_LIGHTS_HDR = 54
    LEAF_AMBIENT_LIGHTING_HDR = 55  # version 1
    LEAF_AMBIENT_LIGHTING = 56  # version 1
    XZIP_PAKFILE = 57  # deprecated / X360 ?
    FACES_HDR = 58  # version 1
    MAP_FLAGS = 59
    OVERLAY_FADES = 60
    UNUSED_61 = 61
    UNUSED_62 = 62
    UNUSED_63 = 63
# struct SourceBspHeader { char file_magic[4]; int version; SourceLumpHeader headers[64]; int revision; };
lump_header_address = {LUMP_ID: (8 + i * 16) for i, LUMP_ID in enumerate(LUMP)}
def read_lump_header(file, LUMP: enum.Enum) -> source.SourceLumpHeader:
    """Seek to & unpack the 16-byte header of the given lump from an open .bsp file."""
    file.seek(lump_header_address[LUMP])
    raw_header = struct.unpack("4I", file.read(16))  # offset, length, version, fourCC
    return source.SourceLumpHeader(*raw_header)
# a rough map of the relationships between lumps:
#
# /-> SurfEdge -> Edge -> Vertex
# Leaf -> Node -> Face -> Plane
# \-> DisplacementInfo -> DisplacementVertex
#
# ClipPortalVertices are AreaPortal geometry [citation neeeded]
# classes for each lump, in alphabetical order:
class Leaf(base.Struct):  # LUMP 10
    """Endpoint of a vis tree branch, a pocket of Faces"""
    contents: int  # contents bitflags
    cluster: int  # index of this Leaf's cluster (parent node?) (visibility?)
    area_flags: int  # area + flags (short area:9; short flags:7;)
    # area and flags are held in the same 16-bit short
    # area = leaf[2] & 0xFF80 >> 7  # 9 bits
    # flags = leaf[2] & 0x007F  # 7 bits
    # TODO: automatically split area & flags, merging back for flat()
    # why was this done when the struct is padded by one short anyway?
    mins: List[float]  # bounding box minimums along XYZ axes
    maxs: List[float]  # bounding box maximums along XYZ axes
    first_leaf_face: int  # index of first LeafFace
    num_leaf_faces: int  # number of LeafFaces
    first_leaf_brush: int  # index of first LeafBrush
    num_leaf_brushes: int  # number of LeafBrushes
    leaf_water_data_id: int  # -1 if this leaf isn't submerged
    padding: int  # should be empty
    __slots__ = ["contents", "cluster", "area_flags", "mins", "maxs",
                 "first_leaf_face", "num_leaf_faces", "first_leaf_brush",
                 "num_leaf_brushes", "leaf_water_data_id", "padding"]
    # 1 int + 8 shorts + 4 ushorts + 2 shorts = 15 fields (mins/maxs expand to 3 each)
    _format = "i8h4H2h"
    _arrays = {"mins": [*"xyz"], "maxs": [*"xyz"]}
# classes for special lumps, in alphabetical order:
class PhysicsDisplacement(list):  # LUMP 28
    """Per-displacement physics data: a list of raw bytes blobs.

    On-disk layout: uint16 count, then `count` uint16 blob sizes,
    then the blobs themselves, all little-endian.
    """

    def __init__(self, raw_lump: bytes):
        lump = io.BytesIO(raw_lump)
        count = int.from_bytes(lump.read(2), "little")
        # BUGFIX: struct.unpack returns a tuple; the old list(*struct.unpack(...))
        # spread it as multiple args to list(), a TypeError whenever count != 1
        data_sizes = list(struct.unpack(f"{count}H", lump.read(count * 2)))
        physics_data = list()
        for size in data_sizes:
            physics_data.append(lump.read(size))
        super().__init__(physics_data)

    def as_bytes(self) -> bytes:
        """Serialise back to the on-disk lump format (inverse of __init__)."""
        count = len(self).to_bytes(2, "little")
        sizes = [len(d).to_bytes(2, "little") for d in self]
        # BUGFIX: bytes.join takes a single iterable; the old
        # b"".join(count, *sizes, *self) passed multiple args (TypeError)
        return b"".join([count, *sizes, *self])
class StaticPropv10(base.Struct):  # sprp GAME LUMP (LUMP 35)
    """A version 10 static prop entry in the sprp game lump."""
    origin: List[float]  # origin.xyz
    angles: List[float]  # origin.yzx  QAngle; Z0 = East
    name_index: int  # index into GAME_LUMP.sprp.model_names
    first_leaf: int  # index into Leaf lump
    num_leafs: int  # number of Leafs after first_leaf this StaticPropv10 is in
    solid_mode: int  # collision flags enum
    skin: int  # index of this StaticProp's skin in the .mdl
    fade_distance: List[float]  # min & max distances to fade out
    lighting_origin: List[float]  # xyz position to sample lighting from
    forced_fade_scale: float  # relative to pixels used to render on-screen?
    dx_level: List[int]  # supported directX level, will not render depending on settings
    flags: int  # other flags
    lightmap: List[int]  # dimensions of this StaticProp's lightmap (GAME_LUMP.static prop lighting?)
    __slots__ = ["origin", "angles", "name_index", "first_leaf", "num_leafs",
                 "solid_mode", "skin", "fade_distance", "lighting_origin",
                 "forced_fade_scale", "dx_level", "flags", "lightmap"]
    _format = "6f3HBi6f2Hi2H"
    _arrays = {"origin": [*"xyz"], "angles": [*"yzx"], "fade_distance": ["min", "max"],
               "lighting_origin": [*"xyz"], "dx_level": ["min", "max"],
               "lightmap": ["width", "height"]}
# {"LUMP_NAME": {version: LumpClass}}
BASIC_LUMP_CLASSES = source.BASIC_LUMP_CLASSES.copy()
LUMP_CLASSES = source.LUMP_CLASSES.copy()
LUMP_CLASSES.update({"LEAVES": {1: Leaf}})
SPECIAL_LUMP_CLASSES = source.SPECIAL_LUMP_CLASSES.copy()
GAME_LUMP_HEADER = source.GAME_LUMP_HEADER
# {"lump": {version: SpecialLumpClass}}
GAME_LUMP_CLASSES = source.GAME_LUMP_CLASSES.copy()
# BUGFIX: dict.copy() is shallow, so calling .update() on the nested "sprp" dict
# would also mutate source.GAME_LUMP_CLASSES["sprp"]; rebuild the inner dict instead
GAME_LUMP_CLASSES["sprp"] = {**source.GAME_LUMP_CLASSES["sprp"],
                             7: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10),  # 7*
                             10: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10)}
methods = [*source.methods]
| 36.060914
| 106
| 0.657517
|
import enum
import io
import struct
from typing import List
from .. import base
from . import source
FILE_MAGIC = b"VBSP"
BSP_VERSION = 20
GAME_PATHS = ["Day of Defeat: Source",
"G String",
"Garry's Mod",
"Half-Life 2: Episode 2",
"Half-Life 2 Update",
"NEOTOKYO",
"Portal",
"Team Fortress 2"]
GAME_VERSIONS = {GAME: BSP_VERSION for GAME in GAME_PATHS}
class LUMP(enum.Enum):
ENTITIES = 0
PLANES = 1
TEXTURE_DATA = 2
VERTICES = 3
VISIBILITY = 4
NODES = 5
TEXTURE_INFO = 6
FACES = 7 # version 1
LIGHTING = 8 # version 1
OCCLUSION = 9 # version 2
LEAVES = 10 # version 1
FACE_IDS = 11 # TF2 branch, for mapping debug & detail prop seed
EDGES = 12
SURFEDGES = 13
MODELS = 14
WORLD_LIGHTS = 15
LEAF_FACES = 16
LEAF_BRUSHES = 17
BRUSHES = 18
BRUSH_SIDES = 19
AREAS = 20
AREA_PORTALS = 21
UNUSED_22 = 22
UNUSED_23 = 23
UNUSED_24 = 24
UNUSED_25 = 25
DISPLACEMENT_INFO = 26
ORIGINAL_FACES = 27
PHYSICS_DISPLACEMENT = 28
PHYSICS_COLLIDE = 29
VERTEX_NORMALS = 30
VERTEX_NORMAL_INDICES = 31
DISPLACEMENT_LIGHTMAP_ALPHAS = 32 # deprecated / X360 ?
DISPLACEMENT_VERTICES = 33
DISPLACEMENT_LIGHTMAP_SAMPLE_POSITIONS = 34
GAME_LUMP = 35
LEAF_WATER_DATA = 36
PRIMITIVES = 37
PRIMITIVE_VERTICES = 38 # deprecated / X360 ?
PRIMITIVE_INDICES = 39
PAKFILE = 40
CLIP_PORTAL_VERTICES = 41
CUBEMAPS = 42
TEXTURE_DATA_STRING_DATA = 43
TEXTURE_DATA_STRING_TABLE = 44
OVERLAYS = 45
LEAF_MIN_DIST_TO_WATER = 46
FACE_MACRO_TEXTURE_INFO = 47
DISPLACEMENT_TRIS = 48
PHYSICS_COLLIDE_SURFACE = 49 # deprecated / X360 ?
WATER_OVERLAYS = 50 # deprecated / X360 ?
LEAF_AMBIENT_INDEX_HDR = 51
LEAF_AMBIENT_INDEX = 52
LIGHTING_HDR = 53 # version 1
WORLD_LIGHTS_HDR = 54
LEAF_AMBIENT_LIGHTING_HDR = 55 # version 1
LEAF_AMBIENT_LIGHTING = 56 # version 1
XZIP_PAKFILE = 57 # deprecated / X360 ?
FACES_HDR = 58 # version 1
MAP_FLAGS = 59
OVERLAY_FADES = 60
UNUSED_61 = 61
UNUSED_62 = 62
UNUSED_63 = 63
# struct SourceBspHeader { char file_magic[4]; int version; SourceLumpHeader headers[64]; int revision; };
lump_header_address = {LUMP_ID: (8 + i * 16) for i, LUMP_ID in enumerate(LUMP)}
def read_lump_header(file, LUMP: enum.Enum) -> source.SourceLumpHeader:
file.seek(lump_header_address[LUMP])
offset, length, version, fourCC = struct.unpack("4I", file.read(16))
header = source.SourceLumpHeader(offset, length, version, fourCC)
return header
# a rough map of the relationships between lumps:
#
# /-> SurfEdge -> Edge -> Vertex
# Leaf -> Node -> Face -> Plane
# \-> DisplacementInfo -> DisplacementVertex
#
# ClipPortalVertices are AreaPortal geometry [citation neeeded]
# classes for each lump, in alphabetical order:
class Leaf(base.Struct): # LUMP 10
contents: int # contents bitflags
cluster: int # index of this Leaf's cluster (parent node?) (visibility?)
area_flags: int
mins: List[float]
maxs: List[float]
first_leaf_face: int
num_leaf_faces: int
first_leaf_brush: int
num_leaf_brushes: int
leaf_water_data_id: int
padding: int # should be empty
__slots__ = ["contents", "cluster", "area_flags", "mins", "maxs",
"first_leaf_face", "num_leaf_faces", "first_leaf_brush",
"num_leaf_brushes", "leaf_water_data_id", "padding"]
_format = "i8h4H2h"
_arrays = {"mins": [*"xyz"], "maxs": [*"xyz"]}
# classes for special lumps, in alphabetical order:
class PhysicsDisplacement(list):  # LUMP 28
    """Per-displacement physics data blobs (uint16 count, uint16 sizes, then blobs)."""

    def __init__(self, raw_lump: bytes):
        lump = io.BytesIO(raw_lump)
        count = int.from_bytes(lump.read(2), "little")
        # BUGFIX: struct.unpack returns a tuple; list(*tuple) passes multiple
        # args to list() and raises TypeError whenever count != 1
        data_sizes = list(struct.unpack(f"{count}H", lump.read(count * 2)))
        physics_data = list()
        for size in data_sizes:
            physics_data.append(lump.read(size))
        super().__init__(physics_data)

    def as_bytes(self) -> bytes:
        """Serialise back to the on-disk lump format (inverse of __init__)."""
        count = len(self).to_bytes(2, "little")
        sizes = [len(d).to_bytes(2, "little") for d in self]
        # BUGFIX: bytes.join takes a single iterable, not multiple arguments
        return b"".join([count, *sizes, *self])
class StaticPropv10(base.Struct): # sprp GAME LUMP (LUMP 35)
origin: List[float] # origin.xyz
angles: List[float] # origin.yzx QAngle; Z0 = East
name_index: int # index into AME_LUMP.sprp.model_names
first_leaf: int # index into Leaf lump
num_leafs: int # number of Leafs after first_leaf this StaticPropv10 is in
solid_mode: int # collision flags enum
skin: int # index of this StaticProp's skin in the .mdl
fade_distance: List[float]
lighting_origin: List[float]
forced_fade_scale: float
dx_level: List[int]
flags: int
lightmap: List[int]
__slots__ = ["origin", "angles", "name_index", "first_leaf", "num_leafs",
"solid_mode", "skin", "fade_distance", "lighting_origin",
"forced_fade_scale", "dx_level", "flags", "lightmap"]
_format = "6f3HBi6f2Hi2H"
_arrays = {"origin": [*"xyz"], "angles": [*"yzx"], "fade_distance": ["min", "max"],
"lighting_origin": [*"xyz"], "dx_level": ["min", "max"],
"lightmap": ["width", "height"]}
# {"LUMP_NAME": {version: LumpClass}}
BASIC_LUMP_CLASSES = source.BASIC_LUMP_CLASSES.copy()
LUMP_CLASSES = source.LUMP_CLASSES.copy()
LUMP_CLASSES.update({"LEAVES": {1: Leaf}})
SPECIAL_LUMP_CLASSES = source.SPECIAL_LUMP_CLASSES.copy()
GAME_LUMP_HEADER = source.GAME_LUMP_HEADER
# {"lump": {version: SpecialLumpClass}}
GAME_LUMP_CLASSES = source.GAME_LUMP_CLASSES.copy()
GAME_LUMP_CLASSES["sprp"].update({7: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10), # 7*
10: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10)})
methods = [*source.methods]
| true
| true
|
1c4a711bd8237da25790895bd0df02e797b5f2e6
| 5,306
|
py
|
Python
|
benchmark/networks/centralized_ac.py
|
HONGcalmJIN/SMARTS
|
0e2249a3bc985ee9279512d6154ce32732065835
|
[
"MIT"
] | null | null | null |
benchmark/networks/centralized_ac.py
|
HONGcalmJIN/SMARTS
|
0e2249a3bc985ee9279512d6154ce32732065835
|
[
"MIT"
] | null | null | null |
benchmark/networks/centralized_ac.py
|
HONGcalmJIN/SMARTS
|
0e2249a3bc985ee9279512d6154ce32732065835
|
[
"MIT"
] | 1
|
2022-03-31T02:14:09.000Z
|
2022-03-31T02:14:09.000Z
|
import numpy as np
from collections import OrderedDict
from typing import List, Dict
from gym import spaces
from ray.rllib.utils.framework import try_import_tf, get_activation_fn
from ray.rllib.utils.annotations import override
from ray.rllib.utils.types import ModelConfigDict, TensorType
from ray.rllib.models import ModelCatalog
from ray.rllib.models.tf.misc import normc_initializer
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.preprocessors import get_preprocessor
import tensorflow as tf
tf1, tf, tf_version = try_import_tf()
class CentralizedActorCriticModel(TFModelV2):
    """Centralized actor-critic model: a local policy network plus a value
    network whose input also covers other agents' observations/actions
    (centralized-training, decentralized-execution style).
    """

    # key under which the concatenated critic observation is passed around
    CRITIC_OBS = "critic_obs"

    def __init__(
        self,
        obs_space: spaces.Space,
        action_space: spaces.Space,
        num_outputs: int,
        model_config: ModelConfigDict,
        name: str,
    ):
        super(CentralizedActorCriticModel, self).__init__(
            obs_space, action_space, num_outputs, model_config, name
        )
        # all custom options live under "custom_model_config"
        model_config = model_config["custom_model_config"]
        self.n_agents = model_config["agent_number"]
        # "mean" mode: critic sees own obs, own action and one opponent-action
        # slot; otherwise the critic sees every agent's obs and action
        if model_config["critic_mode"] == "mean":
            self.critic_obs = spaces.Dict(
                OrderedDict(
                    {
                        "own_obs": self.obs_space,
                        "own_act": self.action_space,
                        "oppo_act": self.action_space,
                    }
                )
            )
        else:
            self.critic_obs = spaces.Dict(
                OrderedDict(
                    {
                        **{f"AGENT-{i}": self.obs_space for i in range(self.n_agents)},
                        **{
                            f"AGENT-{i}-action": self.action_space
                            for i in range(self.n_agents)
                        },
                    }
                )
            )
        # preprocessors flatten the (possibly nested) spaces into 1-D vectors
        self.critic_preprocessor = get_preprocessor(self.critic_obs)(self.critic_obs)
        self.obs_preprocessor = get_preprocessor(self.obs_space)(self.obs_space)
        self.act_preprocessor = get_preprocessor(self.action_space)(self.action_space)
        self.action_model = self._build_action_model(model_config["action_model"])
        self.value_model = self._build_value_model(model_config["value_model"])
        # register so RLlib tracks and checkpoints these variables
        self.register_variables(self.action_model.variables)
        self.register_variables(self.value_model.variables)

    def _build_action_model(self, model_config: ModelConfigDict):
        """Build action model with model configuration
        model_config = {'activation': str, 'hiddens': Sequence}
        """
        activation = get_activation_fn(model_config.get("activation"))
        hiddens = model_config.get("hiddens", [])
        # input is the flattened local observation
        inputs = tf.keras.layers.Input(
            shape=(np.product(self.obs_preprocessor.shape),), name="policy-inputs"
        )
        last_layer = inputs
        for i, size in enumerate(hiddens):
            last_layer = tf.keras.layers.Dense(
                size,
                name="fc_{}".format(i),
                activation=activation,
                kernel_initializer=normc_initializer(1.0),
            )(last_layer)
        # raw (unnormalised) action logits; no output activation
        logits_out = tf.keras.layers.Dense(
            self.num_outputs,
            name="logits_out",
            activation=None,
            kernel_initializer=normc_initializer(0.01),
        )(last_layer)
        return tf.keras.Model(inputs, [logits_out])

    def _build_value_model(self, model_config: ModelConfigDict):
        """Build value model with given model configuration
        model_config = {'activation': str, 'hiddens': Sequence}
        """
        activation = get_activation_fn(model_config.get("activation"))
        hiddens = model_config.get("hiddens", [])
        # input is the flattened centralized (joint) observation
        inputs = tf.keras.layers.Input(
            shape=(np.product(self.critic_preprocessor.shape),), name="value-inputs"
        )
        last_layer = inputs
        for i, size in enumerate(hiddens):
            last_layer = tf.keras.layers.Dense(
                size,
                name="fc_{}".format(i),
                activation=activation,
                kernel_initializer=normc_initializer(1.0),
            )(last_layer)
        # scalar state-value head
        value_out = tf.keras.layers.Dense(
            1,
            name="value_out",
            activation=None,
            kernel_initializer=normc_initializer(0.01),
        )(last_layer)
        return tf.keras.Model(inputs, [value_out])

    @override(ModelV2)
    def forward(
        self,
        input_dict: Dict[str, TensorType],
        state: List[TensorType],
        seq_lens: TensorType,
    ) -> (TensorType, List[TensorType]):
        """Actor forward pass: logits from the flattened local observation only."""
        # obs = self.obs_preprocessor.transform(input_dict["obs"])
        logits_out = self.action_model(input_dict["obs_flat"])
        return logits_out, state

    def central_value_function(self, critic_obs):
        """Critic forward pass on the joint observation; returns a 1-D value tensor.

        NOTE(review): assumes `critic_obs` is already flattened to match
        `critic_preprocessor.shape` — the transform call is commented out; confirm
        against the caller.
        """
        # Dict({obs, action})
        # critic_obs = self.critic_preprocessor.transform(critic_obs)
        self._value_out = self.value_model(critic_obs)
        return tf.reshape(self._value_out, [-1])

    @override(ModelV2)
    def value_function(self) -> TensorType:
        """Return the most recent critic output; `_value_out` is only set by
        central_value_function, which must therefore be called first."""
        return tf.reshape(self._value_out, [-1])

# make the model available to RLlib configs under the custom-model name "CAC"
ModelCatalog.register_custom_model("CAC", CentralizedActorCriticModel)
| 35.851351
| 87
| 0.613456
|
import numpy as np
from collections import OrderedDict
from typing import List, Dict
from gym import spaces
from ray.rllib.utils.framework import try_import_tf, get_activation_fn
from ray.rllib.utils.annotations import override
from ray.rllib.utils.types import ModelConfigDict, TensorType
from ray.rllib.models import ModelCatalog
from ray.rllib.models.tf.misc import normc_initializer
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.preprocessors import get_preprocessor
import tensorflow as tf
tf1, tf, tf_version = try_import_tf()
class CentralizedActorCriticModel(TFModelV2):
CRITIC_OBS = "critic_obs"
def __init__(
self,
obs_space: spaces.Space,
action_space: spaces.Space,
num_outputs: int,
model_config: ModelConfigDict,
name: str,
):
super(CentralizedActorCriticModel, self).__init__(
obs_space, action_space, num_outputs, model_config, name
)
model_config = model_config["custom_model_config"]
self.n_agents = model_config["agent_number"]
if model_config["critic_mode"] == "mean":
self.critic_obs = spaces.Dict(
OrderedDict(
{
"own_obs": self.obs_space,
"own_act": self.action_space,
"oppo_act": self.action_space,
}
)
)
else:
self.critic_obs = spaces.Dict(
OrderedDict(
{
**{f"AGENT-{i}": self.obs_space for i in range(self.n_agents)},
**{
f"AGENT-{i}-action": self.action_space
for i in range(self.n_agents)
},
}
)
)
self.critic_preprocessor = get_preprocessor(self.critic_obs)(self.critic_obs)
self.obs_preprocessor = get_preprocessor(self.obs_space)(self.obs_space)
self.act_preprocessor = get_preprocessor(self.action_space)(self.action_space)
self.action_model = self._build_action_model(model_config["action_model"])
self.value_model = self._build_value_model(model_config["value_model"])
self.register_variables(self.action_model.variables)
self.register_variables(self.value_model.variables)
def _build_action_model(self, model_config: ModelConfigDict):
activation = get_activation_fn(model_config.get("activation"))
hiddens = model_config.get("hiddens", [])
inputs = tf.keras.layers.Input(
shape=(np.product(self.obs_preprocessor.shape),), name="policy-inputs"
)
last_layer = inputs
for i, size in enumerate(hiddens):
last_layer = tf.keras.layers.Dense(
size,
name="fc_{}".format(i),
activation=activation,
kernel_initializer=normc_initializer(1.0),
)(last_layer)
logits_out = tf.keras.layers.Dense(
self.num_outputs,
name="logits_out",
activation=None,
kernel_initializer=normc_initializer(0.01),
)(last_layer)
return tf.keras.Model(inputs, [logits_out])
def _build_value_model(self, model_config: ModelConfigDict):
activation = get_activation_fn(model_config.get("activation"))
hiddens = model_config.get("hiddens", [])
inputs = tf.keras.layers.Input(
shape=(np.product(self.critic_preprocessor.shape),), name="value-inputs"
)
last_layer = inputs
for i, size in enumerate(hiddens):
last_layer = tf.keras.layers.Dense(
size,
name="fc_{}".format(i),
activation=activation,
kernel_initializer=normc_initializer(1.0),
)(last_layer)
value_out = tf.keras.layers.Dense(
1,
name="value_out",
activation=None,
kernel_initializer=normc_initializer(0.01),
)(last_layer)
return tf.keras.Model(inputs, [value_out])
@override(ModelV2)
def forward(
self,
input_dict: Dict[str, TensorType],
state: List[TensorType],
seq_lens: TensorType,
) -> (TensorType, List[TensorType]):
logits_out = self.action_model(input_dict["obs_flat"])
return logits_out, state
def central_value_function(self, critic_obs):
self._value_out = self.value_model(critic_obs)
return tf.reshape(self._value_out, [-1])
@override(ModelV2)
def value_function(self) -> TensorType:
return tf.reshape(self._value_out, [-1])
ModelCatalog.register_custom_model("CAC", CentralizedActorCriticModel)
| true
| true
|
1c4a7170b0fa8e60f08719c553b8346bac8ea857
| 525
|
py
|
Python
|
src/asymmetric_jwt_auth/migrations/0002_publickey_comment.py
|
crgwbr/asymmetric-jwt-auth
|
e4b7889a893dcc57eab20a2ed7265b6e9f44d4b9
|
[
"0BSD"
] | 18
|
2017-08-12T06:57:27.000Z
|
2022-03-17T18:55:04.000Z
|
src/asymmetric_jwt_auth/migrations/0002_publickey_comment.py
|
crgwbr/asymmetric-jwt-auth
|
e4b7889a893dcc57eab20a2ed7265b6e9f44d4b9
|
[
"0BSD"
] | 18
|
2017-03-25T04:39:23.000Z
|
2021-07-07T13:18:38.000Z
|
src/asymmetric_jwt_auth/migrations/0002_publickey_comment.py
|
crgwbr/asymmetric-jwt-auth
|
e4b7889a893dcc57eab20a2ed7265b6e9f44d4b9
|
[
"0BSD"
] | 8
|
2017-03-02T14:00:02.000Z
|
2020-03-21T08:29:17.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a free-text ``comment`` field to the PublicKey model."""

    dependencies = [
        ("asymmetric_jwt_auth", "0001_initial"),
    ]
    operations = [
        migrations.AddField(
            model_name="publickey",
            name="comment",
            field=models.CharField(
                max_length=100, help_text="Comment describing this key", default=""
            ),
            # the "" default only backfills existing rows; it is not kept on the model
            preserve_default=False,
        ),
    ]
| 22.826087
| 83
| 0.586667
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("asymmetric_jwt_auth", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="publickey",
name="comment",
field=models.CharField(
max_length=100, help_text="Comment describing this key", default=""
),
preserve_default=False,
),
]
| true
| true
|
1c4a7192efc324ef4cc55befd164bd83b94f86a8
| 342
|
py
|
Python
|
python/p026.py
|
Martin-Gong/euler
|
dc29cb99c0e5f9916428de624edc375d9d5b4543
|
[
"MIT"
] | null | null | null |
python/p026.py
|
Martin-Gong/euler
|
dc29cb99c0e5f9916428de624edc375d9d5b4543
|
[
"MIT"
] | null | null | null |
python/p026.py
|
Martin-Gong/euler
|
dc29cb99c0e5f9916428de624edc375d9d5b4543
|
[
"MIT"
] | null | null | null |
# Project Euler 26: find d < 1000 for which 1/d has the longest recurring decimal cycle
def getRecurLen(n):
    """Return the length of the recurring cycle in the decimal expansion of 1/n.

    Returns 0 when the expansion terminates. Simulates long division: the
    cycle repeats as soon as a remainder is seen a second time.
    """
    seq = {}
    rem = 1 % n
    i = 1
    while rem not in seq:
        seq[rem] = i
        rem = (10 * rem) % n
        if rem == 0:
            return 0  # terminating decimal: no recurring cycle
        i += 1
    # cycle runs from the first occurrence of `rem` to the last recorded remainder
    return len(seq) - seq[rem] + 1

# BUGFIX: the original compared the cycle length against the best *denominator*
# (`if n > rec: rec = d`), which only produced the right answer by coincidence;
# track the best cycle length separately from the answer.
rec = 0
best_len = 0
for d in range(1, 1000):
    n = getRecurLen(d)
    if n > best_len:
        best_len = n
        rec = d
print(rec)
| 13.68
| 34
| 0.421053
|
def getRecurLen(n):
    """Return the recurring-cycle length of 1/n's decimal expansion (0 if it terminates)."""
    seq = {}
    rem = 1 % n
    i = 1
    while rem not in seq:
        seq[rem] = i
        rem = (10 * rem) % n
        if rem == 0:
            return 0
        i += 1
    return len(seq) - seq[rem] + 1

# BUGFIX: original compared the cycle length `n` against the best denominator
# `rec`; track the best cycle length separately from the answer.
rec = 0
best_len = 0
for d in range(1, 1000):
    n = getRecurLen(d)
    if n > best_len:
        best_len = n
        rec = d
print(rec)
| true
| true
|
1c4a71fe54c3b911852677cd9a5459e70d22fa8e
| 7,860
|
py
|
Python
|
stonesoup/types/multihypothesis.py
|
0sm1um/Stone-Soup
|
aaa895b54383e9a9b9c9f9ff746291bf60242aab
|
[
"MIT"
] | 1
|
2021-12-02T00:17:21.000Z
|
2021-12-02T00:17:21.000Z
|
stonesoup/types/multihypothesis.py
|
0sm1um/Stone-Soup
|
aaa895b54383e9a9b9c9f9ff746291bf60242aab
|
[
"MIT"
] | null | null | null |
stonesoup/types/multihypothesis.py
|
0sm1um/Stone-Soup
|
aaa895b54383e9a9b9c9f9ff746291bf60242aab
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from collections.abc import Sized, Iterable, Container
from typing import Sequence
from .detection import MissedDetection
from .numeric import Probability
from ..base import Property
from ..types import Type
from ..types.detection import Detection
from ..types.hypothesis import SingleHypothesis, CompositeHypothesis
from ..types.prediction import Prediction
class MultipleHypothesis(Type, Sized, Iterable, Container):
    """Multiple Hypothesis base type

    A Multiple Hypothesis is a container to store a collection of hypotheses.
    """

    single_hypotheses: Sequence[SingleHypothesis] = Property(
        default=None,
        doc="The initial list of :class:`~.SingleHypothesis`. Default `None` "
            "which initialises with empty list.")
    normalise: bool = Property(
        default=False,
        doc="Normalise probabilities of :class:`~.SingleHypothesis`. Default "
            "is `False`.")
    total_weight: float = Property(
        default=1,
        doc="When normalising, weights will sum to this. Default is 1.")

    def __init__(self, single_hypotheses=None, normalise=False, *args,
                 **kwargs):
        if single_hypotheses is None:
            single_hypotheses = []
        if any(not isinstance(hypothesis, SingleHypothesis)
               for hypothesis in single_hypotheses):
            raise ValueError("Cannot form MultipleHypothesis out of "
                             "non-SingleHypothesis inputs!")
        super().__init__(single_hypotheses, normalise, *args, **kwargs)
        # normalise the weights of 'single_hypotheses', if indicated
        if self.normalise:
            self.normalise_probabilities()

    def __len__(self):
        return self.single_hypotheses.__len__()

    def __contains__(self, index):
        """Membership test by Detection, Prediction or SingleHypothesis identity."""
        # check if 'single_hypotheses' contains any SingleHypotheses with
        # Detection matching 'index'
        if isinstance(index, Detection):
            for hypothesis in self.single_hypotheses:
                if hypothesis.measurement is index:
                    return True
            return False
        # check if 'single_hypotheses' contains any SingleHypotheses with
        # Prediction matching 'index'
        if isinstance(index, Prediction):
            for hypothesis in self.single_hypotheses:
                if hypothesis.prediction is index:
                    return True
            return False
        # check if 'single_hypotheses' contains any SingleHypotheses
        # matching 'index'
        if isinstance(index, SingleHypothesis):
            return index in self.single_hypotheses
        # BUGFIX: previously fell off the end and implicitly returned None for
        # any other index type; __contains__ should always return a bool
        return False

    def __iter__(self):
        for hypothesis in self.single_hypotheses:
            yield hypothesis

    def __getitem__(self, index):
        """Retrieve a hypothesis by list index, Detection or Prediction.

        Returns `None` when no hypothesis matches a Detection/Prediction.
        """
        # retrieve SingleHypothesis by array index
        if isinstance(index, int):
            return self.single_hypotheses[index]
        # retrieve SingleHypothesis by measurement
        if isinstance(index, Detection):
            for hypothesis in self.single_hypotheses:
                if hypothesis.measurement is index:
                    return hypothesis
            return None
        # retrieve SingleHypothesis by prediction
        if isinstance(index, Prediction):
            for hypothesis in self.single_hypotheses:
                if hypothesis.prediction is index:
                    return hypothesis
            return None

    def normalise_probabilities(self, total_weight=None):
        """Scale hypothesis probabilities in place so they sum to `total_weight`.

        Raises ValueError if any hypothesis lacks a `probability` attribute.
        """
        if total_weight is None:
            total_weight = self.total_weight
        # verify that SingleHypotheses composing this MultipleHypothesis
        # all have Probabilities
        if any(not hasattr(hypothesis, 'probability')
               for hypothesis in self.single_hypotheses):
            raise ValueError("MultipleHypothesis not composed of Probability"
                             " hypotheses!")
        sum_weights = Probability.sum(
            hypothesis.probability for hypothesis in self.single_hypotheses)
        for hypothesis in self.single_hypotheses:
            hypothesis.probability =\
                (hypothesis.probability * total_weight)/sum_weights

    def get_missed_detection_probability(self):
        """Return the probability of the MissedDetection hypothesis, or None."""
        for hypothesis in self.single_hypotheses:
            if isinstance(hypothesis.measurement, MissedDetection):
                if hasattr(hypothesis, 'probability'):
                    return hypothesis.probability
        return None
class MultipleCompositeHypothesis(Type, Sized, Iterable, Container):
    """Multiple composite hypothesis type

    A Multiple Composite Hypothesis is a container to store a collection of composite hypotheses.
    Interfaces the same as MultipleHypothesis, but permits different input, hence methods are
    redefined.
    """

    single_hypotheses: Sequence[CompositeHypothesis] = Property(
        default=None,
        doc="The initial list of :class:`~.CompositeHypothesis`. Default `None` which initialises "
            "with empty list.")
    normalise: bool = Property(
        default=False,
        doc="Normalise probabilities of :class:`~.CompositeHypothesis`. Default is `False`.")
    total_weight: float = Property(
        default=1,
        doc="When normalising, weights will sum to this. Default is 1.")

    def __init__(self, single_hypotheses=None, normalise=False, *args,
                 **kwargs):
        if single_hypotheses is None:
            single_hypotheses = []
        if not all(isinstance(hypothesis, CompositeHypothesis)
                   for hypothesis in single_hypotheses):
            raise ValueError("Cannot form MultipleHypothesis out of "
                             "non-CompositeHypothesis inputs!")
        super().__init__(single_hypotheses, normalise, *args, **kwargs)
        # normalise the weights of 'single_hypotheses', if indicated
        if self.normalise:
            self.normalise_probabilities()

    def __contains__(self, index):
        """Membership test by CompositeHypothesis identity."""
        # cannot check instance index is detection or prediction as composite hypotheses create
        # their own composite detections and predictions
        # use `is` as standard list __contains__ checks for equality which may not work in cases
        # where hypotheses do not all share same attributes
        if isinstance(index, CompositeHypothesis):
            return any(index is single_hypothesis for single_hypothesis in self.single_hypotheses)
        # BUGFIX: previously returned None implicitly for any other index type;
        # __contains__ should always return a bool
        return False

    def __getitem__(self, index):
        return self.single_hypotheses.__getitem__(index)

    def __iter__(self):
        return self.single_hypotheses.__iter__()

    def __len__(self):
        return self.single_hypotheses.__len__()

    def normalise_probabilities(self, total_weight=None):
        """Scale hypothesis probabilities in place so they sum to `total_weight`.

        Raises ValueError if any hypothesis lacks a `probability` attribute.
        """
        if total_weight is None:
            total_weight = self.total_weight
        # verify that SingleHypotheses composing this MultipleHypothesis
        # all have Probabilities
        if any(not hasattr(hypothesis, 'probability')
               for hypothesis in self.single_hypotheses):
            raise ValueError(
                "MultipleHypothesis not composed of composite hypotheses with probabilities")
        sum_weights = Probability.sum(
            hypothesis.probability for hypothesis in self.single_hypotheses)
        # this will NOT affect the probabilities of each composite hypothesis' sub-hypotheses
        for hypothesis in self.single_hypotheses:
            hypothesis.probability = \
                (hypothesis.probability * total_weight) / sum_weights

    def get_missed_detection_probability(self):
        """Return the probability of the first falsy (null) hypothesis, or None."""
        for hypothesis in self.single_hypotheses:
            if hasattr(hypothesis, 'probability') and not hypothesis:
                return hypothesis.probability
        return None
| 38.719212
| 99
| 0.663232
|
from collections.abc import Sized, Iterable, Container
from typing import Sequence
from .detection import MissedDetection
from .numeric import Probability
from ..base import Property
from ..types import Type
from ..types.detection import Detection
from ..types.hypothesis import SingleHypothesis, CompositeHypothesis
from ..types.prediction import Prediction
class MultipleHypothesis(Type, Sized, Iterable, Container):
single_hypotheses: Sequence[SingleHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.SingleHypothesis`. Default `None` "
"which initialises with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.SingleHypothesis`. Default "
"is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if any(not isinstance(hypothesis, SingleHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-SingleHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
if self.normalise:
self.normalise_probabilities()
def __len__(self):
return self.single_hypotheses.__len__()
def __contains__(self, index):
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return True
return False
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return True
return False
if isinstance(index, SingleHypothesis):
return index in self.single_hypotheses
def __iter__(self):
for hypothesis in self.single_hypotheses:
yield hypothesis
def __getitem__(self, index):
if isinstance(index, int):
return self.single_hypotheses[index]
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return hypothesis
return None
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return hypothesis
return None
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError("MultipleHypothesis not composed of Probability"
" hypotheses!")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
for hypothesis in self.single_hypotheses:
hypothesis.probability =\
(hypothesis.probability * total_weight)/sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if isinstance(hypothesis.measurement, MissedDetection):
if hasattr(hypothesis, 'probability'):
return hypothesis.probability
return None
class MultipleCompositeHypothesis(Type, Sized, Iterable, Container):
single_hypotheses: Sequence[CompositeHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.CompositeHypothesis`. Default `None` which initialises "
"with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.CompositeHypothesis`. Default is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if not all(isinstance(hypothesis, CompositeHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-CompositeHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
if self.normalise:
self.normalise_probabilities()
def __contains__(self, index):
if isinstance(index, CompositeHypothesis):
return any(index is single_hypothesis for single_hypothesis in self.single_hypotheses)
def __getitem__(self, index):
return self.single_hypotheses.__getitem__(index)
def __iter__(self):
return self.single_hypotheses.__iter__()
def __len__(self):
return self.single_hypotheses.__len__()
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError(
"MultipleHypothesis not composed of composite hypotheses with probabilities")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
for hypothesis in self.single_hypotheses:
hypothesis.probability = \
(hypothesis.probability * total_weight) / sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if hasattr(hypothesis, 'probability') and not hypothesis:
return hypothesis.probability
return None
| true
| true
|
1c4a72bba58f35607b720e9dfaa0b6d738ef6fd0
| 26,738
|
py
|
Python
|
tests/test_invariant.py
|
kklein/icontract
|
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
|
[
"MIT"
] | 244
|
2018-08-15T22:58:58.000Z
|
2022-03-12T16:10:39.000Z
|
tests/test_invariant.py
|
kklein/icontract
|
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
|
[
"MIT"
] | 157
|
2018-08-29T21:36:47.000Z
|
2022-02-14T19:30:24.000Z
|
tests/test_invariant.py
|
kklein/icontract
|
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
|
[
"MIT"
] | 23
|
2019-04-24T11:09:10.000Z
|
2022-02-14T15:56:26.000Z
|
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
# pylint: disable=unused-argument
import textwrap
import time
import unittest
from typing import Dict, Iterator, Mapping, Optional, Any, NamedTuple # pylint: disable=unused-import
import icontract
import tests.error
import tests.mock
class TestOK(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_instance_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
inst.some_method()
self.assertEqual(1000, inst.x)
def test_unbound_instance_method_with_self_as_kwarg(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
func = inst.some_method.__func__ # type: ignore
func(self=inst)
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = 1000
inst = SomeClass()
inst()
self.assertEqual(1000, inst.x)
def test_class_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@classmethod
def some_class_method(cls) -> None:
pass
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_static_method(self) -> None:
# Adapted from https://github.com/Parquery/icontract/issues/186
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
# We need this instance method for easier debugging.
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
_ = A()
def test_inherited_static_method(self) -> None:
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
# We need this instance method for easier debugging.
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
# We need to test for inheritance.
# See https://stackoverflow.com/questions/14187973/#comment74562120_37147128
class B(A):
pass
_ = B()
def test_protected_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A protected method is allowed to break the invariant.
def _some_protected_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_protected_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A protected method can not expect the invariant to hold.
def _some_protected_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_private_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A private method is allowed to break the invariant.
def __some_private_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_private_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A private method can not expect the invariant to hold.
def __some_private_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_with_empty_arguments(self) -> None: # pylint: disable=no-self-use
z = 42
@icontract.invariant(lambda: z == 42)
class A:
pass
_ = A()
def test_no_dict_pollution(self) -> None:
testSelf = self
@icontract.invariant(lambda self: self.mustHold())
class A:
def mustHold(self) -> bool:
testSelf.assertDictEqual({}, self.__dict__)
return True
_ = A()
def test_new_exempted(self) -> None:
# This test is related to the issue #167.
new_call_counter = 0
init_call_counter = 0
@icontract.invariant(lambda self: True)
class Foo:
def __new__(cls, *args, **kwargs) -> 'Foo': # type: ignore
nonlocal new_call_counter
new_call_counter += 1
return super(Foo, cls).__new__(cls) # type: ignore
def __init__(self) -> None:
nonlocal init_call_counter
init_call_counter += 1
_ = Foo()
self.assertEqual(1, new_call_counter)
self.assertEqual(1, init_call_counter)
def test_subclass_of_generic_mapping(self) -> None:
# This test is related to the issue #167.
counter = 0
def increase_counter(self: Any) -> bool:
nonlocal counter
counter += 1
return True
@icontract.invariant(increase_counter)
class Foo(Mapping[str, int]):
def __init__(self, table: Dict[str, int]) -> None:
self._table = table
def __getitem__(self, key: str) -> int:
return self._table[key]
def __iter__(self) -> Iterator[str]:
return iter(self._table)
def __len__(self) -> int:
return len(self._table)
def __str__(self) -> str:
return '{}({})'.format(self.__class__.__name__, self._table)
f = Foo({'a': 1}) # test the constructor
_ = f['a'] # test __getitem__
_ = iter(f) # test __iter__
_ = len(f) # test __len__
_ = str(f) # test __str__
# 1 invariant check after the constructor +
# 4 checks before the methods +
# 4 checks after the methods.
self.assertEqual(9, counter)
class TestViolation(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self, x: int) -> None:
self.x = x
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
_ = SomeClass(x=1)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = SomeClass(x=0)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was 0"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_as_precondition(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.x = -1
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_first_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_last_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x < 10:
self was an instance of SomeClass
self.x was 100"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_after_pre(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.require(lambda y: y > 0)
def some_method(self, y: int) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method(y=-1)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
y > 0:
self was an instance of SomeClass
y was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
violation_error = None
try:
inst = SomeClass()
inst.some_method(y=100)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_ok_but_post_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = 10
return -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
result > 0:
result was -1
self was an instance of SomeClass"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_but_post_ok(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = -1
return 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_with_empty_arguments(self) -> None:
z = 42
@icontract.invariant(lambda: z != 42)
class A:
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = A()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
z != 42:
self was an instance of A
z was 42"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_condition_as_function(self) -> None:
def some_condition(self: 'A') -> bool:
return self.x > 0
@icontract.invariant(some_condition)
class A:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "A(x={})".format(self.x)
# Valid call
a = A()
# Invalid call
violation_error = None # type: Optional[icontract.ViolationError]
try:
a.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual('some_condition: self was A(x=-1)', tests.error.wo_mandatory_location(str(violation_error)))
def test_condition_as_function_with_default_argument_value(self) -> None:
def some_condition(self: 'A', y: int = 0) -> bool:
return self.x > y
@icontract.invariant(some_condition)
class A:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "A(x={})".format(self.x)
# Valid call
a = A()
# Invalid call
violation_error = None # type: Optional[icontract.ViolationError]
try:
a.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual('some_condition: self was A(x=-1)', tests.error.wo_mandatory_location(str(violation_error)))
class TestProperty(unittest.TestCase):
def test_property_getter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
self.toggled = True
return 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = some_inst.some_prop
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_property_setter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
return 0
@some_prop.setter
def some_prop(self, value: int) -> None:
self.toggled = True
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
violation_error = None # type: Optional[icontract.ViolationError]
try:
some_inst.some_prop = 0
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_property_deleter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
return 0
@some_prop.deleter
def some_prop(self) -> None:
self.toggled = True
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
violation_error = None # type: Optional[icontract.ViolationError]
try:
del some_inst.some_prop
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
class TestError(unittest.TestCase):
def test_as_type(self) -> None:
@icontract.invariant(lambda self: self.x > 0, error=ValueError)
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of A
self.x was 0"""), tests.error.wo_mandatory_location(str(value_error)))
def test_as_function(self) -> None:
@icontract.invariant(
lambda self: self.x > 0, error=lambda self: ValueError("x must be positive, but got: {}".format(self.x)))
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual('x must be positive, but got: 0', str(value_error))
def test_as_function_with_empty_args(self) -> None:
@icontract.invariant(lambda self: self.x > 0, error=lambda: ValueError("x must be positive"))
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual('x must be positive', str(value_error))
class TestToggling(unittest.TestCase):
def test_disabled(self) -> None:
@icontract.invariant(lambda self: self.x > 0, enabled=False)
class SomeClass:
def __init__(self) -> None:
self.x = -1
inst = SomeClass()
self.assertEqual(-1, inst.x)
class TestBenchmark(unittest.TestCase):
@unittest.skip("Skipped the benchmark, execute manually on a prepared benchmark machine.")
def test_benchmark_when_disabled(self) -> None:
def some_long_condition() -> bool:
time.sleep(5)
return True
@icontract.invariant(lambda self: some_long_condition(), enabled=False)
class SomeClass:
def __init__(self) -> None:
self.x = 100
class AnotherClass:
def __init__(self) -> None:
self.x = 100
start = time.time()
_ = SomeClass()
duration_with_inv = time.time() - start
start = time.time()
_ = AnotherClass()
duration_wo_inv = time.time() - start
self.assertLess(duration_with_inv / duration_wo_inv, 1.2)
class TestInvalid(unittest.TestCase):
def test_with_invalid_arguments(self) -> None:
val_err = None # type: Optional[ValueError]
try:
@icontract.invariant(lambda self, z: self.x > z)
class _:
def __init__(self) -> None:
self.x = 100
except ValueError as err:
val_err = err
self.assertIsNotNone(val_err)
self.assertEqual("Expected an invariant condition with at most an argument 'self', but got: ['self', 'z']",
str(val_err))
def test_no_boolyness(self) -> None:
@icontract.invariant(lambda self: tests.mock.NumpyArray([True, False]))
class A:
def __init__(self) -> None:
pass
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertEqual('Failed to negate the evaluation of the condition.',
tests.error.wo_mandatory_location(str(value_error)))
if __name__ == '__main__':
unittest.main()
| 32.214458
| 117
| 0.559803
|
import textwrap
import time
import unittest
from typing import Dict, Iterator, Mapping, Optional, Any, NamedTuple
import icontract
import tests.error
import tests.mock
class TestOK(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_instance_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
inst.some_method()
self.assertEqual(1000, inst.x)
def test_unbound_instance_method_with_self_as_kwarg(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
func = inst.some_method.__func__
func(self=inst)
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = 1000
inst = SomeClass()
inst()
self.assertEqual(1000, inst.x)
def test_class_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@classmethod
def some_class_method(cls) -> None:
pass
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_static_method(self) -> None:
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
_ = A()
def test_inherited_static_method(self) -> None:
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
pass
_ = B()
def test_protected_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def _some_protected_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_protected_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def _some_protected_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_private_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __some_private_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_private_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __some_private_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_with_empty_arguments(self) -> None:
z = 42
@icontract.invariant(lambda: z == 42)
class A:
pass
_ = A()
def test_no_dict_pollution(self) -> None:
testSelf = self
@icontract.invariant(lambda self: self.mustHold())
class A:
def mustHold(self) -> bool:
testSelf.assertDictEqual({}, self.__dict__)
return True
_ = A()
def test_new_exempted(self) -> None:
new_call_counter = 0
init_call_counter = 0
@icontract.invariant(lambda self: True)
class Foo:
def __new__(cls, *args, **kwargs) -> 'Foo':
nonlocal new_call_counter
new_call_counter += 1
return super(Foo, cls).__new__(cls)
def __init__(self) -> None:
nonlocal init_call_counter
init_call_counter += 1
_ = Foo()
self.assertEqual(1, new_call_counter)
self.assertEqual(1, init_call_counter)
def test_subclass_of_generic_mapping(self) -> None:
counter = 0
def increase_counter(self: Any) -> bool:
nonlocal counter
counter += 1
return True
@icontract.invariant(increase_counter)
class Foo(Mapping[str, int]):
def __init__(self, table: Dict[str, int]) -> None:
self._table = table
def __getitem__(self, key: str) -> int:
return self._table[key]
def __iter__(self) -> Iterator[str]:
return iter(self._table)
def __len__(self) -> int:
return len(self._table)
def __str__(self) -> str:
return '{}({})'.format(self.__class__.__name__, self._table)
f = Foo({'a': 1})
_ = f['a']
_ = iter(f)
_ = len(f)
_ = str(f)
self.assertEqual(9, counter)
class TestViolation(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self, x: int) -> None:
self.x = x
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
_ = SomeClass(x=1)
violation_error = None
try:
_ = SomeClass(x=0)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was 0"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_as_precondition(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
inst = SomeClass()
inst.x = -1
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
inst = SomeClass()
inst()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_first_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_last_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x < 10:
self was an instance of SomeClass
self.x was 100"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_after_pre(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.require(lambda y: y > 0)
def some_method(self, y: int) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
inst = SomeClass()
inst.some_method(y=-1)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
y > 0:
self was an instance of SomeClass
y was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
violation_error = None
try:
inst = SomeClass()
inst.some_method(y=100)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_ok_but_post_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = 10
return -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
result > 0:
result was -1
self was an instance of SomeClass"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_but_post_ok(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = -1
return 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_with_empty_arguments(self) -> None:
z = 42
@icontract.invariant(lambda: z != 42)
class A:
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None
try:
_ = A()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
z != 42:
self was an instance of A
z was 42"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_condition_as_function(self) -> None:
    """The invariant condition can be a named function instead of a lambda."""

    def some_condition(self: 'A') -> bool:
        return self.x > 0

    @icontract.invariant(some_condition)
    class A:
        def __init__(self) -> None:
            self.x = 100

        def some_method(self) -> None:
            self.x = -1

        def __repr__(self) -> str:
            return "A(x={})".format(self.x)

    # For a named condition the error shows the function name, not its source.
    instance = A()
    with self.assertRaises(icontract.ViolationError) as ctx:
        instance.some_method()

    self.assertEqual('some_condition: self was A(x=-1)',
                     tests.error.wo_mandatory_location(str(ctx.exception)))
def test_condition_as_function_with_default_argument_value(self) -> None:
    """A named condition with a defaulted extra argument is accepted and checked."""

    def some_condition(self: 'A', y: int = 0) -> bool:
        return self.x > y

    @icontract.invariant(some_condition)
    class A:
        def __init__(self) -> None:
            self.x = 100

        def some_method(self) -> None:
            self.x = -1

        def __repr__(self) -> str:
            return "A(x={})".format(self.x)

    # The defaulted parameter ``y`` is filled in automatically; only ``self``
    # is reported in the violation message.
    instance = A()
    with self.assertRaises(icontract.ViolationError) as ctx:
        instance.some_method()

    self.assertEqual('some_condition: self was A(x=-1)',
                     tests.error.wo_mandatory_location(str(ctx.exception)))
class TestProperty(unittest.TestCase):
    """Test that class invariants are enforced on property getters, setters and deleters."""

    def test_property_getter(self) -> None:
        @icontract.invariant(lambda self: not self.toggled)
        class SomeClass:
            def __init__(self) -> None:
                self.toggled = False

            @property
            def some_prop(self) -> int:
                self.toggled = True
                return 0

            def __repr__(self) -> str:
                return "an instance of {}".format(self.__class__.__name__)

        instance = SomeClass()

        # Merely reading the property flips ``toggled`` and breaks the invariant.
        with self.assertRaises(icontract.ViolationError) as ctx:
            _ = instance.some_prop

        self.assertEqual(
            textwrap.dedent("""\
                not self.toggled:
                self was an instance of SomeClass
                self.toggled was True"""),
            tests.error.wo_mandatory_location(str(ctx.exception)))

    def test_property_setter(self) -> None:
        @icontract.invariant(lambda self: not self.toggled)
        class SomeClass:
            def __init__(self) -> None:
                self.toggled = False

            @property
            def some_prop(self) -> int:
                return 0

            @some_prop.setter
            def some_prop(self, value: int) -> None:
                self.toggled = True

            def __repr__(self) -> str:
                return "an instance of {}".format(self.__class__.__name__)

        instance = SomeClass()

        # Assigning through the setter breaks the invariant.
        with self.assertRaises(icontract.ViolationError) as ctx:
            instance.some_prop = 0

        self.assertEqual(
            textwrap.dedent("""\
                not self.toggled:
                self was an instance of SomeClass
                self.toggled was True"""),
            tests.error.wo_mandatory_location(str(ctx.exception)))

    def test_property_deleter(self) -> None:
        @icontract.invariant(lambda self: not self.toggled)
        class SomeClass:
            def __init__(self) -> None:
                self.toggled = False

            @property
            def some_prop(self) -> int:
                return 0

            @some_prop.deleter
            def some_prop(self) -> None:
                self.toggled = True

            def __repr__(self) -> str:
                return "an instance of {}".format(self.__class__.__name__)

        instance = SomeClass()

        # Deleting through the deleter breaks the invariant.
        with self.assertRaises(icontract.ViolationError) as ctx:
            del instance.some_prop

        self.assertEqual(
            textwrap.dedent("""\
                not self.toggled:
                self was an instance of SomeClass
                self.toggled was True"""),
            tests.error.wo_mandatory_location(str(ctx.exception)))
class TestError(unittest.TestCase):
    """Test custom errors supplied via the ``error`` argument of the invariant decorator."""

    def test_as_type(self) -> None:
        # ``error`` given as an exception *type*: icontract instantiates it
        # with the standard violation message.
        @icontract.invariant(lambda self: self.x > 0, error=ValueError)
        class A:
            def __init__(self) -> None:
                self.x = 0

            def __repr__(self) -> str:
                return "an instance of {}".format(self.__class__.__name__)

        with self.assertRaises(ValueError) as ctx:
            _ = A()

        self.assertIsInstance(ctx.exception, ValueError)
        self.assertEqual(
            textwrap.dedent("""\
                self.x > 0:
                self was an instance of A
                self.x was 0"""),
            tests.error.wo_mandatory_location(str(ctx.exception)))

    def test_as_function(self) -> None:
        # ``error`` given as a factory taking ``self``: the message is fully
        # under the caller's control.
        @icontract.invariant(
            lambda self: self.x > 0, error=lambda self: ValueError("x must be positive, but got: {}".format(self.x)))
        class A:
            def __init__(self) -> None:
                self.x = 0

            def __repr__(self) -> str:
                return "an instance of {}".format(self.__class__.__name__)

        with self.assertRaises(ValueError) as ctx:
            _ = A()

        self.assertIsInstance(ctx.exception, ValueError)
        self.assertEqual('x must be positive, but got: 0', str(ctx.exception))

    def test_as_function_with_empty_args(self) -> None:
        # ``error`` given as a zero-argument factory.
        @icontract.invariant(lambda self: self.x > 0, error=lambda: ValueError("x must be positive"))
        class A:
            def __init__(self) -> None:
                self.x = 0

            def __repr__(self) -> str:
                return "an instance of {}".format(self.__class__.__name__)

        with self.assertRaises(ValueError) as ctx:
            _ = A()

        self.assertIsInstance(ctx.exception, ValueError)
        self.assertEqual('x must be positive', str(ctx.exception))
class TestToggling(unittest.TestCase):
    """Test that invariants decorated with ``enabled=False`` are not enforced."""

    def test_disabled(self) -> None:
        @icontract.invariant(lambda self: self.x > 0, enabled=False)
        class SomeClass:
            def __init__(self) -> None:
                self.x = -1

        # The invariant is switched off, so the violating value passes through.
        instance = SomeClass()
        self.assertEqual(-1, instance.x)
class TestBenchmark(unittest.TestCase):
    """Benchmark the construction overhead of a disabled invariant."""

    @unittest.skip("Skipped the benchmark, execute manually on a prepared benchmark machine.")
    def test_benchmark_when_disabled(self) -> None:
        def some_long_condition() -> bool:
            time.sleep(5)
            return True

        # The deliberately slow condition must never run because the
        # invariant is disabled at decoration time.
        @icontract.invariant(lambda self: some_long_condition(), enabled=False)
        class SomeClass:
            def __init__(self) -> None:
                self.x = 100

        class AnotherClass:
            def __init__(self) -> None:
                self.x = 100

        start = time.time()
        _ = SomeClass()
        elapsed_decorated = time.time() - start

        start = time.time()
        _ = AnotherClass()
        elapsed_plain = time.time() - start

        # A disabled invariant should add at most ~20% construction overhead.
        self.assertLess(elapsed_decorated / elapsed_plain, 1.2)
class TestInvalid(unittest.TestCase):
    """Test the reporting of ill-formed invariant conditions."""

    def test_with_invalid_arguments(self) -> None:
        # An invariant condition may accept at most ``self``; an extra
        # argument is rejected already at class decoration time.
        with self.assertRaises(ValueError) as ctx:

            @icontract.invariant(lambda self, z: self.x > z)
            class _:
                def __init__(self) -> None:
                    self.x = 100

        self.assertEqual("Expected an invariant condition with at most an argument 'self', but got: ['self', 'z']",
                         str(ctx.exception))

    def test_no_boolyness(self) -> None:
        # A condition whose result has no unambiguous truth value (here a
        # numpy-array mock) cannot be negated for the check.
        @icontract.invariant(lambda self: tests.mock.NumpyArray([True, False]))
        class A:
            def __init__(self) -> None:
                pass

        with self.assertRaises(ValueError) as ctx:
            _ = A()

        self.assertEqual('Failed to negate the evaluation of the condition.',
                         tests.error.wo_mandatory_location(str(ctx.exception)))
# Allow running this test module directly, e.g. ``python test_invariant.py``.
if __name__ == '__main__':
    unittest.main()