id
stringlengths
1
265
text
stringlengths
6
5.19M
dataset_id
stringclasses
7 values
/BlueWhale3-3.31.3.tar.gz/BlueWhale3-3.31.3/Orange/preprocess/remove.py
from collections import namedtuple

import numpy as np

from Orange.data import Domain, DiscreteVariable
from Orange.preprocess.transformation import Lookup
from Orange.statistics.util import nanunique
from .preprocess import Preprocess

__all__ = ["Remove"]


class Remove(Preprocess):
    """
    Construct a preprocessor for removing constant features/classes and unused
    values. Given a data table, preprocessor returns a new table and a list of
    results. In the new table, the constant features/classes and unused values
    are removed. The list of results consists of two dictionaries. The first
    one contains numbers of 'removed', 'reduced' and 'sorted' features. The
    second one contains numbers of 'removed', 'reduced' and 'sorted' classes.

    Parameters
    ----------
    attr_flags : int (default: 0)
        If SortValues, values of discrete attributes are sorted.
        If RemoveConstant, unused attributes are removed.
        If RemoveUnusedValues, unused values are removed from discrete
        attributes.
        It is possible to merge operations in one by summing several types.

    class_flags : int (default: 0)
        If SortValues, values of discrete class attributes are sorted.
        If RemoveConstant, unused class attributes are removed.
        If RemoveUnusedValues, unused values are removed from discrete
        class attributes.
        It is possible to merge operations in one by summing several types.

    Examples
    --------
    >>> from Orange.data import Table
    >>> from Orange.preprocess import Remove
    >>> data = Table("zoo")[:10]
    >>> flags = sum([Remove.SortValues, Remove.RemoveConstant,
    ...              Remove.RemoveUnusedValues])
    >>> remover = Remove(attr_flags=flags, class_flags=flags)
    >>> new_data = remover(data)
    >>> attr_results, class_results = remover.attr_results, remover.class_results
    """

    SortValues, RemoveConstant, RemoveUnusedValues = 1, 2, 4

    def __init__(self, attr_flags=0, class_flags=0, meta_flags=0):
        self.attr_flags = attr_flags
        self.class_flags = class_flags
        self.meta_flags = meta_flags
        # Per-domain-part result dicts; populated by __call__.
        self.attr_results = None
        self.class_results = None
        self.meta_results = None

    def __call__(self, data):
        """
        Remove unused features or classes from the given data and return
        a new data table.

        Parameters
        ----------
        data : Orange.data.Table
            A data table to remove features or classes from.

        Returns
        -------
        data : Orange.data.Table
            New data table (or None if `data` is None).
        """
        if data is None:
            return None

        domain = data.domain
        attrs_state = [purge_var_M(var, data, self.attr_flags)
                       for var in domain.attributes]
        class_state = [purge_var_M(var, data, self.class_flags)
                       for var in domain.class_vars]
        metas_state = [purge_var_M(var, data, self.meta_flags)
                       for var in domain.metas]

        att_vars, self.attr_results = self.get_vars_and_results(attrs_state)
        cls_vars, self.class_results = self.get_vars_and_results(class_state)
        meta_vars, self.meta_results = self.get_vars_and_results(metas_state)

        domain = Domain(att_vars, cls_vars, meta_vars)
        return data.transform(domain)

    def get_vars_and_results(self, state):
        """
        Fold a list of purge-expression states into the surviving variables
        and a dict counting removed/reduced/sorted variables.
        """
        n_removed = n_reduced = n_sorted = 0
        kept_vars = []
        for st in state:
            removed = is_removed(st)
            n_removed += removed
            n_reduced += not removed and is_reduced(st)
            n_sorted += not removed and is_sorted(st)
            if not removed:
                kept_vars.append(merge_transforms(st).var)
        res = {'removed': n_removed, 'reduced': n_reduced, 'sorted': n_sorted}
        return kept_vars, res


# Define a simple Purge expression 'language'.
#: An input variable (leaf expression).
Var = namedtuple("Var", ["var"])

#: Removed variable (can only ever be present as a root node).
Removed = namedtuple("Removed", ["sub", "var"])

#: A reduced variable
Reduced = namedtuple("Reduced", ["sub", "var"])

#: A sorted variable
Sorted = namedtuple("Sorted", ["sub", "var"])

#: A general (lookup) transformed variable.
#: (this node is returned as a result of `merge` which joins consecutive
#: Removed/Reduced nodes into a single Transformed node)
Transformed = namedtuple("Transformed", ["sub", "var"])


def is_var(exp):
    """Is `exp` a `Var` node."""
    return isinstance(exp, Var)


def is_removed(exp):
    """Is `exp` a `Removed` node."""
    return isinstance(exp, Removed)


def _contains(exp, cls):
    """Does `exp` contain a sub node of type `cls`."""
    if isinstance(exp, cls):
        return True
    elif isinstance(exp, Var):
        return False
    else:
        return _contains(exp.sub, cls)


def is_reduced(exp):
    """Does `exp` contain a `Reduced` node."""
    return _contains(exp, Reduced)


def is_sorted(exp):
    """Does `exp` contain a `Sorted` node."""
    return _contains(exp, Sorted)


def merge_transforms(exp):
    """
    Merge consecutive Removed, Reduced or Transformed nodes.

    .. note:: Removed nodes are returned unchanged.
    """
    if isinstance(exp, (Var, Removed)):
        return exp
    elif isinstance(exp, (Reduced, Sorted, Transformed)):
        prev = merge_transforms(exp.sub)
        if isinstance(prev, (Reduced, Sorted, Transformed)):
            # Both this node and its (already merged) child carry a Lookup
            # compute_value; compose the two lookups into one.
            B = exp.var.compute_value
            assert isinstance(B, Lookup)
            A = B.variable.compute_value
            assert isinstance(A, Lookup)
            new_var = DiscreteVariable(
                exp.var.name,
                values=exp.var.values,
                compute_value=merge_lookup(A, B),
                sparse=exp.var.sparse,
            )
            assert isinstance(prev.sub, Var)
            return Transformed(prev.sub, new_var)
        else:
            assert prev is exp.sub
            return exp
    else:
        raise TypeError


def purge_var_M(var, data, flags):
    """
    Build the purge expression for `var` according to the `Remove.*` bits
    in `flags`. Returns a Var/Removed/Reduced/Sorted node.
    """
    state = Var(var)
    if flags & Remove.RemoveConstant:
        var = remove_constant(state.var, data)
        if var is None:
            return Removed(state, state.var)

    if state.var.is_discrete:
        if flags & Remove.RemoveUnusedValues:
            new_attr = remove_unused_values(state.var, data)
            if new_attr is not state.var:
                state = Reduced(state, new_attr)
            # After dropping unused values a variable may have become
            # (near-)constant; remove it if requested.
            if flags & Remove.RemoveConstant and len(state.var.values) < 2:
                return Removed(state, state.var)

        if flags & Remove.SortValues:
            new_attr = sort_var_values(state.var)
            if new_attr is not state.var:
                state = Sorted(state, new_attr)

    return state


def has_at_least_two_values(data, var):
    ((dist, unknowns),) = data._compute_distributions([var])
    # TODO this check is suboptimal for sparse since get_column_view
    # densifies the data. Should be irrelevant after Pandas.
    _, sparse = data.get_column_view(var)
    if var.is_continuous:
        dist = dist[1, :]
    # For sparse columns with unknowns the implicit zeros count as a value.
    min_size = 0 if sparse and unknowns else 1
    return np.sum(dist > 0.0) > min_size


def remove_constant(var, data):
    """Return `var` unless it is a constant continuous/discrete variable,
    in which case return None."""
    if var.is_continuous:
        if not has_at_least_two_values(data, var):
            return None
        else:
            return var
    elif var.is_discrete:
        if len(var.values) < 2:
            return None
        else:
            return var
    else:
        return var


def remove_unused_values(var, data):
    """Return a copy of discrete `var` restricted to the values actually
    present in `data` (or `var` itself if all values are used)."""
    unique = nanunique(data.get_column_view(var)[0].astype(float)).astype(int)
    if len(unique) == len(var.values):
        return var
    used_values = [var.values[i] for i in unique]
    # np.nan (lowercase): np.NaN was removed in NumPy 2.0.
    translation_table = np.full(len(var.values), np.nan)
    translation_table[unique] = np.arange(len(used_values))
    return DiscreteVariable(var.name, values=used_values, sparse=var.sparse,
                            compute_value=Lookup(var, translation_table))


def sort_var_values(var):
    """Return a copy of discrete `var` with its values sorted (or `var`
    itself if they already are)."""
    newvalues = list(sorted(var.values))

    if newvalues == list(var.values):
        return var

    # Map old positions to new ones via a dict; avoids the quadratic
    # list.index lookup per value.
    new_index = {value: i for i, value in enumerate(newvalues)}
    translation_table = np.array(
        [float(new_index[value]) for value in var.values]
    )
    return DiscreteVariable(var.name, values=newvalues,
                            compute_value=Lookup(var, translation_table),
                            sparse=var.sparse)


def merge_lookup(A, B):
    """
    Merge two consecutive Lookup transforms into one.
    """
    lookup_table = np.array(A.lookup_table)
    mask = np.isfinite(lookup_table)
    indices = np.array(lookup_table[mask], dtype=int)
    lookup_table[mask] = B.lookup_table[indices]
    return Lookup(A.variable, lookup_table)
PypiClean
/Gbtestapi0.1-0.1a10-py3-none-any.whl/gailbot/core/engines/exception.py
from dataclasses import dataclass
from typing import Optional


class ConnectionError(Exception):
    """Raised when the STT service cannot be reached.

    NOTE: intentionally shadows the builtin ConnectionError; the name is
    kept because callers import it from this module.
    """

    def __str__(self) -> str:
        return "ERROR 404: STT Connection Error"


class TranscriptionError(Exception):
    """Raised when a transcription attempt fails."""

    def __init__(self, error: Optional[str] = None) -> None:
        super().__init__(error)
        # Underlying engine error message, if any.
        self.error = error

    def __str__(self) -> str:
        return f"ERROR 500: Transcription error: {self.error}"


class APIKeyError(Exception):
    """Raised when the STT API key is missing or invalid."""

    def __str__(self) -> str:
        return "ERROR 508: API key error"


class AudioFileError(Exception):
    """Raised when the input is not a valid audio file."""

    def __str__(self) -> str:
        return "ERROR 510: Not a valid audio file"


class ModelCreateError(Exception):
    """Raised when an STT model cannot be created."""

    def __str__(self) -> str:
        return "ERROR 511: Model creation error"


class WatsonMethodExecutionError(Exception):
    """Raised when a Watson STT API method fails."""

    def __str__(self) -> str:
        return "ERROR 512: Watson method execution error"


class OutPutError(Exception):
    """Raised when writing transcription output fails."""

    def __str__(self) -> str:
        return "ERROR 520: Error writing output"


class GetUttResultError(Exception):
    """Raised when the utterance result cannot be retrieved."""

    def __str__(self) -> str:
        return "ERROR 521: Failed to get utterance result"


@dataclass
class ERROR:
    """Namespace of error-message constants.

    The attributes are unannotated, so @dataclass generates no fields for
    them — they are plain class-level constants.
    """

    CONNECTION_ERROR = "ERROR 404: No internet connection"
    GOOGLE_TRANSCRIPTION_FAILED = "ERROR 501: Google STT transcription failed"
    WATSON_TRANSCRIPTION_FAILED = "ERROR 502: Watson STT transcription failed"
    WHISPER_TRANSCRIPTION_FAILED = "ERROR 503: Whisper STT transcription failed"
    AUDIO_COMPRESSION_FAILED = (
        "ERROR 505: Failed to compress large audio file to opus format"
    )
    CHILD_PROCESS_STOPPED = "ERROR 531: Child process stopped"
    CHILD_PROCESS_ERROR = "ERROR 532: Child process error"
    CHILD_PROCESS_NOT_FOUND = "ERROR 533: Child process not found"
PypiClean
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/standard/plugins/a11yhelp/dialogs/lang/sr-latn.js
/*
 Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
 For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/* Generated CKEditor localization file (sr-latn) for the a11yhelp plugin.
   Fix: "$ {elementsPathFocus}" had a stray space inside the substitution
   token, so the keystroke name was never substituted at runtime; changed
   to "${elementsPathFocus}". Translation text otherwise untouched
   (NOTE(review): it still contains the Hungarian word "hiányjel" —
   a leftover to be fixed by a translator, not here). */
CKEDITOR.plugins.setLang("a11yhelp","sr-latn",{title:"Uputstva za pomoć",contents:"Sadržaji za pomoć. Da bi ste zatvorili diјalog pritisnite ESC.",legend:[{name:"Opšte",items:[{name:"Alatke za uređivanje",legend:"Pritisnite ${toolbarFocus} da bi označili alatke. Do sledeće i prethodne grupe alatki možete doći sa tasterom TAB i SHIFT+TAB. Do tastera sledeće i predthodne grupe alatki možete doći sa tasterima STRELICA LEVO i STRELICA DESNO. Pritisnite SPACE ili ENTER da bi aktivirali taster alatki."},
{name:"Uređivač dijaloga",legend:"U prozoru dijalog pritisnite TAB da bi došli do sledećeg polja dijaloga, pritisnite ENTER za prihvatanje dijaloga, pritisnite ESC za odbijanje dijaloga. Kada dijalog ima više kartica, do njih možete doći pritiskom na ALT + F10 ili TAB. Zatim sa TAB ili STRELICA DESNO dolazite do naredne kartice."},{name:"Uređivač lokalnog menija",legend:"Pritisnite ${contextMenu} ili APPLICATION TASTER za otvaranje lokalnog menija. Zatim sa TAB ili STRELICA DOLE možete preći na sledeću tačku menija. Prethodnu opciju možete postići sa SHIFT+TAB ili STRELICA GORE. Pritisnite SPACE ili ENTER za odabir tačke menija. Pritisnite SPACE ili ENTER da bi ste otvorili podmeni trenutne stavke menija. Za povratak u glavni meni pritisnite ESC ili STRELICA DESNO. Zatvorite lokalni meni pomoću tastera ESC."},
{name:"Uređjivač liste",legend:"Do sledećеg elementa liste možete doći sa TAB ili STERLICA DOLE. Za odabir prethodnog elementa pritisnite SHIFT+TAB ili STREKICA DOLE. Za odabir elementa pritisnite SPACE ili ENTER. Sa pritiskom ESC zatvarate listu. "},{name:"Uredjivač trake puta elemenata",legend:"Pritisnite ${elementsPathFocus} da bi ste označili traku puta elenementa. Do sledećеg elementa možete doći sa TAB ili STRELICA DESNO. Do prethodnоg dolazite sa SHIFT+TAB ili STRELICA DESNO. Sa SPACE ili ENTER možete odbrati element u uredjivaču."}]},
{name:"Komanda",items:[{name:"Otkaži komandu",legend:"Pritisni ${undo}"},{name:"Prepoznavanje komande",legend:"Pritisni ${redo}"},{name:"Podebljana komanda",legend:"Pritisni ${bold}"},{name:"Kurziv komanda",legend:"Pritisni ${italic}"},{name:"Precrtana komanda",legend:"Pritisni ${underline}"},{name:"Link komanda",legend:"Pritisni ${link}"},{name:"Zatvori traku uredjivača komanda ",legend:"Pritisni ${toolbarCollapse}"},{name:"Pristup prethodnom fokus mestu komanda ",legend:"Pritisni ${accessNextSpace} da bi pristupio najbližem nedostupnom fokus mestu pre znaka hiányjel, na primer: dva susedna HR elementa.Ponovi kombinaciju tastera da pronadješ fokus mesto koje se nalazi dalje."},
{name:"Pristup sledećem fokus mestu komanda ",legend:"Pritisni ${accessNextSpace} da bi pristupio najbližem nedostupnom fokus mestu posle znaka hiányjel, na primer: dva susedna HR elementa.Ponovi kombinaciju tastera da pronadješ fokus mesto koje se nalazi dalje."},{name:"Pomoć pristupačnosti",legend:"Pritisni ${a11yHelp}"},{name:"Nalepi kao običan tekst",legend:"Pritisnite: ${pastetext}",legendEdge:"Pritisnite ${pastetext}-t, zatim ${paste}-t"}]}],tab:"Tab",pause:"Pause",capslock:"Caps Lock",escape:"Escape",
pageUp:"Page Up",pageDown:"Page Down",leftArrow:"Strelica levo",upArrow:"strelica gore",rightArrow:"strelica desno",downArrow:"strelica dole",insert:"Insert",leftWindowKey:"levi Windows-taster",rightWindowKey:"desni Windows-taster",selectKey:"Odabir tastera",numpad0:"Tasteri sa brojevima 0",numpad1:"Tasteri sa brojevima 1",numpad2:"Tasteri sa brojevima 2",numpad3:"Tasteri sa brojevima 3",numpad4:"Tasteri sa brojevima 4",numpad5:"Tasteri sa brojevima 5",numpad6:"Tasteri sa brojevima 6",numpad7:"Tasteri sa brojevima 7",
numpad8:"Tasteri sa brojevima 8",numpad9:"Tasteri sa brojevima 9",multiply:"Množenje",add:"Sabiranje",subtract:"Oduzimanje",decimalPoint:"Decimalna tačka",divide:"Deljenjje",f1:"F1",f2:"F2",f3:"F3",f4:"F4",f5:"F5",f6:"F6",f7:"F7",f8:"F8",f9:"F9",f10:"F10",f11:"F11",f12:"F12",numLock:"Num Lock",scrollLock:"Scroll Lock",semiColon:"Tačka zarez",equalSign:"Znak jednakosti",comma:"Zarez",dash:"Crtica",period:"Tačka",forwardSlash:"Kosa crta",graveAccent:"Obrnuti znak akcenta",openBracket:"Otvorena čoškasta zagrada",
backSlash:"Obrnuta kosa crta",closeBracket:"Zatvorena ćoškasta zagrada",singleQuote:"Simpli znak navoda"});
PypiClean
/MakeItSo-0.6.1.tar.gz/MakeItSo-0.6.1/makeitso/python.py
# TODO: console_scripts for all of these

import os
import shutil
import string
import sys

from cli import MakeItSoCLI
from makeitso import ContentTemplate
from optparse import OptionParser
from template import MakeItSoTemplate
from template import Variable


class SetupPy(MakeItSoTemplate):
    """template for setup.py"""
    templates = [('python_package', 'setup.py')]


class Unittest(MakeItSoTemplate):
    """template for python unittest"""
    templates = [('python_package', 'tests', 'test_{{package}}.py')]

    def pre(self, variables, output):
        """Derive the package name from the output filename when missing."""
        if output and 'package' not in variables:
            package = os.path.splitext(
                os.path.basename(output.rstrip(os.path.sep)))[0]
            # Strip the conventional test-file prefix to recover the package.
            indicator = 'test_'
            if package.startswith(indicator):
                package = package[len(indicator):]
            variables['package'] = package


class PythonTemplate(MakeItSoTemplate):
    """abstract base class for python-type templates"""
    vars = [Variable('description'),
            Variable('author', 'author of the package'),
            Variable('email', "author's email"),
            Variable('url', 'project url'),
            Variable('repo', 'project repository'),
            ]

    def output2name(self, path):
        """Return the bare name of `path` (no directory, no extension)."""
        return os.path.splitext(os.path.basename(path.rstrip(os.path.sep)))[0]


class PythonScriptTemplate(PythonTemplate):
    """template for a single python script"""
    templates = [('python_package', '{{package}}', '{{main}}.py')]
    vars = [Variable('description')]


class PythonModuleTemplate(PythonTemplate):
    """single module python package"""
    templates = ['python_module',
                 ('python_package', '{{package}}', '{{main}}.py')]
    vars = [Variable('description')]
    look = False

    def pre(self, variables, output):
        variables['project'] = variables['module'] = variables['main'] = \
            self.output2name(output)

    def post(self, variables, output):
        # The template file keeps its literal '{{main}}.py' name until after
        # interpolation; rename it to the resolved module name.
        shutil.move(os.path.join(output, '{{main}}.py'),
                    os.path.join(output, '{}.py'.format(variables['main'])))


class PythonPackageTemplate(PythonTemplate):
    """
    python package template
    """
    # TODO: get the templates you actually care about [maybe from the CLI?]
    name = 'python-package'
    templates = ['python_package']
    vars = [Variable('description'),
            Variable('author', 'author of the package'),
            Variable('email', "author's email"),
            Variable('url', 'project url'),
            Variable('repo', 'project repository'),
            ]
    look = False

    # things that go in setup.py
    dependencies = {'web.py': ['webob'],
                    'template.py': ['MakeItSo']}
    console_scripts = {'main.py': '{{project}} = {{package}}.{{main}}:main',
                       'template.py': '{{package}}-template = {{package}}.template:main'
                       }

    def pre(self, variables, output):
        """
        sanitize some variables
        """
        # get project from output directory
        variables['project'] = self.output2name(output)

        # get package name from project
        # (string.ascii_letters: `string.letters` was Python 2 only)
        allowable = set(string.ascii_letters + string.digits + '_')
        package = variables['project'].lower()
        package = ''.join([c for c in package if c in allowable])
        # TODO better: translation matrix
        variables['package'] = package

        # name of CLI main file
        variables['main'] = 'main'

        # package dependencies
        dependencies = set()
        for dependency in self.dependencies.values():
            dependencies.update(dependency)
        variables['dependencies'] = list(dependencies)

        # console_scripts
        console_scripts = list(self.console_scripts.values())
        if console_scripts:
            s = 'setup('  # placeholder used only for its width (indentation)
            script_strings = ['[console_scripts]']
            for console_script in console_scripts:
                rendered = ContentTemplate(console_script).substitute(
                    project=variables['project'],
                    package=variables['package'],
                    main=variables['main'])
                script_strings.append(rendered)
            variables['console_scripts'] = '\n'.join(
                [' ' * len(s) + i for i in script_strings])
        else:
            variables['console_scripts'] = ''


class PythonPackageCLI(MakeItSoCLI):
    """
    CLI front end for the python package template
    """
    usage = '%prog [options] project'


def main(args=None):
    """CLI"""
    # Read argv at call time, not at import time (the old default argument
    # `sys.argv[1:]` was captured when the module was first imported).
    if args is None:
        args = sys.argv[1:]
    cli = PythonPackageCLI(PythonPackageTemplate)
    cli(*args)


if __name__ == '__main__':
    main()
PypiClean
/d2lvalence-1.2.2.tar.gz/d2lvalence-1.2.2/README.rst
====================================== Desire2Learn Client Library for Python ====================================== The Python library divides functionality into a number of modules. The primary module helps with authentication. Several supporting modules can assist with making calls in several areas of the Valence API. **Authentication**. The :py:mod:`d2lvalence.auth` module provides assistance for the authentication needed to invoke Valence APIs. You use the module's functions (and perhaps also classes) to create a :py:class:`calling user context <d2lvalence.auth.D2LUserContext>` object that you can then employ in conjunction with the Requests package as an authentication helper. Installation ============ You can find the source for our Python client library SDK in two locations: * Our own `Python client GitHub repository <https://github.com/Desire2Learn-Valence/valence-sdk-python>`_. * On the PyPi `package index repository <http://pypi.python.org/pypi/D2LValence>`_, so you can install it as a package with pip or easy_install. **Dependencies**. In order to use the Python client library SDK, you'll need to first ensure you have a working Python development environment: * Python 3 (the reference environment uses Python 3.5), or Python 2.7 (via the use of the future library). * The `Requests Python package <http://docs.python-requests.org/en/latest/index.html>`_ gets included in our :py:mod:`auth <d2lvalence.auth>` module so that you can use a calling user context object as an authentication helper for Requests. * The `python-future <http://python-future.org/index.html>`_ library gets used to provide Python 2.7 compatibility. * The `Bottle Python package <http://bottlepy.org/docs/dev/>`_ if you want to use the samples available in conjunction with this client library (not a dependency for the client library itself).
PypiClean
/INGInious-0.8.7.tar.gz/INGInious-0.8.7/inginious/frontend/installer.py
import hashlib import os import tarfile import tempfile import re import urllib.request from binascii import hexlify import docker from docker.errors import BuildError from gridfs import GridFS from pymongo import MongoClient from inginious import __version__ import inginious.common.custom_yaml as yaml from inginious.frontend.user_manager import UserManager HEADER = '\033[95m' INFO = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[33m' WHITE = '\033[97m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' DOC = '\033[39m' BACKGROUND_RED = '\033[101m' class Installer: """ Custom installer for the WebApp frontend """ def __init__(self, config_path=None): self._config_path = config_path ####################################### # Display functions # ####################################### def _display_header(self, title): """ Displays an header in the console """ print("") print(BOLD + HEADER + "--- " + title + " ---" + ENDC) def _display_warning(self, content): """ Displays a warning in the console """ print(WARNING + "(WARN) " + content + ENDC) def _display_info(self, content): """ Displays an info message in the console """ print(INFO + "(INFO) " + content + ENDC) def _display_question(self, content): """ Displays a preamble to a question """ print(DOC + content + ENDC) def _display_error(self, content): """ Displays an error """ print(WHITE + BACKGROUND_RED + "(ERROR) " + content + ENDC) def _display_big_warning(self, content): """ Displays a BIG warning """ print("") print(BOLD + WARNING + "--- WARNING ---" + ENDC) print(WARNING + content + ENDC) print("") def _ask_with_default(self, question, default=""): default = str(default) answer = input(DOC + UNDERLINE + question + " [" + default + "]:" + ENDC + " ") if answer == "": answer = default return answer def _ask_boolean(self, question, default): while True: val = self._ask_with_default(question, ("yes" if default else "no")).lower() if val in ["yes", "y", "1", "true", "t"]: return True elif 
val in ["no", "n", "0", "false", "f"]: return False self._display_question("Please answer 'yes' or 'no'.") def _ask_integer(self, question, default): while True: try: return int(self._ask_with_default(question, default)) except: pass def _configure_directory(self, dirtype: str): """Configure user specified directory and create it if required""" self._display_question("Please choose a directory in which to store the %s files." % dirtype) directory = None while directory is None: directory = self._ask_with_default("%s directory" % (dirtype[0].upper()+dirtype[1:]), "./%s" % dirtype) if not os.path.exists(directory): if self._ask_boolean("Path does not exist. Create directory?", True): try: os.makedirs(directory) except FileExistsError: pass # We should never reach this part since the path is verified above except PermissionError: self._display_error("Permission denied. Are you sure of your path?\nIf yes, contact your system administrator" " or create manually the directory with the correct user permissions.\nOtherwise, you may" " enter a new path now.") directory = None else: directory = None return os.path.abspath(directory) ####################################### # Main function # ####################################### def run(self): """ Run the installator """ self._display_header("BACKEND CONFIGURATION") options = {} while True: options = {} backend = self.ask_backend() if backend == "local": self._display_info("Backend chosen: local. Testing the configuration.") options = self._ask_local_config() if not self.test_local_docker_conf(): self._display_error( "An error occurred while testing the configuration. Please make sure you are able do run `docker info` in " "your command line, and environment parameters like DOCKER_HOST are correctly set.") if self._ask_boolean("Would you like to continue anyway?", False): break else: break else: self._display_warning( "Backend chosen: manual. 
As it is a really advanced feature, you will have to configure it yourself in " "the configuration file, at the end of the setup process.") options = {"backend": backend} break self._display_header("MONGODB CONFIGURATION") mongo_opt = self.configure_mongodb() options.update(mongo_opt) self._display_header("TASK DIRECTORY") task_directory_opt = self.configure_task_directory() options.update(task_directory_opt) self._display_header("CONTAINERS") self.select_containers_to_build() self._display_header("MISC") misc_opt = self.configure_misc() options.update(misc_opt) database = self.try_mongodb_opts(options["mongo_opt"]["host"], options["mongo_opt"]["database"]) self._display_header("BACKUP DIRECTORY") backup_directory_opt = self.configure_backup_directory() options.update(backup_directory_opt) self._display_header("AUTHENTIFICATION") auth_opts = self.configure_authentication(database) options.update(auth_opts) self._display_info("You may want to add additional plugins to the configuration file.") self._display_header("REMOTE DEBUGGING - IN BROWSER") self._display_info( "If you want to activate the remote debugging of task in the users' browser, you have to install separately " "INGInious-xterm, which is available on Github, according to the parameters you have given for the hostname and the " "port range given in the configuration of the remote debugging.") self._display_info( "You can leave the following question empty to disable this feature; remote debugging will still be available, " "but not in the browser.") webterm = self._ask_with_default( "Please indicate the link to your installation of INGInious-xterm (for example: " "https://your-hostname.com:8080).", "") if webterm != "": options["webterm"] = webterm self._display_header("END") file_dir = self._config_path or os.path.join(os.getcwd(), self.configuration_filename()) try: yaml.dump(options, open(file_dir, "w")) self._display_info("Successfully written the configuration file") except: 
self._display_error("Cannot write the configuration file on disk. Here is the content of the file") print(yaml.dump(options)) ####################################### # Docker configuration # ####################################### def _ask_local_config(self): """ Ask some parameters about the local configuration """ options = {"backend": "local", "local-config": {}} # Concurrency while True: concurrency = self._ask_with_default( "Maximum concurrency (number of tasks running simultaneously). Leave it empty to use the number of " "CPU of your host.", "") if concurrency == "": break try: concurrency = int(concurrency) except: self._display_error("Invalid number") continue if concurrency <= 0: self._display_error("Invalid number") continue options["local-config"]["concurrency"] = concurrency break # Debug hostname hostname = self._ask_with_default( "What is the external hostname/address of your machine? You can leave this empty and let INGInious " "autodetect it.", "") if hostname != "": options["local-config"]["debug_host"] = hostname self._display_info( "You can now enter the port range for the remote debugging feature of INGInious. Please verify that these " "ports are open in your firewall. 
You can leave this parameters empty, the default is 64100-64200") # Debug port range port_range = None while True: start_port = self._ask_with_default("Beginning of the range", "") if start_port != "": try: start_port = int(start_port) except: self._display_error("Invalid number") continue end_port = self._ask_with_default("End of the range", str(start_port + 100)) try: end_port = int(end_port) except: self._display_error("Invalid number") continue if start_port > end_port: self._display_error("Invalid range") continue port_range = str(start_port) + "-" + str(end_port) else: break if port_range != None: options["local-config"]["debug_ports"] = port_range return options def test_local_docker_conf(self): """ Test to connect to a local Docker daemon """ try: docker_connection = docker.from_env() except Exception as e: self._display_error("- Unable to connect to Docker. Error was %s" % str(e)) return False try: self._display_info("- Asking Docker some info") if docker.utils.compare_version('1.24', docker_connection.version()['ApiVersion']) < 0: self._display_error("- Docker version >= 1.12.0 is required.") return False except Exception as e: self._display_error("- Unable to contact Docker. Error was %s" % str(e)) return False self._display_info("- Successfully got info from Docker. Docker connection works.") return True def ask_backend(self): """ Ask the user to choose the backend """ response = self._ask_boolean( "Do you have a local docker daemon (on Linux), do you use docker-machine via a local machine, or do you use " "Docker for macOS?", True) if (response): self._display_info("If you use docker-machine on macOS, please see " "http://inginious.readthedocs.io/en/latest/install_doc/troubleshooting.html") return "local" else: self._display_info( "You will have to run inginious-backend and inginious-agent yourself. 
Please run the commands without argument " "and/or read the documentation for more info") return self._display_question("Please enter the address of your backend") ####################################### # MONGODB CONFIGURATION # ####################################### def try_mongodb_opts(self, host="localhost", database_name='INGInious'): """ Try MongoDB configuration """ try: mongo_client = MongoClient(host=host) # Effective access only occurs when we call a method on the connexion mongo_version = str(mongo_client.server_info()['version']) self._display_info("Found mongodb server running version %s on %s." % (mongo_version, host)) except Exception as e: self._display_warning("Cannot connect to MongoDB on host %s: %s" % (host, str(e))) return None try: database = mongo_client[database_name] # Effective access only occurs when we call a method on the database. database.list_collection_names() except Exception as e: self._display_warning("Cannot access database %s: %s" % (database_name, str(e))) return None try: # Effective access only occurs when we call a method on the gridfs object. GridFS(database).find_one() except Exception as e: self._display_warning("Cannot access gridfs %s: %s" % (database_name, str(e))) return None return database def configure_mongodb(self): """ Configure MongoDB """ self._display_info("Trying default configuration") host = "localhost" database_name = "INGInious" should_ask = True if self.try_mongodb_opts(host, database_name) is not None: should_ask = self._ask_boolean( "Successfully connected to MongoDB. Do you want to edit the configuration anyway?", False) else: self._display_info("Cannot guess configuration for MongoDB.") while should_ask: self._display_question( "Please enter the MongoDB host. 
If you need to enter a password, here is the syntax:") self._display_question("mongodb://USERNAME:PASSWORD@HOST:PORT/AUTHENTIFICATION_DATABASE") host = self._ask_with_default("MongoDB host", host) database_name = self._ask_with_default("Database name", database_name) if not self.try_mongodb_opts(host, database_name): if self._ask_boolean("Cannot connect to MongoDB. Would you like to continue anyway?", False): break else: self._display_info("Successfully connected to MongoDB") break return {"mongo_opt": {"host": host, "database": database_name}} ####################################### # TASK DIRECTORY # ####################################### def configure_task_directory(self): """ Configure task directory """ task_directory = self._configure_directory("tasks") if os.path.exists(task_directory): self._display_question("Demonstration tasks can be downloaded to let you discover INGInious.") if self._ask_boolean("Would you like to download them ?", True): try: self._retrieve_and_extract_tarball("https://api.github.com/repos/UCL-INGI/INGInious-demo-tasks/tarball", task_directory) self._display_info("Successfully downloaded and copied demonstration tasks.") except Exception as e: self._display_error("An error occurred while copying the directory: %s" % str(e)) else: self._display_warning("Skipping copying the 'test' course because the task dir does not exists") return {"tasks_directory": task_directory} ####################################### # CONTAINERS # ####################################### def _build_container(self, name, folder): self._display_info("Building container {}...".format(name)) docker_connection = docker.from_env() docker_connection.images.build(path=folder, tag=name) self._display_info("done.".format(name)) def select_containers_to_build(self): #If on a dev branch, download from github master branch (then manually rebuild if needed) #If on an pip installed version, download with the correct tag if not self._ask_boolean("Build the default containers? 
This is highly recommended, and is required to build other containers.", True): self._display_info("Skipping container building.") return # Mandatory images: stock_images = [] try: docker_connection = docker.from_env() for image in docker_connection.images.list(): for tag in image.attrs["RepoTags"]: if re.match(r"^ingi/inginious-c-(base|default):v" + __version__, tag): stock_images.append(tag) except: self._display_info(FAIL + "Cannot connect to Docker!" + ENDC) self._display_info(FAIL + "Restart this command after making sure the command `docker info` works" + ENDC) return # If there are already available images, ask to rebuild or not if len(stock_images) >= 2: self._display_info("You already have the minimum required images for version " + __version__) if not self._ask_boolean("Do you want to re-build them ?", "yes"): self._display_info("Continuing with previous images. If you face issues, run inginious-container-update") return try: with tempfile.TemporaryDirectory() as tmpdirname: self._display_info("Downloading the base container source directory...") if "dev" in __version__: tarball_url = "https://api.github.com/repos/UCL-INGI/INGInious/tarball" containers_version = "dev (github branch master)" dev = True else: tarball_url = "https://api.github.com/repos/UCL-INGI/INGInious/tarball/v" + __version__ containers_version = __version__ dev = False self._display_info("Downloading containers for version:" + containers_version) self._retrieve_and_extract_tarball(tarball_url, tmpdirname) self._build_container("ingi/inginious-c-base", os.path.join(tmpdirname, "base-containers", "base")) self._build_container("ingi/inginious-c-default", os.path.join(tmpdirname, "base-containers", "default")) if dev: self._display_info("If you modified files in base-containers folder, don't forget to rebuild manually to make these changes effective !") # Other non-mandatory containers: with tempfile.TemporaryDirectory() as tmpdirname: self._display_info("Downloading the other containers 
source directory...") self._retrieve_and_extract_tarball( "https://api.github.com/repos/UCL-INGI/INGInious-containers/tarball", tmpdirname) todo = {"ingi/inginious-c-base": None, "ingi/inginious-c-default": "ingi/inginious-c-base"} available_containers = set(os.listdir(os.path.join(tmpdirname, 'grading'))) self._display_info("Done.") def add_container(container): if container in todo: return line_from = \ [l for l in open(os.path.join(tmpdirname, 'grading', container, 'Dockerfile')).read().split("\n") if l.startswith("FROM")][0] supercontainer = line_from.strip()[4:].strip().split(":")[0] if supercontainer.startswith("ingi/") and supercontainer not in todo: self._display_info( "Container {} requires container {}, I'll build it too.".format(container, supercontainer)) add_container(supercontainer) todo[container] = supercontainer if supercontainer.startswith("ingi/") else None self._display_info("The following containers can be built:") for container in available_containers: self._display_info("\t" + container) while True: answer = self._ask_with_default( "Indicate the name of a container to build, or press enter to continue") if answer == "": break if answer not in available_containers: self._display_warning("Unknown container. Please retry") else: self._display_info("Ok, I'll build container {}".format(answer)) add_container(answer) done = {"ingi/inginious-c-base", "ingi/inginious-c-default"} del todo["ingi/inginious-c-base"] del todo["ingi/inginious-c-default"] while len(todo) != 0: todo_now = [x for x, y in todo.items() if y is None or y in done] for x in todo_now: del todo[x] for container in todo_now: try: self._build_container("ingi/inginious-c-{}".format(container), os.path.join(tmpdirname, 'grading', container)) except BuildError: self._display_error( "An error occured while building the container. 
Please retry manually.") except Exception as e: self._display_error("An error occurred while copying the directory: {}".format(e)) ####################################### # MISC # ####################################### def configure_misc(self): """ Configure various things """ options = {} options["use_minified_js"] = self._ask_boolean( "Use minified javascript? (Useful in production, but should be disabled in dev environment)", True) return options def configure_backup_directory(self): """ Configure backup directory """ return {"backup_directory": self._configure_directory("backups")} def ldap_plugin(self): """ Configures the LDAP plugin """ name = self._ask_with_default("Authentication method name (will be displayed on the login page)", "LDAP") prefix = self._ask_with_default("Prefix to append to the username before db storage. Usefull when you have more than one auth method with " "common usernames.", "") ldap_host = self._ask_with_default("LDAP Host", "ldap.your.domain.com") encryption = 'none' while True: encryption = self._ask_with_default("Encryption (either 'ssl', 'tls', or 'none')", 'none') if encryption not in ['none', 'ssl', 'tls']: self._display_error("Invalid value") else: break base_dn = self._ask_with_default("Base DN", "ou=people,c=com") request = self._ask_with_default("Request to find a user. 
'{}' will be replaced by the username", "uid={}") require_cert = self._ask_boolean("Require certificate validation?", encryption is not None) return { "plugin_module": "inginious.frontend.plugins.auth.ldap_auth", "host": ldap_host, "encryption": encryption, "base_dn": base_dn, "request": request, "prefix": prefix, "name": name, "require_cert": require_cert } def configure_authentication(self, database): """ Configure the authentication """ options = {"plugins": [], "superadmins": []} self._display_info("We will now create the first user.") username = self._ask_with_default("Enter the login of the superadmin", "superadmin") realname = self._ask_with_default("Enter the name of the superadmin", "INGInious SuperAdmin") email = None while not email: email = self._ask_with_default("Enter the email address of the superadmin", "superadmin@inginious.org") email = UserManager.sanitize_email(email) if email is None: self._display_error("Invalid email format.") password = self._ask_with_default("Enter the password of the superadmin", "superadmin") database.users.insert_one({"username": username, "realname": realname, "email": email, "password": UserManager.hash_password(password), "bindings": {}, "language": "en"}) options["superadmins"].append(username) while True: if not self._ask_boolean("Would you like to add another auth method?", False): break self._display_info("You can choose an authentication plugin between:") self._display_info("- 1. LDAP auth plugin. 
This plugin allows to connect to a distant LDAP host.") self._display_info("There are other plugins available that are not configurable directly by inginious-install.") self._display_info("Please consult the online documentation to install them yourself.") plugin = self._ask_with_default("Enter the corresponding number to your choice", 'skip') if plugin == '1': options["plugins"].append(self.ldap_plugin()) else: continue options["session_parameters"] = {} options["session_parameters"]['timeout'] = self._ask_integer("How much time should a user stay connected, " "in seconds? The default is 86400, one day.", 86400) options["session_parameters"]['ignore_change_ip'] = not self._ask_boolean("Should user be disconnected when " "their IP changes? It may prevent " "cookie stealing.", True) options["session_parameters"]['secure'] = self._ask_boolean("Do you plan to serve your INGInious instance only" " in HTTPS?", False) options["session_parameters"]['secret_key'] = hexlify(os.urandom(32)).decode('utf-8') return options def configuration_filename(self): """ Returns the name of the configuration file """ return "configuration.yaml" def support_remote_debugging(self): """ Returns True if the frontend supports remote debugging, False else""" return True def _retrieve_and_extract_tarball(self, link, folder): filename, _ = urllib.request.urlretrieve(link) with tarfile.open(filename, mode="r:gz") as thetarfile: members = thetarfile.getmembers() commonpath = os.path.commonpath([tarinfo.name for tarinfo in members]) for member in members: member.name = member.name[len(commonpath) + 1:] if member.name: thetarfile.extract(member, folder)
PypiClean
/Hikka_Pyro-2.0.66-py3-none-any.whl/pyrogram/storage/sqlite_storage.py
import inspect
import sqlite3
import time
from threading import Lock
from typing import List, Tuple, Any

from pyrogram import raw

from .storage import Storage
from .. import utils

# language=SQLite
SCHEMA = """
CREATE TABLE sessions
(
    dc_id     INTEGER PRIMARY KEY,
    api_id    INTEGER,
    test_mode INTEGER,
    auth_key  BLOB,
    date      INTEGER NOT NULL,
    user_id   INTEGER,
    is_bot    INTEGER
);

CREATE TABLE peers
(
    id             INTEGER PRIMARY KEY,
    access_hash    INTEGER,
    type           INTEGER NOT NULL,
    username       TEXT,
    phone_number   TEXT,
    last_update_on INTEGER NOT NULL DEFAULT (CAST(STRFTIME('%s', 'now') AS INTEGER))
);

CREATE TABLE version
(
    number INTEGER PRIMARY KEY
);

CREATE INDEX idx_peers_id ON peers (id);
CREATE INDEX idx_peers_username ON peers (username);
CREATE INDEX idx_peers_phone_number ON peers (phone_number);

CREATE TRIGGER trg_peers_last_update_on
    AFTER UPDATE
    ON peers
BEGIN
    UPDATE peers
    SET last_update_on = CAST(STRFTIME('%s', 'now') AS INTEGER)
    WHERE id = NEW.id;
END;
"""


def get_input_peer(peer_id: int, access_hash: int, peer_type: str):
    """Map a stored ``(id, access_hash, type)`` peer row to an InputPeer.

    Raises ValueError for an unknown *peer_type*.
    """
    if peer_type in ["user", "bot"]:
        return raw.types.InputPeerUser(
            user_id=peer_id,
            access_hash=access_hash
        )

    if peer_type == "group":
        return raw.types.InputPeerChat(
            chat_id=-peer_id
        )

    if peer_type in ["channel", "supergroup"]:
        return raw.types.InputPeerChannel(
            channel_id=utils.get_channel_id(peer_id),
            access_hash=access_hash
        )

    raise ValueError(f"Invalid peer type: {peer_type}")


class SQLiteStorage(Storage):
    """SQLite-backed session/peer storage.

    The session accessors (``dc_id``, ``api_id``, ...) all delegate to
    ``_accessor``: ``_get``/``_set`` read the *caller's* function name via
    ``inspect.stack()`` and use it as the ``sessions`` column name. The
    builtin ``object`` class serves as the "no value passed" sentinel, so the
    same coroutine acts as getter (no argument) and setter (one argument).
    """

    VERSION = 3
    USERNAME_TTL = 8 * 60 * 60  # cached usernames are valid for 8 hours

    def __init__(self, name: str):
        super().__init__(name)

        # Set by subclasses in open(); guarded by self.lock for writes.
        self.conn: sqlite3.Connection = None
        self.lock = Lock()

    def create(self):
        """Create the schema and seed the singleton session row (DC 2)."""
        with self.lock, self.conn:
            self.conn.executescript(SCHEMA)

            self.conn.execute(
                "INSERT INTO version VALUES (?)",
                (self.VERSION,)
            )

            self.conn.execute(
                "INSERT INTO sessions VALUES (?, ?, ?, ?, ?, ?, ?)",
                (2, None, None, None, 0, None, None)
            )

    async def open(self):
        raise NotImplementedError

    async def save(self):
        """Persist the current time as the session date and commit."""
        await self.date(int(time.time()))

        with self.lock:
            self.conn.commit()

    async def close(self):
        with self.lock:
            self.conn.close()

    async def delete(self):
        raise NotImplementedError

    async def update_peers(self, peers: List[Tuple[int, int, str, str, str]]):
        """Upsert ``(id, access_hash, type, username, phone_number)`` rows."""
        with self.lock:
            # BUG FIX: a space was missing at the string-concatenation seam
            # (")VALUES"); kept the SQL otherwise identical.
            self.conn.executemany(
                "REPLACE INTO peers (id, access_hash, type, username, phone_number) "
                "VALUES (?, ?, ?, ?, ?)",
                peers
            )

    async def get_peer_by_id(self, peer_id: int):
        r = self.conn.execute(
            "SELECT id, access_hash, type FROM peers WHERE id = ?",
            (peer_id,)
        ).fetchone()

        if r is None:
            raise KeyError(f"ID not found: {peer_id}")

        return get_input_peer(*r)

    async def get_peer_by_username(self, username: str):
        # BUG FIX: space added at the concatenation seam ("?ORDER").
        r = self.conn.execute(
            "SELECT id, access_hash, type, last_update_on FROM peers WHERE username = ? "
            "ORDER BY last_update_on DESC",
            (username,)
        ).fetchone()

        if r is None:
            raise KeyError(f"Username not found: {username}")

        # Usernames can be reassigned; treat cached entries older than the
        # TTL as unknown so callers re-resolve them.
        if abs(time.time() - r[3]) > self.USERNAME_TTL:
            raise KeyError(f"Username expired: {username}")

        return get_input_peer(*r[:3])

    async def get_peer_by_phone_number(self, phone_number: str):
        r = self.conn.execute(
            "SELECT id, access_hash, type FROM peers WHERE phone_number = ?",
            (phone_number,)
        ).fetchone()

        if r is None:
            raise KeyError(f"Phone number not found: {phone_number}")

        return get_input_peer(*r)

    def _get(self):
        # Two frames up is the accessor coroutine (dc_id, api_id, ...); its
        # name doubles as the sessions column to read.
        attr = inspect.stack()[2].function

        return self.conn.execute(
            f"SELECT {attr} FROM sessions"
        ).fetchone()[0]

    def _set(self, value: Any):
        attr = inspect.stack()[2].function

        with self.lock, self.conn:
            self.conn.execute(
                f"UPDATE sessions SET {attr} = ?",
                (value,)
            )

    def _accessor(self, value: Any = object):
        # `object` (the class itself) is the "no argument" sentinel:
        # compare by identity (`is`), not equality.
        return self._get() if value is object else self._set(value)

    async def dc_id(self, value: int = object):
        return self._accessor(value)

    async def api_id(self, value: int = object):
        return self._accessor(value)

    async def test_mode(self, value: bool = object):
        return self._accessor(value)

    async def auth_key(self, value: bytes = object):
        return self._accessor(value)

    async def date(self, value: int = object):
        return self._accessor(value)

    async def user_id(self, value: int = object):
        return self._accessor(value)

    async def is_bot(self, value: bool = object):
        return self._accessor(value)

    def version(self, value: int = object):
        """Get (no argument) or set (one argument) the schema version."""
        if value is object:
            return self.conn.execute(
                "SELECT number FROM version"
            ).fetchone()[0]
        else:
            with self.lock, self.conn:
                self.conn.execute(
                    "UPDATE version SET number = ?",
                    (value,)
                )
PypiClean
/DMS_APP-0.2.1-py3-none-any.whl/dms_app/resources/schema/schema_data.py
from flask import request
from flask_restx import Resource, fields
from ...db.db_connection import database_access
from ...namespace import api
import logging
from ...response_helper import get_response
import json
from bson import json_util
from ..login_register.login import token_required, token_required_get

# Request model for POST: a full list of field definitions.
post_schema = api.model("SchemaAdd", {
    "schema": fields.Raw(
        [],
        required=True,
        example=[
            {
                "name": "person_id",
                "display_name": "Person ID",
                "data_type": "string",
                "is_required": "false",
                "default_value": "",
                "length": 0,
                "is_unique": "false",
                "is_key": "false",
                "is_hidden": "false",
                "enter_values": "",
                "componentType": "",
                "dateTimeFormat": "%d-%m-%Y"
            },
        ]
    )
})

# Request model for PUT: one field definition, matched by "name".
put_schema = api.model("SchemaUpdate", {
    "name": fields.String,
    "display_name": fields.String,
    "data_type": fields.String,
    "is_required": fields.String,
    "default_value": fields.String,
    "length": fields.Integer,
    "is_unique": fields.String,
    "is_key": fields.String,
    "is_hidden": fields.String,
    "enter_values": fields.String,
    "componentType": fields.String,
    "dateTimeFormat": fields.String
})


class AddSchema(Resource):
    """CRUD endpoints for the stored document schema.

    The ``schema`` collection holds documents of the form
    ``{"schema": [<field definition>, ...]}``. POST replaces the stored
    schema wholesale; PUT updates a single field definition by its ``name``.
    """

    @token_required_get
    def get(self, *args):
        """Return all stored schema documents, or 404 when none exist."""
        try:
            database_connection = database_access()
            schema_col = database_connection["schema"]
            data = list(schema_col.find())
            if data:  # idiomatic emptiness test (was `if len(data):`)
                _response = get_response(200)
                # json_util round-trip serializes BSON types (ObjectId, ...)
                _response["data"] = json.loads(json_util.dumps(data))
                return _response
            else:
                return get_response(404)
        except Exception as e:
            _response = get_response(404)
            _response['message'] = 'Failed to Get Schema'
            logging.error(e)
            return _response

    @token_required
    @api.expect(post_schema)
    def post(self, *args):
        """Replace the stored schema with the request body's ``schema`` list."""
        args = request.get_json()
        try:
            database_connection = database_access()
            schema_col = database_connection["schema"]
            # Only one schema document may exist: wipe before inserting.
            schema_col.delete_many({})
            schema_col.insert_one(
                {"schema": args["schema"]})
            logging.info(get_response(200))
            return get_response(200)
        except Exception as e:
            _response = get_response(404)
            _response['message'] = 'Failed to Store Schema'
            logging.error(e)
            return _response

    @token_required
    @api.expect(put_schema)
    def put(self, *args):
        """Update one field definition (matched by ``name``) in place."""
        args = request.get_json()
        try:
            database_connection = database_access()
            schema_col = database_connection["schema"]
            if schema_col.find_one({"schema.name": args["name"]}):
                # The `$` positional operator targets the array element
                # matched by the query.
                schema_col.update_one(
                    {"schema.name": args["name"]},
                    {'$set': {
                        "schema.$.display_name": args['display_name'],
                        "schema.$.data_type": args['data_type'],
                        "schema.$.is_required": args['is_required'],
                        "schema.$.default_value": args['default_value'],
                        "schema.$.length": args['length'],
                        "schema.$.is_unique": args['is_unique'],
                        "schema.$.is_key": args['is_key'],
                        "schema.$.is_hidden": args['is_hidden'],
                        "schema.$.enter_values": args['enter_values'],
                        "schema.$.componentType": args['componentType'],
                        "schema.$.dateTimeFormat": args['dateTimeFormat']}}
                )
                logging.info(get_response(200))
                return get_response(200)
            else:
                logging.info(get_response(404))
                return get_response(404)
        except Exception as e:
            _response = get_response(404)
            _response['message'] = 'Failed to Update Schema'
            logging.error(e)
            return _response
PypiClean
/Nikwus-0.5.4.zip/Nikwus-0.5.4/nikwus/__init__.py
from PIL import Image
import cssutils
import logging
import os.path

# Default spacing (in px) between icons packed into a sprite.
SPRITE_OFFSET = 10


def as_bool(value):
    """Converts a value into Boolean."""
    return str(value).lower() in ('1', 'true', 'on', 'yes')


def normalize_filename(url):
    """
    Normalize file names from file URL to local file names.

    Filenames start with `file:///` on Windows and `file://` on Unix.
    """
    fname = url.replace('file://', '')
    if os.sep != '/' and not os.path.exists(fname):
        fname = fname.lstrip('/')
    return fname


class Sprite(object):
    # The name of this sprite
    name = None

    # Autosize set to True means we automatically add width and height
    # properties.
    autosize = True

    # The CSS declarations where the background image should be inserted.
    # For each resolution there is one entry in the dict.
    # If this is not set for a sprite, then the background image is inserted
    # for every declaration that uses any of the images in the sprite.
    selector_declarations = None

    # A list of CSS declarations which contain images for this sprite
    image_declarations = None

    # Horizontal and vertical spacing of icons
    space_x = SPRITE_OFFSET
    space_y = SPRITE_OFFSET

    def __init__(self, name):
        self.name = name
        self.image_declarations = []
        self.selector_declarations = {}

    def generate(self, directory, reldir, offset=None):
        """Write an image for this sprite into the directory."""
        # FIX: `print` as a function -- the old `print '...'` statement is a
        # SyntaxError on Python 3; this form behaves identically on 2 and 3.
        print('writing {0} into {1}'.format(self.name, directory))
        if offset is not None:
            self.space_x = self.space_y = offset

        if self.selector_declarations and not self.selector_declarations.get(1):
            raise ValueError('Missing sprite selector for default resolution')

        # Load images and determine the target image width/height
        if self.selector_declarations:
            resolutions = self.selector_declarations.keys()
        else:
            resolutions = [1]
        images = self._load_images(resolutions)
        distribution = self._calculate_distribution(images)

        # Create and save the sprites
        sprites = self._create_sprites(directory, reldir, resolutions,
                                       distribution)

        # Change the CSS rules for the sprite
        self._rewrite_css(distribution, sprites)

    def _load_images(self, resolutions=None):
        """Load the images from the image declarations.

        For each image we load every resolution. Checks that the images have
        the correct size, e.g. that an image of resolution 2 is in fact two
        times bigger than the original.
        """
        images = {}
        for block, url in self.image_declarations:
            file_name = normalize_filename(url)
            if file_name not in images:
                img_resolutions = {}
                img = Image.open(file_name)
                img_resolutions[1] = img
                width, height = img.size

                if resolutions:
                    for resolution in resolutions:
                        # Get the correct filename for this resolution
                        if resolution != 1:
                            root, ext = os.path.splitext(file_name)
                            res_file_name = '{root}-{resolution}x{ext}'.format(
                                root=root, resolution=resolution, ext=ext)
                            img = Image.open(res_file_name)
                            if img.size[0] / resolution != width:
                                raise ValueError('Invalid width for {0}'.format(
                                    res_file_name))
                            if img.size[1] / resolution != height:
                                raise ValueError('Invalid height for {0}'.format(
                                    res_file_name))
                            img_resolutions[resolution] = img

                images[file_name] = img_resolutions
        return images

    def _calculate_distribution(self, images):
        """Calculate the best distribution of the sprite image.

        Icons are laid out in one horizontal strip (offset_y is always 0),
        separated by ``space_x`` pixels.
        """
        positions = {}
        target_width = target_height = 0
        for block, url in self.image_declarations:
            file_name = normalize_filename(url)
            if file_name in positions:
                positions[file_name]['blocks'].append(block)
            else:
                block_images = images[file_name]
                offset_x = target_width
                offset_y = 0
                width, height = block_images[1].size
                positions[file_name] = {
                    'x': offset_x,
                    'y': offset_y,
                    'width': width,
                    'height': height,
                    'file_name': file_name,
                    'images': block_images,
                    'blocks': [block]
                }

                target_width += width + self.space_x
                target_height = max(target_height, height)
        target_width -= self.space_x

        return {
            'width': target_width,
            'height': target_height,
            'positions': positions.values(),
        }

    def _create_sprites(self, directory, reldir, resolutions, distribution):
        """Write one sprite image per resolution; returns {resolution: info}.

        Returns None when the sprite contains no images.
        """
        if not distribution['positions']:
            return

        # Create sprite images
        sprites = {}
        for resolution in resolutions:
            if resolution == 1:
                sprite_fname = self.name + '.png'
            else:
                sprite_fname = '{0}-{1}x.png'.format(self.name, resolution)

            # Make the sprite URL relative to the CSS file
            if reldir:
                sprite_url = reldir + '/' + sprite_fname
            else:
                sprite_url = sprite_fname

            image = Image.new(
                mode='RGBA',
                size=(distribution['width'] * resolution,
                      distribution['height'] * resolution),
                color=(0, 0, 0, 0))

            # Place images on sprite and edit the CSS rules
            for pos in distribution['positions']:
                pos_x = pos['x']
                pos_y = pos['y']
                pos_img = pos['images'][resolution]
                image.paste(pos_img, (pos_x * resolution, pos_y * resolution))

            sprites[resolution] = {
                'url': sprite_url,
                'img': image,
            }
            image.save(os.path.join(directory, sprite_fname))
        return sprites

    def _rewrite_css(self, distribution, sprites):
        """Point all recorded CSS declarations at the generated sprites."""
        # FIX: _create_sprites returns None for a sprite without images;
        # previously this crashed below on `sprites[1]`.
        if not sprites:
            return

        default_width, default_height = self._get_default_size()

        # Edit the CSS rules
        for pos in distribution['positions']:
            sprite_url = sprites[1]['url']
            for block in pos['blocks']:
                self._rewrite_css_block(block, pos, sprite_url,
                                        default_width, default_height)

        # FIX: .items() instead of the Python-2-only .iteritems()
        for resolution, decl in self.selector_declarations.items():
            sprite_url = sprites[resolution]['url']
            if resolution == 1:
                decl.setProperty('background',
                                 'url({0}) no-repeat 0 0'.format(sprite_url))
            else:
                decl.setProperty('background-image',
                                 'url({0})'.format(sprite_url))
                decl.setProperty('background-size', '{0}px {1}px'.format(
                    distribution['width'], distribution['height']))

    def _rewrite_css_block(self, block, pos, sprite_url, default_width,
                           default_height):
        """Rewrite an individual block."""
        pos_x = pos['x']
        pos_y = pos['y']
        width = pos['width']
        height = pos['height']

        # NOTE(review): pos_y carries no 'px' unit; harmless today because
        # the layout is a horizontal strip (pos_y is always 0).
        if self.selector_declarations:
            block.removeProperty('background')
            if pos_x > 0 or pos_y > 0:
                block.setProperty('background-position', '{0}px {1}'.format(
                    0 - pos_x, pos_y))
        else:
            block.setProperty('background',
                              'url({2}) no-repeat {0}px {1}'.format(
                                  0 - pos_x, pos_y, sprite_url))

        if self.autosize:
            if not block.getPropertyValue('width') and not block.getPropertyValue('height'):
                width_str = '{0}px'.format(width)
                height_str = '{0}px'.format(height)
                if width_str != default_width or height_str != default_height:
                    block.setProperty('width', width_str)
                    block.setProperty('height', height_str)

    def _get_default_size(self):
        """Return the default sizes of the icons.

        Used for autosizing to decide whether we need to include the
        width/height of the image in the block.
        """
        default_width = default_height = ''
        if self.selector_declarations:
            default_width = self.selector_declarations[1].getPropertyValue('width')
            default_height = self.selector_declarations[1].getPropertyValue('height')
        return default_width, default_height


def sprite(directory, cssfile, outfile=None, offset=SPRITE_OFFSET):
    """Generate sprite images for *cssfile* into *directory* and rewrite the CSS."""
    # Silence cssutils' noisy parser warnings.
    logger = logging.getLogger('cssutils')
    logger.setLevel(logging.FATAL)
    cssutils.log.setLog(logger)

    if outfile is None:
        outfile = cssfile
    style_sheet = cssutils.parseFile(cssfile, validate=False)

    # Calculate relative directory name from CSS file to the output directory
    reldir = os.path.relpath(directory, os.path.dirname(cssfile))
    reldir = reldir.replace('\\', '/').rstrip('/')
    if reldir == '.':
        reldir = ''

    # Name the default sprite the same as the CSS file
    default_sprite_name, _ = os.path.splitext(os.path.basename(cssfile))
    sprites = get_sprites(style_sheet.cssRules, default_sprite_name)
    for sprite in sprites:
        sprite.generate(directory, reldir, offset=offset)

    with open(outfile, 'wb') as f:
        f.write(fixup_css(style_sheet.cssText))
    return True


def fixup_css(text):
    """Apply a workaround for CSSUtils, which replaces ``\\0`` tokens in the
    code with actual 0-bytes. ``\\0`` is used as an IE hack, e.g. by
    Twitter's Bootstrap.
    """
    # FIX: cssText is `bytes` on Python 3 cssutils; handle both types
    # (on Python 2 `str` is bytes, so the first branch preserves behavior).
    if isinstance(text, bytes):
        return text.replace(b'\x00', b'\\0')
    return text.replace('\x00', '\\0')


def get_sprites(rules, default_sprite_name, sprites=None):
    """Returns a dict of all sprites that need to be created from the given
    rules. The key is the name of the sprite, the value is a `Sprite`
    instance.

    This is used recursively, in which case `sprites` is set to the dict
    that is returned in the end.
    """
    if sprites is None:
        sprites = {}

    for rule in rules:
        if hasattr(rule, 'style'):
            _process_rule(rule, sprites, default_sprite_name)
        if hasattr(rule, 'cssRules'):
            get_sprites(rule.cssRules, default_sprite_name, sprites)

    return sprites.values()


def _process_rule(rule, sprites, default_sprite_name):
    """Process an individual rule for sprite preparation."""
    block = rule.style
    sprite_name = block.getPropertyValue('-sprite-name')
    sprite_autosize = block.getPropertyValue('-sprite-autosize')
    sprite_selector = block.getPropertyValue('-sprite-selector')
    sprite_on = block.getPropertyValue('-sprite')

    background = block.getPropertyCSSValue('background')
    background_image = None
    if background:
        for val in background:
            if isinstance(val, cssutils.css.URIValue):
                background_image = val
                break

    if not sprite_selector and not background_image:
        return

    # A selector may carry a resolution suffix, e.g. "-sprite-selector: icons 2x"
    sprite_resolution = 1
    if sprite_selector and ' ' in sprite_selector:
        sprite_selector, sprite_resolution = sprite_selector.split(' ', 1)
        if not sprite_resolution.endswith('x'):
            raise ValueError('Invalid sprite resolution: {0}'.format(
                sprite_resolution))
        if not sprite_resolution[:-1].isnumeric():
            raise ValueError('Invalid sprite resolution: {0}'.format(
                sprite_resolution))
        sprite_resolution = int(sprite_resolution[:-1])

    sprite_name = sprite_selector or sprite_name or default_sprite_name
    if sprite_name == 'default':
        sprite_name = default_sprite_name
    sprite = sprites.setdefault(sprite_name, Sprite(sprite_name))

    block.removeProperty('-sprite-name')

    if sprite_selector:
        block.removeProperty('-sprite-selector')
        if sprite_resolution in sprite.selector_declarations:
            raise ValueError(
                'Multiple sprite-selectors for %s at resolution %d',
                sprite_selector, sprite_resolution)
        sprite.selector_declarations[sprite_resolution] = block

    if sprite_autosize:
        block.removeProperty('-sprite-autosize')
        sprite.autosize = as_bool(sprite_autosize)

    if sprite_on:
        block.removeProperty('-sprite')

    if background_image:
        if sprite_on:
            sprite_on = as_bool(sprite_on)
        else:
            # Spriting is turned off by default for GIF images
            sprite_on = not background_image.absoluteUri.endswith('.gif')
        if sprite_on:
            sprite.image_declarations.append(
                (block, background_image.absoluteUri))
PypiClean
/Abies-0.0.5.tar.gz/Abies-0.0.5/extern/pybind11/docs/advanced/smart_ptrs.rst
Smart pointers ############## std::unique_ptr =============== Given a class ``Example`` with Python bindings, it's possible to return instances wrapped in C++11 unique pointers, like so .. code-block:: cpp std::unique_ptr<Example> create_example() { return std::unique_ptr<Example>(new Example()); } .. code-block:: cpp m.def("create_example", &create_example); In other words, there is nothing special that needs to be done. While returning unique pointers in this way is allowed, it is *illegal* to use them as function arguments. For instance, the following function signature cannot be processed by pybind11. .. code-block:: cpp void do_something_with_example(std::unique_ptr<Example> ex) { ... } The above signature would imply that Python needs to give up ownership of an object that is passed to this function, which is generally not possible (for instance, the object might be referenced elsewhere). std::shared_ptr =============== The binding generator for classes, :class:`class_`, can be passed a template type that denotes a special *holder* type that is used to manage references to the object. If no such holder type template argument is given, the default for a type named ``Type`` is ``std::unique_ptr<Type>``, which means that the object is deallocated when Python's reference count goes to zero. It is possible to switch to other types of reference counting wrappers or smart pointers, which is useful in codebases that rely on them. For instance, the following snippet causes ``std::shared_ptr`` to be used instead. .. code-block:: cpp py::class_<Example, std::shared_ptr<Example> /* <- holder type */> obj(m, "Example"); Note that any particular class can only be associated with a single holder type. One potential stumbling block when using holder types is that they need to be applied consistently. Can you guess what's broken about the following binding code? .. 
code-block:: cpp class Child { }; class Parent { public: Parent() : child(std::make_shared<Child>()) { } Child *get_child() { return child.get(); } /* Hint: ** DON'T DO THIS ** */ private: std::shared_ptr<Child> child; }; PYBIND11_MODULE(example, m) { py::class_<Child, std::shared_ptr<Child>>(m, "Child"); py::class_<Parent, std::shared_ptr<Parent>>(m, "Parent") .def(py::init<>()) .def("get_child", &Parent::get_child); } The following Python code will cause undefined behavior (and likely a segmentation fault). .. code-block:: python from example import Parent print(Parent().get_child()) The problem is that ``Parent::get_child()`` returns a pointer to an instance of ``Child``, but the fact that this instance is already managed by ``std::shared_ptr<...>`` is lost when passing raw pointers. In this case, pybind11 will create a second independent ``std::shared_ptr<...>`` that also claims ownership of the pointer. In the end, the object will be freed **twice** since these shared pointers have no way of knowing about each other. There are two ways to resolve this issue: 1. For types that are managed by a smart pointer class, never use raw pointers in function arguments or return values. In other words: always consistently wrap pointers into their designated holder types (such as ``std::shared_ptr<...>``). In this case, the signature of ``get_child()`` should be modified as follows: .. code-block:: cpp std::shared_ptr<Child> get_child() { return child; } 2. Adjust the definition of ``Child`` by specifying ``std::enable_shared_from_this<T>`` (see cppreference_ for details) as a base class. This adds a small bit of information to ``Child`` that allows pybind11 to realize that there is already an existing ``std::shared_ptr<...>`` and communicate with it. In this case, the declaration of ``Child`` should look as follows: .. _cppreference: http://en.cppreference.com/w/cpp/memory/enable_shared_from_this .. 
code-block:: cpp class Child : public std::enable_shared_from_this<Child> { }; .. _smart_pointers: Custom smart pointers ===================== pybind11 supports ``std::unique_ptr`` and ``std::shared_ptr`` right out of the box. For any other custom smart pointer, transparent conversions can be enabled using a macro invocation similar to the following. It must be declared at the top namespace level before any binding code: .. code-block:: cpp PYBIND11_DECLARE_HOLDER_TYPE(T, SmartPtr<T>); The first argument of :func:`PYBIND11_DECLARE_HOLDER_TYPE` should be a placeholder name that is used as a template parameter of the second argument. Thus, feel free to use any identifier, but use it consistently on both sides; also, don't use the name of a type that already exists in your codebase. The macro also accepts a third optional boolean parameter that is set to false by default. Specify .. code-block:: cpp PYBIND11_DECLARE_HOLDER_TYPE(T, SmartPtr<T>, true); if ``SmartPtr<T>`` can always be initialized from a ``T*`` pointer without the risk of inconsistencies (such as multiple independent ``SmartPtr`` instances believing that they are the sole owner of the ``T*`` pointer). A common situation where ``true`` should be passed is when the ``T`` instances use *intrusive* reference counting. Please take a look at the :ref:`macro_notes` before using this feature. By default, pybind11 assumes that your custom smart pointer has a standard interface, i.e. provides a ``.get()`` member function to access the underlying raw pointer. If this is not the case, pybind11's ``holder_helper`` must be specialized: .. 
code-block:: cpp // Always needed for custom holder types PYBIND11_DECLARE_HOLDER_TYPE(T, SmartPtr<T>); // Only needed if the type's `.get()` goes by another name namespace pybind11 { namespace detail { template <typename T> struct holder_helper<SmartPtr<T>> { // <-- specialization static const T *get(const SmartPtr<T> &p) { return p.getPointer(); } }; }} The above specialization informs pybind11 that the custom ``SmartPtr`` class provides ``.get()`` functionality via ``.getPointer()``. .. seealso:: The file :file:`tests/test_smart_ptr.cpp` contains a complete example that demonstrates how to work with custom reference-counting holder types in more detail.
PypiClean
/CherryPy-18.8.0.tar.gz/CherryPy-18.8.0/cherrypy/test/modfastcgi.py
"""Helper module for running CherryPy's test suite behind Apache mod_fastcgi."""

import os
import re
import subprocess

import cherrypy
from cherrypy.process import servers
from cherrypy.test import helper

curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))


def read_process(cmd, args=''):
    """Run ``cmd args`` in a shell and return its combined stdout/stderr.

    Raises IOError if the first line of output suggests that *cmd* is not
    on the system path.
    """
    # os.popen4 was removed in Python 3; subprocess.Popen with stderr
    # folded into stdout yields the same combined stream.
    proc = subprocess.Popen(
        '%s %s' % (cmd, args),
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    )
    try:
        firstline = proc.stdout.readline()
        if (re.search(r'(not recognized|No such file|not found)',
                      firstline,
                      re.IGNORECASE)):
            raise IOError('%s must be on your system path.' % cmd)
        output = firstline + proc.stdout.read()
    finally:
        proc.stdout.close()
        proc.wait()
    return output


APACHE_PATH = 'apache2ctl'
CONF_PATH = 'fastcgi.conf'

conf_fastcgi = """
# Apache2 server conf file for testing CherryPy with mod_fastcgi.
# fumanchu: I had to hard-code paths due to crazy Debian layouts :(
ServerRoot /usr/lib/apache2
User #1000
ErrorLog %(root)s/mod_fastcgi.error.log
DocumentRoot "%(root)s"
ServerName 127.0.0.1
Listen %(port)s
LoadModule fastcgi_module modules/mod_fastcgi.so
LoadModule rewrite_module modules/mod_rewrite.so
Options +ExecCGI
SetHandler fastcgi-script
RewriteEngine On
RewriteRule ^(.*)$ /fastcgi.pyc [L]
FastCgiExternalServer "%(server)s" -host 127.0.0.1:4000
"""


def erase_script_name(environ, start_response):
    """WSGI middleware that blanks SCRIPT_NAME before dispatching to cherrypy."""
    environ['SCRIPT_NAME'] = ''
    return cherrypy.tree(environ, start_response)


class ModFCGISupervisor(helper.LocalWSGISupervisor):
    """Test supervisor that serves the app via Apache + mod_fastcgi."""

    httpserver_class = 'cherrypy.process.servers.FlupFCGIServer'
    using_apache = True
    using_wsgi = True
    template = conf_fastcgi

    def __str__(self):
        return 'FCGI Server on %s:%s' % (self.host, self.port)

    def start(self, modulename):
        # The FCGI backend always listens on 127.0.0.1:4000; Apache (from
        # the conf template above) proxies to it.
        cherrypy.server.httpserver = servers.FlupFCGIServer(
            application=erase_script_name,
            bindAddress=('127.0.0.1', 4000))
        cherrypy.server.httpserver.bind_addr = ('127.0.0.1', 4000)
        cherrypy.server.socket_port = 4000
        # For FCGI, we both start apache...
        self.start_apache()
        # ...and our local server
        cherrypy.engine.start()
        self.sync_apps()

    def start_apache(self):
        """Write the Apache conf file and launch apache2ctl with it."""
        fcgiconf = CONF_PATH
        if not os.path.isabs(fcgiconf):
            fcgiconf = os.path.join(curdir, fcgiconf)

        # Write the Apache conf file. The template produces text, so the
        # file must be opened in text mode ('wb' + str raised TypeError on
        # Python 3); newline normalization keeps the conf Unix-flavored.
        with open(fcgiconf, 'w') as f:
            server = repr(os.path.join(curdir, 'fastcgi.pyc'))[1:-1]
            output = self.template % {'port': self.port, 'root': curdir,
                                      'server': server}
            output = output.replace('\r\n', '\n')
            f.write(output)

        result = read_process(APACHE_PATH, '-k start -f %s' % fcgiconf)
        if result:
            print(result)

    def stop(self):
        """Gracefully shutdown a server that is serving forever."""
        read_process(APACHE_PATH, '-k stop')
        helper.LocalWSGISupervisor.stop(self)

    def sync_apps(self):
        cherrypy.server.httpserver.fcgiserver.application = self.get_app(
            erase_script_name)
PypiClean
/Newgram-0.0.5.tar.gz/Newgram-0.0.5/newgram/methods/messages/edit_message_caption.py
from typing import Union, List, Optional

import newgram
from newgram import types, enums


class EditMessageCaption:
    async def edit_message_caption(
        self: "newgram.Client",
        chat_id: Union[int, str],
        message_id: int,
        caption: str,
        parse_mode: Optional["enums.ParseMode"] = None,
        caption_entities: List["types.MessageEntity"] = None,
        reply_markup: "types.InlineKeyboardMarkup" = None
    ) -> "types.Message":
        """Edit the caption of a media message.

        .. include:: /_includes/usable-by/users-bots.rst

        A thin convenience wrapper: editing a caption is the same operation
        as editing message text, so this delegates to
        :meth:`~newgram.Client.edit_message_text` with the caption supplied
        as the text payload.

        Parameters:
            chat_id (``int`` | ``str``):
                Unique identifier (int) or username (str) of the target chat.
                For your personal cloud (Saved Messages) you can simply use
                "me" or "self". For a contact that exists in your Telegram
                address book you can use his phone number (str).

            message_id (``int``):
                Message identifier in the chat specified in chat_id.

            caption (``str``):
                New caption of the media message.

            parse_mode (:obj:`~newgram.enums.ParseMode`, *optional*):
                By default, texts are parsed using both Markdown and HTML
                styles. You can combine both syntaxes together.

            caption_entities (List of :obj:`~newgram.types.MessageEntity`):
                List of special entities that appear in the caption, which
                can be specified instead of *parse_mode*.

            reply_markup (:obj:`~newgram.types.InlineKeyboardMarkup`, *optional*):
                An InlineKeyboardMarkup object.

        Returns:
            :obj:`~newgram.types.Message`: On success, the edited message is
            returned.

        Example:
            .. code-block:: python

                await app.edit_message_caption(chat_id, message_id, "new media caption")
        """
        edit_kwargs = {
            "chat_id": chat_id,
            "message_id": message_id,
            "text": caption,
            "parse_mode": parse_mode,
            "entities": caption_entities,
            "reply_markup": reply_markup,
        }
        return await self.edit_message_text(**edit_kwargs)
PypiClean
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/standard/plugins/a11yhelp/dialogs/lang/he.js
/* Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license */ CKEDITOR.plugins.setLang("a11yhelp","he",{title:"הוראות נגישות",contents:"הוראות נגישות. לסגירה לחץ אסקייפ (ESC).",legend:[{name:"כללי",items:[{name:"סרגל הכלים",legend:"לחץ על ${toolbarFocus} כדי לנווט לסרגל הכלים. עבור לכפתור הבא עם מקש הטאב (TAB) או חץ שמאלי. עבור לכפתור הקודם עם מקש השיפט (SHIFT) + טאב (TAB) או חץ ימני. לחץ רווח או אנטר (ENTER) כדי להפעיל את הכפתור הנבחר."},{name:"דיאלוגים (חלונות תשאול)",legend:"Inside a dialog, press TAB to navigate to the next dialog element, press SHIFT+TAB to move to the previous dialog element, press ENTER to submit the dialog, press ESC to cancel the dialog. When a dialog has multiple tabs, the tab list can be reached either with ALT+F10 or with TAB as part of the dialog tabbing order. With tab list focused, move to the next and previous tab with RIGHT and LEFT ARROW, respectively."}, {name:"תפריט ההקשר (Context Menu)",legend:"לחץ ${contextMenu} או APPLICATION KEYכדי לפתוח את תפריט ההקשר. עבור לאפשרות הבאה עם טאב (TAB) או חץ למטה. עבור לאפשרות הקודמת עם שיפט (SHIFT) + טאב (TAB) או חץ למעלה. לחץ רווח או אנטר (ENTER) כדי לבחור את האפשרות. פתח את תת התפריט (Sub-menu) של האפשרות הנוכחית עם רווח או אנטר (ENTER) או חץ שמאלי. חזור לתפריט האב עם אסקייפ (ESC) או חץ שמאלי. סגור את תפריט ההקשר עם אסקייפ (ESC)."},{name:"תפריטים צפים (List boxes)",legend:"Inside a list-box, move to next list item with TAB OR DOWN ARROW. Move to previous list item with SHIFT+TAB or UP ARROW. Press SPACE or ENTER to select the list option. Press ESC to close the list-box."}, {name:"עץ אלמנטים (Elements Path)",legend:"לחץ ${elementsPathFocus} כדי לנווט לעץ האלמנטים. עבור לפריט הבא עם טאב (TAB) או חץ ימני. עבור לפריט הקודם עם שיפט (SHIFT) + טאב (TAB) או חץ שמאלי. 
לחץ רווח או אנטר (ENTER) כדי לבחור את האלמנט בעורך."}]},{name:"פקודות",items:[{name:" ביטול צעד אחרון",legend:"לחץ ${undo}"},{name:" חזרה על צעד אחרון",legend:"לחץ ${redo}"},{name:" הדגשה",legend:"לחץ ${bold}"},{name:" הטייה",legend:"לחץ ${italic}"},{name:" הוספת קו תחתון",legend:"לחץ ${underline}"},{name:" הוספת לינק", legend:"לחץ ${link}"},{name:" כיווץ סרגל הכלים",legend:"לחץ ${toolbarCollapse}"},{name:"גישה למיקום המיקוד הקודם",legend:"לחץ ${accessPreviousSpace} כדי לגשת למיקום המיקוד הלא-נגיש הקרוב לפני הסמן, למשל בין שני אלמנטים סמוכים מסוג HR. חזור על צירוף מקשים זה כדי להגיע למקומות מיקוד רחוקים יותר."},{name:"גישה למיקום המיקוד הבא",legend:"לחץ ${accessNextSpace} כדי לגשת למיקום המיקוד הלא-נגיש הקרוב אחרי הסמן, למשל בין שני אלמנטים סמוכים מסוג HR. חזור על צירוף מקשים זה כדי להגיע למקומות מיקוד רחוקים יותר."}, {name:" הוראות נגישות",legend:"לחץ ${a11yHelp}"},{name:" Paste as plain text",legend:"Press ${pastetext}",legendEdge:"Press ${pastetext}, followed by ${paste}"}]}],tab:"Tab",pause:"Pause",capslock:"Caps Lock",escape:"Escape",pageUp:"Page Up",pageDown:"Page Down",leftArrow:"חץ שמאלה",upArrow:"חץ למעלה",rightArrow:"חץ ימינה",downArrow:"חץ למטה",insert:"הכנס",leftWindowKey:"Left Windows key",rightWindowKey:"Right Windows key",selectKey:"בחר מקש",numpad0:"Numpad 0",numpad1:"Numpad 1",numpad2:"Numpad 2", numpad3:"Numpad 3",numpad4:"Numpad 4",numpad5:"Numpad 5",numpad6:"Numpad 6",numpad7:"Numpad 7",numpad8:"Numpad 8",numpad9:"Numpad 9",multiply:"Multiply",add:"הוסף",subtract:"Subtract",decimalPoint:"Decimal Point",divide:"Divide",f1:"F1",f2:"F2",f3:"F3",f4:"F4",f5:"F5",f6:"F6",f7:"F7",f8:"F8",f9:"F9",f10:"F10",f11:"F11",f12:"F12",numLock:"Num Lock",scrollLock:"Scroll Lock",semiColon:"Semicolon",equalSign:"Equal Sign",comma:"Comma",dash:"Dash",period:"Period",forwardSlash:"סלאש",graveAccent:"Grave Accent", openBracket:"Open Bracket",backSlash:"סלאש הפוך",closeBracket:"Close Bracket",singleQuote:"ציטוט יחיד"});
PypiClean
/Klarna_API-3.1.0.tar.gz/Klarna_API-3.1.0/klarna/ilt.py
# Copyright 2015 Klarna AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # python3k campatibility from __future__ import print_function import sys if sys.version_info >= (3,): basestring = str class Info(object): def __init__(self, url): if sys.version_info >= (3,): from urllib.request import urlopen else: # Wrap urllib2's urlopen to support __exit__ import contextlib import urllib2 def urlopen(*args): return contextlib.closing(urllib2.urlopen(*args)) import json with urlopen(url) as r: self._data = json.loads(r.read().decode()) def get_questions(self, keys): def mkquestion(key, data): if 'values' in data: values = [Value(v['value'], v['text']) for v in data['values']] else: values = None return Question(key, data['text'], Question.Pre if data.get('pre', False) else Question.Ilt, data['type'], values ) qdata = self._data['questions'] return [mkquestion(key, qdata[key]) for key in keys] def get_dictionary(self): return self._data['dictionary'] class Value(object): ''' ILT Enumeration value ''' def __init__(self, value, text): self.value = value self.text = text def __repr__(self): return self.value def __str__(self): return self.text # pre_ilt - don't send to KO class Question(object): ''' Holds information about a ILT question to ask the customer key: the name of the field to set in income_info with the answer to this question. 
text: the text of the question display to the customer answer_type: the type of input (enum, integer) answer_values: list of possible answers for the enum type ''' Pre = object() Ilt = object() @property def is_select(self): return self.answer_type == 'enum' def __init__(self, key, text, question_type, answer_type, values): self.key = key self.text = text self.question_type = question_type self.answer_type = answer_type self.answer_values = values def __repr__(self): return 'Question(%s, %r, %r)' % (self.key, self.answer_type, self.answer_values)
PypiClean
/Auxjad-1.0.0.tar.gz/Auxjad-1.0.0/auxjad/get/time_signature_list.py
import abjad


def time_signature_list(container: abjad.Container,
                        *,
                        do_not_use_none: bool = False,
                        implicit_common_time: bool = True,
                        omit_repeated: bool = False,
                        ) -> list[abjad.TimeSignature]:
    r"""Return a :obj:`list` with one |abjad.TimeSignature| per measure of an
    input |abjad.Container|.

    Measures carrying no explicit time signature are represented by ``None``,
    modified by two mutually exclusive keyword arguments:

    * ``do_not_use_none``: replace each ``None`` with the time signature in
      effect for that measure (repeating the previous one as needed).
    * ``omit_repeated``: the opposite — replace a time signature identical
      to the one already in effect with ``None``.

    Enabling both raises :exc:`ValueError`.

    ``implicit_common_time`` (default ``True``) mirrors LilyPond's behaviour
    of assuming 4/4 when a container begins without a time signature; set it
    to ``False`` to get ``None`` for such opening measures instead. Combining
    ``implicit_common_time=False`` with ``do_not_use_none=True`` on a
    container whose first leaf has no time signature raises
    :exc:`ValueError`.

    This function is also registered as an extension of |abjad.get|, so it
    is reachable from both the ``auxjad.get`` and ``abjad.get`` namespaces.
    """
    # -- argument validation --------------------------------------------
    if not isinstance(container, abjad.Container):
        raise TypeError("first argument must be 'abjad.Container' or "
                        "child class")
    if not isinstance(do_not_use_none, bool):
        raise TypeError("'do_not_use_none' must be 'bool'")
    if not isinstance(implicit_common_time, bool):
        raise TypeError("'implicit_common_time' must be 'bool'")
    if not isinstance(omit_repeated, bool):
        raise TypeError("'omit_repeated' must be 'bool'")
    if omit_repeated and do_not_use_none:
        raise ValueError("'omit_repeated' and 'do_not_use_none' cannot be "
                         "both set to 'True'")
    if not implicit_common_time and do_not_use_none:
        # Without the implicit 4/4 there is nothing to fill a leading None
        # with, so the first leaf must carry an explicit time signature.
        opening_leaf = abjad.select(container).leaf(0)
        if not abjad.get.indicator(opening_leaf, abjad.TimeSignature):
            raise ValueError("container does not have a time signature "
                             "attached to its first leaf, with "
                             "'implicit_common_time' set to 'False' and "
                             "'omit_repeated' set to 'True'")

    # -- one entry per measure ------------------------------------------
    signatures = []
    grouped_measures = abjad.select(container[:]).group_by_measure()
    for index, measure in enumerate(grouped_measures):
        first_leaf = abjad.select(measure).leaf(0)
        found = abjad.get.indicator(first_leaf, abjad.TimeSignature)
        if index == 0 and found is None and implicit_common_time:
            found = abjad.TimeSignature((4, 4))  # LilyPond's implicit 4/4
        signatures.append(found)

    # -- post-process according to keyword arguments --------------------
    if do_not_use_none:
        if signatures[0] is None:
            signatures[0] = abjad.TimeSignature((4, 4))
        for index in range(1, len(signatures)):
            if signatures[index] is None:
                signatures[index] = abjad.TimeSignature(
                    signatures[index - 1].pair
                )
    elif omit_repeated:
        effective = None
        for index in range(len(signatures)):
            current = signatures[index]
            if current is not None:
                current = abjad.TimeSignature(current.pair)
            if current == effective:
                signatures[index] = None
            elif current is not None:
                effective = current
    return signatures
PypiClean
/DataTig-0.5.0.tar.gz/DataTig-0.5.0/datatig/process.py
import os
import shutil
import sys
import tempfile
from typing import Optional

from datatig.models.siteconfig import SiteConfigModel

from .readers.directory import process_type
from .repository_access import RepositoryAccessLocalFiles, RepositoryAccessLocalGit
from .sqlite import DataStoreSQLite
from .sqliteversioned import DataStoreSQLiteVersioned
from .validate.jsonschema import JsonSchemaValidator, JsonSchemaValidatorVersioned
from .writers.frictionless.frictionless import FrictionlessWriter
from .writers.static.static import StaticWriter
from .writers.staticversioned.staticversioned import StaticVersionedWriter


def go(
    source_dir: str,
    staticsite_output: Optional[str] = None,
    staticsite_url: Optional[str] = None,
    sqlite_output: Optional[str] = None,
    frictionless_output: Optional[str] = None,
    verbose: bool = False,
    check_errors: bool = False,
    check_record_errors: bool = False,
    sys_exit: bool = False,
) -> None:
    """Build a DataTig site from the files currently on disk in ``source_dir``.

    Loads the site configuration, reads every record into a SQLite
    datastore, validates records against their JSON Schemas and then
    optionally writes Frictionless and/or static-site output.

    Parameters:
        source_dir: directory containing the DataTig site source.
        staticsite_output: if set, directory to write the static site to.
        staticsite_url: base URL used when generating the static site.
        sqlite_output: if set, path of the SQLite database to produce;
            otherwise a temporary database is created and deleted after.
        frictionless_output: if set, directory for Frictionless output.
        verbose: print details of any errors found.
        check_errors: check for (and count) file-level load errors.
        check_record_errors: check for per-record validation errors.
        sys_exit: exit the process with a success/failure code when done.
    """
    had_errors = False

    # Repository Access
    repository_access = RepositoryAccessLocalFiles(source_dir)

    # Config
    config = SiteConfigModel(source_dir)
    config.load_from_file(repository_access)

    # SQLite - we always create a SQLite DB. If not requested, we just make
    # it in a temp directory and delete it after.
    temp_dir = None
    if sqlite_output is None:
        temp_dir = tempfile.mkdtemp()
        sqlite_output = os.path.join(temp_dir, "database.sqlite")
    datastore = DataStoreSQLite(config, sqlite_output)

    # Load data
    for type_config in config.get_types().values():
        process_type(
            config,
            repository_access,
            type_config,
            lambda record: datastore.store(record),
            lambda error: datastore.store_error(error),
        )

    # Validate data
    validate_json_schema = JsonSchemaValidator(config, datastore)
    validate_json_schema.go()

    # Look for errors
    if check_errors:
        for error in datastore.get_all_errors_generator():
            if verbose:
                print(
                    "FILENAME "
                    + error.get_filename()
                    + " HAS ERROR: "
                    + error.get_message()
                )
            had_errors = True

    # Look for validation errors
    if check_record_errors:
        for type_id in config.get_types().keys():
            for error in datastore.get_all_record_errors_generator_in_type(
                type_id
            ):
                if verbose:
                    print(
                        "TYPE "
                        + type_id
                        + " RECORD "
                        + error.get_record_id()
                        + " HAS VALIDATION ERROR: "
                        + error.get_message()
                    )
                had_errors = True

    # Frictionless Output
    if frictionless_output:
        frictionless_writer = FrictionlessWriter(
            config, datastore, frictionless_output
        )
        frictionless_writer.go()

    # Static Site Output
    # TODO if frictionless_output is set, should pass it somehow. Otherwise
    # StaticWriter will run it again, which is wasteful.
    if staticsite_output:
        static_writer = StaticWriter(
            config, datastore, staticsite_output, url=staticsite_url
        )
        static_writer.go()

    # We have now finished - start clearing up

    # Delete temp
    if temp_dir:
        shutil.rmtree(temp_dir)

    # Print final message and exit, if requested
    if had_errors:
        if verbose:
            print("ERRORS OCCURRED- See Above")
        if sys_exit:
            sys.exit(-1)
    else:
        if sys_exit:
            sys.exit(0)


def versioned_build(
    source_dir: str,
    staticsite_output: Optional[str] = None,
    staticsite_url: Optional[str] = None,
    sqlite_output: Optional[str] = None,
    refs_str: str = "",
    all_branches: bool = False,
    default_ref: str = "",
) -> None:
    """Build a versioned DataTig site from a git repository in ``source_dir``.

    Each requested git ref is checked out in turn; its records are loaded
    and validated into a single versioned SQLite datastore, from which a
    versioned static site can then be written.

    Parameters:
        source_dir: directory containing the git repository.
        staticsite_output: if set, directory to write the static site to.
        staticsite_url: base URL used when generating the static site.
        sqlite_output: if set, path of the SQLite database to produce;
            otherwise a temporary database is created and deleted after.
        refs_str: comma-separated list of refs to build.
        all_branches: also build every branch found in the repository.
        default_ref: ref the static site treats as the default; falls back
            to the first requested ref if not given or not found.
    """

    # Repository Access
    repository_access = RepositoryAccessLocalGit(source_dir)

    # Work out list of refs
    refs: list = [i for i in refs_str.split(",") if i]
    # Make the list unique while preserving the order refs were passed in.
    # Build servers might use something like "main,$BRANCH", which can
    # expand to "main,main". list(set(...)) would also deduplicate, but its
    # ordering is arbitrary, which made the `default_ref = refs[0]`
    # fallback below nondeterministic; dict.fromkeys keeps insertion order.
    refs = list(dict.fromkeys(refs))
    if all_branches:
        for ref in repository_access.list_branches():
            if ref not in refs:
                refs.append(ref)
    # TODO if no refs passed, error
    if default_ref:
        if default_ref not in refs:
            refs.append(default_ref)
    else:
        default_ref = refs[0]

    # SQLite - we always create a SQLite DB. If not requested, we just make
    # it in a temp directory and delete it after.
    temp_dir = None
    if sqlite_output is None:
        temp_dir = tempfile.mkdtemp()
        sqlite_output = os.path.join(temp_dir, "database.sqlite")
    datastore = DataStoreSQLiteVersioned(sqlite_output)

    # For each ref
    for ref in refs:
        # Set the commit we want, get info
        repository_access.set_ref(ref)
        git_commit = repository_access.get_current_commit()

        # TODO if commit hash is already known to us, don't load data, it
        # is already there (2 branches / refs can point to same commit)

        # Config
        config = SiteConfigModel(source_dir)
        config.load_from_file(repository_access)
        config_id: int = datastore.store_config(config)

        # Save commit
        datastore.store_git_commit(git_commit, config_id)

        # Process data
        for type_config in config.get_types().values():
            process_type(
                config,
                repository_access,
                type_config,
                lambda record: datastore.store_record(git_commit, record),
                lambda error: datastore.store_error(git_commit, error),
            )

        # Validate data
        validate_json_schema = JsonSchemaValidatorVersioned(
            config, datastore, git_commit
        )
        validate_json_schema.go()

    # If default ref not one of the refs we found ...
    if not datastore.is_ref_known(default_ref):
        default_ref = refs[0]

    # Static Site Output
    if staticsite_output:
        static_writer = StaticVersionedWriter(
            datastore, staticsite_output, url=staticsite_url,
            default_ref=default_ref
        )
        static_writer.go()

    # Delete temp
    if temp_dir:
        shutil.rmtree(temp_dir)
/OTLModel/Classes/Onderdeel/Pomp.py
from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut from OTLMOW.OTLModel.Classes.Abstracten.LinkendElement import LinkendElement from OTLMOW.OTLModel.Datatypes.BooleanField import BooleanField from OTLMOW.OTLModel.Datatypes.KlPompMerk import KlPompMerk from OTLMOW.OTLModel.Datatypes.KlPompModelnaam import KlPompModelnaam from OTLMOW.OTLModel.Datatypes.KlPompSoort import KlPompSoort from OTLMOW.OTLModel.Datatypes.KwantWrdInKubiekeMeter import KwantWrdInKubiekeMeter from OTLMOW.OTLModel.Datatypes.KwantWrdInMillimeter import KwantWrdInMillimeter from OTLMOW.OTLModel.Datatypes.KwantWrdInWatt import KwantWrdInWatt from OTLMOW.GeometrieArtefact.PuntGeometrie import PuntGeometrie # Generated with OTLClassCreator. To modify: extend, do not edit class Pomp(LinkendElement, PuntGeometrie): """Een pomp is een werktuig dat water verplaatst door er energie aan af te geven in de vorm van een drukverhoging of snelheidsverhoging.""" typeURI = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp' """De URI van het object volgens https://www.w3.org/2001/XMLSchema#anyURI.""" def __init__(self): LinkendElement.__init__(self) PuntGeometrie.__init__(self) self._binnenDiameter = OTLAttribuut(field=KwantWrdInMillimeter, naam='binnenDiameter', label='binnendiameter', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.binnenDiameter', definition='Afmeting van de binnenkant van de opening waardoor het opgepompte water loopt.', owner=self) self._buitenDiameter = OTLAttribuut(field=KwantWrdInMillimeter, naam='buitenDiameter', label='buitendiameter', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.buitenDiameter', definition='Afmeting van de buitenkant van de opening waarlangs het opgepomte water loopt in functie van een aansluiting van een afvoer.', owner=self) self._maximaalDebiet = OTLAttribuut(field=KwantWrdInKubiekeMeter, naam='maximaalDebiet', label='maximaal debiet', 
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.maximaalDebiet', definition='Het debiet dat de pomp kan verplaatsen wanneer ze op volle capaciteit werkt volgens de specificaties van de fabrikant.', owner=self) self._merk = OTLAttribuut(field=KlPompMerk, naam='merk', label='merk', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.merk', definition='De naam van het merk volgens de fabrikant.', owner=self) self._metSoftstarter = OTLAttribuut(field=BooleanField, naam='metSoftstarter', label='met softstarter', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.metSoftstarter', definition='Geeft aan of het toestel voorzien is van een eigen softstarter.', owner=self) self._metTempSensor = OTLAttribuut(field=BooleanField, naam='metTempSensor', label='met temperatuur sensor', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.metTempSensor', definition='Geeft aan of het toestel uitgerust is met een temperatuur sensor in functie van de bewaking van de correcte werking.', owner=self) self._metVochtsensor = OTLAttribuut(field=BooleanField, naam='metVochtsensor', label='met vocht sensor', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.metVochtsensor', definition='Geeft aan of het toestel uitgerust is met een vocht sensor in functie van de bewaking van de correcte werking.', owner=self) self._modelnaam = OTLAttribuut(field=KlPompModelnaam, naam='modelnaam', label='modelnaam', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.modelnaam', definition='Naam van het model van het toestel volgens de fabrikant.', owner=self) self._soort = OTLAttribuut(field=KlPompSoort, naam='soort', label='soort', objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.soort', definition='Bepaalt de aard van de pomp volgens haar werkingsprincipe.', owner=self) self._vermogen = OTLAttribuut(field=KwantWrdInWatt, naam='vermogen', label='vermogen', 
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Pomp.vermogen', definition='Elektrisch vermogen van het toestels volgens de specificaties van de fabrikant.', owner=self) @property def binnenDiameter(self): """Afmeting van de binnenkant van de opening waardoor het opgepompte water loopt.""" return self._binnenDiameter.get_waarde() @binnenDiameter.setter def binnenDiameter(self, value): self._binnenDiameter.set_waarde(value, owner=self) @property def buitenDiameter(self): """Afmeting van de buitenkant van de opening waarlangs het opgepomte water loopt in functie van een aansluiting van een afvoer.""" return self._buitenDiameter.get_waarde() @buitenDiameter.setter def buitenDiameter(self, value): self._buitenDiameter.set_waarde(value, owner=self) @property def maximaalDebiet(self): """Het debiet dat de pomp kan verplaatsen wanneer ze op volle capaciteit werkt volgens de specificaties van de fabrikant.""" return self._maximaalDebiet.get_waarde() @maximaalDebiet.setter def maximaalDebiet(self, value): self._maximaalDebiet.set_waarde(value, owner=self) @property def merk(self): """De naam van het merk volgens de fabrikant.""" return self._merk.get_waarde() @merk.setter def merk(self, value): self._merk.set_waarde(value, owner=self) @property def metSoftstarter(self): """Geeft aan of het toestel voorzien is van een eigen softstarter.""" return self._metSoftstarter.get_waarde() @metSoftstarter.setter def metSoftstarter(self, value): self._metSoftstarter.set_waarde(value, owner=self) @property def metTempSensor(self): """Geeft aan of het toestel uitgerust is met een temperatuur sensor in functie van de bewaking van de correcte werking.""" return self._metTempSensor.get_waarde() @metTempSensor.setter def metTempSensor(self, value): self._metTempSensor.set_waarde(value, owner=self) @property def metVochtsensor(self): """Geeft aan of het toestel uitgerust is met een vocht sensor in functie van de bewaking van de correcte werking.""" return 
self._metVochtsensor.get_waarde() @metVochtsensor.setter def metVochtsensor(self, value): self._metVochtsensor.set_waarde(value, owner=self) @property def modelnaam(self): """Naam van het model van het toestel volgens de fabrikant.""" return self._modelnaam.get_waarde() @modelnaam.setter def modelnaam(self, value): self._modelnaam.set_waarde(value, owner=self) @property def soort(self): """Bepaalt de aard van de pomp volgens haar werkingsprincipe.""" return self._soort.get_waarde() @soort.setter def soort(self, value): self._soort.set_waarde(value, owner=self) @property def vermogen(self): """Elektrisch vermogen van het toestels volgens de specificaties van de fabrikant.""" return self._vermogen.get_waarde() @vermogen.setter def vermogen(self, value): self._vermogen.set_waarde(value, owner=self)
PypiClean
/EASYPLOT_TOOLBOX-2023.8.tar.gz/EASYPLOT_TOOLBOX-2023.8/license.md
Copyright (c) [2023] [Wanderlei Malaquias Pereira Junior] Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
PypiClean
/CityLearn-1.3.1-py3-none-any.whl/citylearn/rl.py
import random

import numpy as np

# conditional imports
try:
    import torch
    import torch.nn as nn
    import torch.nn.functional as F
    from torch.distributions import Normal
except ImportError as e:
    # Re-raise as ImportError (the original raised a bare Exception), so
    # callers can catch the conventional type; chain the cause for context.
    raise ImportError("This functionality requires you to install torch. You can install torch by : pip install torch torchvision, or for more detailed instructions please visit https://pytorch.org.") from e


class PolicyNetwork(nn.Module):
    """Squashed-Gaussian policy network (SAC-style actor).

    Maps a state vector to the mean and log-standard-deviation of a
    diagonal Gaussian over actions; sampled actions are squashed with
    tanh and affinely rescaled into the environment's action space.

    :param num_inputs: size of the state vector.
    :param num_actions: size of the action vector.
    :param action_space: object exposing ``high`` and ``low`` array bounds
        (e.g. a gym ``Box``).
    :param action_scaling_coef: multiplier applied to the action range.
    :param hidden_dim: sizes of the two hidden layers.
    :param init_w: half-width of the uniform init for the output heads.
    :param log_std_min: lower clamp for the predicted log-std.
    :param log_std_max: upper clamp for the predicted log-std.
    :param epsilon: numerical-stability constant for the tanh correction.
    """

    def __init__(self, num_inputs, num_actions, action_space, action_scaling_coef,
                 hidden_dim=(400, 300), init_w=3e-3, log_std_min=-20,
                 log_std_max=2, epsilon=1e-6):
        super(PolicyNetwork, self).__init__()
        self.log_std_min = log_std_min
        self.log_std_max = log_std_max
        self.epsilon = epsilon

        self.linear1 = nn.Linear(num_inputs, hidden_dim[0])
        self.linear2 = nn.Linear(hidden_dim[0], hidden_dim[1])

        self.mean_linear = nn.Linear(hidden_dim[1], num_actions)
        self.log_std_linear = nn.Linear(hidden_dim[1], num_actions)

        # Small uniform init on the output heads keeps the initial policy
        # close to zero mean with moderate variance.
        self.mean_linear.weight.data.uniform_(-init_w, init_w)
        self.mean_linear.bias.data.uniform_(-init_w, init_w)

        self.log_std_linear.weight.data.uniform_(-init_w, init_w)
        self.log_std_linear.bias.data.uniform_(-init_w, init_w)

        # Affine map from tanh's (-1, 1) output into the (scaled) action space.
        self.action_scale = torch.FloatTensor(
            action_scaling_coef * (action_space.high - action_space.low) / 2.)
        self.action_bias = torch.FloatTensor(
            action_scaling_coef * (action_space.high + action_space.low) / 2.)

    def forward(self, state):
        """Return ``(mean, log_std)`` of the action distribution for *state*."""
        x = F.relu(self.linear1(state))
        x = F.relu(self.linear2(x))
        mean = self.mean_linear(x)
        log_std = self.log_std_linear(x)
        # Clamp so that exp(log_std) stays numerically well-behaved.
        log_std = torch.clamp(log_std, min=self.log_std_min, max=self.log_std_max)
        return mean, log_std

    def sample(self, state):
        """Sample an action using the reparameterization trick.

        Returns ``(action, log_prob, mean_action)`` where ``log_prob`` is
        corrected for the tanh squashing (change of variables) and
        ``mean_action`` is the deterministic (greedy) action.
        """
        mean, log_std = self.forward(state)
        std = log_std.exp()
        normal = Normal(mean, std)
        x_t = normal.rsample()  # for reparameterization trick (mean + std * N(0,1))
        y_t = torch.tanh(x_t)
        action = y_t * self.action_scale + self.action_bias
        log_prob = normal.log_prob(x_t)
        # Enforcing Action Bound
        log_prob -= torch.log(self.action_scale * (1 - y_t.pow(2)) + self.epsilon)
        log_prob = log_prob.sum(1, keepdim=True)
        mean = torch.tanh(mean) * self.action_scale + self.action_bias
        return action, log_prob, mean

    def to(self, device):
        """Move the module and its non-parameter scaling tensors to *device*.

        ``action_scale``/``action_bias`` are plain tensors (not registered
        buffers), so the default ``nn.Module.to`` would not move them.
        """
        self.action_scale = self.action_scale.to(device)
        self.action_bias = self.action_bias.to(device)
        return super(PolicyNetwork, self).to(device)


class ReplayBuffer:
    """Fixed-capacity circular buffer of ``(s, a, r, s', done)`` transitions."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.buffer = []
        self.position = 0  # index of the next slot to (over)write

    def push(self, state, action, reward, next_state, done):
        """Store one transition, overwriting the oldest once full."""
        if len(self.buffer) < self.capacity:
            self.buffer.append(None)
        self.buffer[self.position] = (state, action, reward, next_state, done)
        self.position = (self.position + 1) % self.capacity

    def sample(self, batch_size):
        """Return *batch_size* transitions sampled uniformly without
        replacement, each field stacked into a numpy array."""
        batch = random.sample(self.buffer, batch_size)
        state, action, reward, next_state, done = map(np.stack, zip(*batch))
        return state, action, reward, next_state, done

    def __len__(self):
        return len(self.buffer)


class RegressionBuffer:
    """Fixed-capacity circular buffer of ``(variables, targets)`` pairs."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.x = []
        self.y = []
        self.position = 0  # index of the next slot to (over)write

    def push(self, variables, targets):
        """Store one ``(variables, targets)`` pair, overwriting the oldest
        once full."""
        # Grow both lists in lockstep until capacity; the equality check
        # guards against x and y ever drifting out of sync.
        if len(self.x) < self.capacity and len(self.x) == len(self.y):
            self.x.append(None)
            self.y.append(None)
        self.x[self.position] = variables
        self.y[self.position] = targets
        self.position = (self.position + 1) % self.capacity

    def __len__(self):
        return len(self.x)


class SoftQNetwork(nn.Module):
    """State-action value (critic) network with layer normalization.

    :param num_inputs: size of the state vector.
    :param num_actions: size of the action vector.
    :param hidden_size: sizes of the two hidden layers.
    :param init_w: half-width of the uniform init for the output layer.
    """

    def __init__(self, num_inputs, num_actions, hidden_size=(400, 300), init_w=3e-3):
        super(SoftQNetwork, self).__init__()
        self.linear1 = nn.Linear(num_inputs + num_actions, hidden_size[0])
        self.linear2 = nn.Linear(hidden_size[0], hidden_size[1])
        self.linear3 = nn.Linear(hidden_size[1], 1)
        self.ln1 = nn.LayerNorm(hidden_size[0])
        self.ln2 = nn.LayerNorm(hidden_size[1])

        # Small uniform init on the output layer only; hidden layers keep
        # PyTorch's default fan-in initialization.
        self.linear3.weight.data.uniform_(-init_w, init_w)
        self.linear3.bias.data.uniform_(-init_w, init_w)

    def forward(self, state, action):
        """Return ``Q(state, action)`` as a ``(batch, 1)`` tensor."""
        x = torch.cat([state, action], 1)
        x = self.ln1(F.relu(self.linear1(x)))
        x = self.ln2(F.relu(self.linear2(x)))
        x = self.linear3(x)
        return x
PypiClean
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/packages/pip/_vendor/lockfile/__init__.py
from __future__ import absolute_import import functools import os import socket import threading import warnings # Work with PEP8 and non-PEP8 versions of threading module. if not hasattr(threading, "current_thread"): threading.current_thread = threading.currentThread if not hasattr(threading.Thread, "get_name"): threading.Thread.get_name = threading.Thread.getName __all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked', 'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock', 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock', 'LockBase', 'locked'] class Error(Exception): """ Base class for other exceptions. >>> try: ... raise Error ... except Exception: ... pass """ pass class LockError(Error): """ Base class for error arising from attempts to acquire the lock. >>> try: ... raise LockError ... except Error: ... pass """ pass class LockTimeout(LockError): """Raised when lock creation fails within a user-defined period of time. >>> try: ... raise LockTimeout ... except LockError: ... pass """ pass class AlreadyLocked(LockError): """Some other thread/process is locking the file. >>> try: ... raise AlreadyLocked ... except LockError: ... pass """ pass class LockFailed(LockError): """Lock file creation failed for some other reason. >>> try: ... raise LockFailed ... except LockError: ... pass """ pass class UnlockError(Error): """ Base class for errors arising from attempts to release the lock. >>> try: ... raise UnlockError ... except Error: ... pass """ pass class NotLocked(UnlockError): """Raised when an attempt is made to unlock an unlocked file. >>> try: ... raise NotLocked ... except UnlockError: ... pass """ pass class NotMyLock(UnlockError): """Raised when an attempt is made to unlock a file someone else locked. >>> try: ... raise NotMyLock ... except UnlockError: ... pass """ pass class _SharedBase(object): def __init__(self, path): self.path = path def acquire(self, timeout=None): """ Acquire the lock. 
* If timeout is omitted (or None), wait forever trying to lock the file. * If timeout > 0, try to acquire the lock for that many seconds. If the lock period expires and the file is still locked, raise LockTimeout. * If timeout <= 0, raise AlreadyLocked immediately if the file is already locked. """ raise NotImplemented("implement in subclass") def release(self): """ Release the lock. If the file is not locked, raise NotLocked. """ raise NotImplemented("implement in subclass") def __enter__(self): """ Context manager support. """ self.acquire() return self def __exit__(self, *_exc): """ Context manager support. """ self.release() def __repr__(self): return "<%s: %r>" % (self.__class__.__name__, self.path) class LockBase(_SharedBase): """Base class for platform-specific lock classes.""" def __init__(self, path, threaded=True, timeout=None): """ >>> lock = LockBase('somefile') >>> lock = LockBase('somefile', threaded=False) """ super(LockBase, self).__init__(path) self.lock_file = os.path.abspath(path) + ".lock" self.hostname = socket.gethostname() self.pid = os.getpid() if threaded: t = threading.current_thread() # Thread objects in Python 2.4 and earlier do not have ident # attrs. Worm around that. ident = getattr(t, "ident", hash(t)) self.tname = "-%x" % (ident & 0xffffffff) else: self.tname = "" dirname = os.path.dirname(self.lock_file) # unique name is mostly about the current process, but must # also contain the path -- otherwise, two adjacent locked # files conflict (one file gets locked, creating lock-file and # unique file, the other one gets locked, creating lock-file # and overwriting the already existing lock-file, then one # gets unlocked, deleting both lock-file and unique file, # finally the last lock errors out upon releasing. self.unique_name = os.path.join(dirname, "%s%s.%s%s" % (self.hostname, self.tname, self.pid, hash(self.path))) self.timeout = timeout def is_locked(self): """ Tell whether or not the file is locked. 
""" raise NotImplemented("implement in subclass") def i_am_locking(self): """ Return True if this object is locking the file. """ raise NotImplemented("implement in subclass") def break_lock(self): """ Remove a lock. Useful if a locking thread failed to unlock. """ raise NotImplemented("implement in subclass") def __repr__(self): return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, self.path) def _fl_helper(cls, mod, *args, **kwds): warnings.warn("Import from %s module instead of lockfile package" % mod, DeprecationWarning, stacklevel=2) # This is a bit funky, but it's only for awhile. The way the unit tests # are constructed this function winds up as an unbound method, so it # actually takes three args, not two. We want to toss out self. if not isinstance(args[0], str): # We are testing, avoid the first arg args = args[1:] if len(args) == 1 and not kwds: kwds["threaded"] = True return cls(*args, **kwds) def LinkFileLock(*args, **kwds): """Factory function provided for backwards compatibility. Do not use in new code. Instead, import LinkLockFile from the lockfile.linklockfile module. """ from . import linklockfile return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", *args, **kwds) def MkdirFileLock(*args, **kwds): """Factory function provided for backwards compatibility. Do not use in new code. Instead, import MkdirLockFile from the lockfile.mkdirlockfile module. """ from . import mkdirlockfile return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", *args, **kwds) def SQLiteFileLock(*args, **kwds): """Factory function provided for backwards compatibility. Do not use in new code. Instead, import SQLiteLockFile from the lockfile.mkdirlockfile module. """ from . import sqlitelockfile return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", *args, **kwds) def locked(path, timeout=None): """Decorator which enables locks for decorated function. Arguments: - path: path for lockfile. 
- timeout (optional): Timeout for acquiring lock. Usage: @locked('/var/run/myname', timeout=0) def myname(...): ... """ def decor(func): @functools.wraps(func) def wrapper(*args, **kwargs): lock = FileLock(path, timeout=timeout) lock.acquire() try: return func(*args, **kwargs) finally: lock.release() return wrapper return decor if hasattr(os, "link"): from . import linklockfile as _llf LockFile = _llf.LinkLockFile else: from . import mkdirlockfile as _mlf LockFile = _mlf.MkdirLockFile FileLock = LockFile
PypiClean
/Kallithea-0.7.0.tar.gz/Kallithea-0.7.0/kallithea/controllers/api/api.py
import logging import traceback from datetime import datetime from tg import request from kallithea.controllers.api import JSONRPCController, JSONRPCError from kallithea.lib.auth import (AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel) from kallithea.lib.exceptions import DefaultUserException, UserGroupsAssignedException from kallithea.lib.utils import repo2db_mapper from kallithea.lib.vcs.backends.base import EmptyChangeset from kallithea.lib.vcs.exceptions import EmptyRepositoryError from kallithea.model import db, meta, userlog from kallithea.model.changeset_status import ChangesetStatusModel from kallithea.model.comment import ChangesetCommentsModel from kallithea.model.gist import GistModel from kallithea.model.pull_request import PullRequestModel from kallithea.model.repo import RepoModel from kallithea.model.repo_group import RepoGroupModel from kallithea.model.scm import ScmModel, UserGroupList from kallithea.model.user import UserModel from kallithea.model.user_group import UserGroupModel log = logging.getLogger(__name__) def store_update(updates, attr, name): """ Stores param in updates dict if it's not None (i.e. if user explicitly set a parameter). This allows easy updates of passed in params. 
""" if attr is not None: updates[name] = attr def get_user_or_error(userid): """ Get user by id or name or return JsonRPCError if not found :param userid: """ user = UserModel().get_user(userid) if user is None: raise JSONRPCError("user `%s` does not exist" % (userid,)) return user def get_repo_or_error(repoid): """ Get repo by id or name or return JsonRPCError if not found :param repoid: """ repo = RepoModel().get_repo(repoid) if repo is None: raise JSONRPCError('repository `%s` does not exist' % (repoid,)) return repo def get_repo_group_or_error(repogroupid): """ Get repo group by id or name or return JsonRPCError if not found :param repogroupid: """ repo_group = db.RepoGroup.guess_instance(repogroupid) if repo_group is None: raise JSONRPCError( 'repository group `%s` does not exist' % (repogroupid,)) return repo_group def get_user_group_or_error(usergroupid): """ Get user group by id or name or return JsonRPCError if not found :param usergroupid: """ user_group = UserGroupModel().get_group(usergroupid) if user_group is None: raise JSONRPCError('user group `%s` does not exist' % (usergroupid,)) return user_group def get_perm_or_error(permid, prefix=None): """ Get permission by id or name or return JsonRPCError if not found :param permid: """ perm = db.Permission.get_by_key(permid) if perm is None: raise JSONRPCError('permission `%s` does not exist' % (permid,)) if prefix: if not perm.permission_name.startswith(prefix): raise JSONRPCError('permission `%s` is invalid, ' 'should start with %s' % (permid, prefix)) return perm def get_gist_or_error(gistid): """ Get gist by id or gist_access_id or return JsonRPCError if not found :param gistid: """ gist = GistModel().get_gist(gistid) if gist is None: raise JSONRPCError('gist `%s` does not exist' % (gistid,)) return gist class ApiController(JSONRPCController): """ API Controller The authenticated user can be found as request.authuser. 
Example function:: def func(arg1, arg2,...): pass Each function should also **raise** JSONRPCError for any errors that happens. """ @HasPermissionAnyDecorator('hg.admin') def test(self, args): return args @HasPermissionAnyDecorator('hg.admin') def pull(self, repoid, clone_uri=None): """ Triggers a pull from remote location on given repo. Can be used to automatically keep remote repos up to date. This command can be executed only using api_key belonging to user with admin rights :param repoid: repository name or repository id :type repoid: str or int :param clone_uri: repository URI to pull from (optional) :type clone_uri: str OUTPUT:: id : <id_given_in_input> result : { "msg": "Pulled from `<repository name>`" "repository": "<repository name>" } error : null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "Unable to pull changes from `<reponame>`" } """ repo = get_repo_or_error(repoid) try: ScmModel().pull_changes(repo.repo_name, request.authuser.username, request.ip_addr, clone_uri=clone_uri) return dict( msg='Pulled from `%s`' % repo.repo_name, repository=repo.repo_name ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError( 'Unable to pull changes from `%s`' % repo.repo_name ) @HasPermissionAnyDecorator('hg.admin') def rescan_repos(self, remove_obsolete=False): """ Triggers rescan repositories action. If remove_obsolete is set than also delete repos that are in database but not in the filesystem. aka "clean zombies". This command can be executed only using api_key belonging to user with admin rights. :param remove_obsolete: deletes repositories from database that are not found on the filesystem :type remove_obsolete: Optional(bool) OUTPUT:: id : <id_given_in_input> result : { 'added': [<added repository name>,...] 'removed': [<removed repository name>,...] 
} error : null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { 'Error occurred during rescan repositories action' } """ try: rm_obsolete = remove_obsolete added, removed = repo2db_mapper(ScmModel().repo_scan(), remove_obsolete=rm_obsolete) return {'added': added, 'removed': removed} except Exception: log.error(traceback.format_exc()) raise JSONRPCError( 'Error occurred during rescan repositories action' ) def invalidate_cache(self, repoid): """ Invalidate cache for repository. This command can be executed only using api_key belonging to user with admin rights or regular user that have write or admin or write access to repository. :param repoid: repository name or repository id :type repoid: str or int OUTPUT:: id : <id_given_in_input> result : { 'msg': Cache for repository `<repository name>` was invalidated, 'repository': <repository name> } error : null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { 'Error occurred during cache invalidation action' } """ repo = get_repo_or_error(repoid) if not HasPermissionAny('hg.admin')(): if not HasRepoPermissionLevel('write')(repo.repo_name): raise JSONRPCError('repository `%s` does not exist' % (repoid,)) try: ScmModel().mark_for_invalidation(repo.repo_name) return dict( msg='Cache for repository `%s` was invalidated' % (repoid,), repository=repo.repo_name ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError( 'Error occurred during cache invalidation action' ) @HasPermissionAnyDecorator('hg.admin') def get_ip(self, userid=None): """ Shows IP address as seen from Kallithea server, together with all defined IP addresses for given user. If userid is not passed data is returned for user who's calling this function. This command can be executed only using api_key belonging to user with admin rights. 
:param userid: username to show ips for :type userid: Optional(str or int) OUTPUT:: id : <id_given_in_input> result : { "server_ip_addr": "<ip_from_clien>", "user_ips": [ { "ip_addr": "<ip_with_mask>", "ip_range": ["<start_ip>", "<end_ip>"], }, ... ] } """ if userid is None: userid = request.authuser.user_id user = get_user_or_error(userid) ips = db.UserIpMap.query().filter(db.UserIpMap.user == user).all() return dict( server_ip_addr=request.ip_addr, user_ips=ips ) # alias for old show_ip = get_ip @HasPermissionAnyDecorator('hg.admin') def get_server_info(self): """ return server info, including Kallithea version and installed packages OUTPUT:: id : <id_given_in_input> result : { 'modules': [<module name>,...] 'py_version': <python version>, 'platform': <platform type>, 'kallithea_version': <kallithea version> } error : null """ return db.Setting.get_server_info() def get_user(self, userid=None): """ Gets a user by username or user_id, Returns empty result if user is not found. If userid param is skipped it is set to id of user who is calling this method. 
This command can be executed only using api_key belonging to user with admin rights, or regular users that cannot specify different userid than theirs :param userid: user to get data for :type userid: Optional(str or int) OUTPUT:: id : <id_given_in_input> result: None if user does not exist or { "user_id" : "<user_id>", "api_key" : "<api_key>", "api_keys": "[<list of all API keys including additional ones>]" "username" : "<username>", "firstname": "<firstname>", "lastname" : "<lastname>", "email" : "<email>", "emails": "[<list of all emails including additional ones>]", "ip_addresses": "[<ip_address_for_user>,...]", "active" : "<bool: user active>", "admin" :  "<bool: user is admin>", "extern_name" : "<extern_name>", "extern_type" : "<extern type> "last_login": "<last_login>", "permissions": { "global": ["hg.create.repository", "repository.read", "hg.register.manual_activate"], "repositories": {"repo1": "repository.none"}, "repositories_groups": {"Group1": "group.read"} }, } error: null """ if not HasPermissionAny('hg.admin')(): # make sure normal user does not pass someone else userid, # he is not allowed to do that if userid is not None and userid != request.authuser.user_id: raise JSONRPCError( 'userid is not the same as your user' ) if userid is None: userid = request.authuser.user_id user = get_user_or_error(userid) data = user.get_api_data() data['permissions'] = AuthUser(user_id=user.user_id).permissions return data @HasPermissionAnyDecorator('hg.admin') def get_users(self): """ Lists all existing users. This command can be executed only using api_key belonging to user with admin rights. OUTPUT:: id : <id_given_in_input> result: [<user_object>, ...] 
error: null """ return [ user.get_api_data() for user in db.User.query() .order_by(db.User.username) .filter_by(is_default_user=False) ] @HasPermissionAnyDecorator('hg.admin') def create_user(self, username, email, password='', firstname='', lastname='', active=True, admin=False, extern_type=db.User.DEFAULT_AUTH_TYPE, extern_name=''): """ Creates new user. Returns new user object. This command can be executed only using api_key belonging to user with admin rights. :param username: new username :type username: str or int :param email: email :type email: str :param password: password :type password: Optional(str) :param firstname: firstname :type firstname: Optional(str) :param lastname: lastname :type lastname: Optional(str) :param active: active :type active: Optional(bool) :param admin: admin :type admin: Optional(bool) :param extern_name: name of extern :type extern_name: Optional(str) :param extern_type: extern_type :type extern_type: Optional(str) OUTPUT:: id : <id_given_in_input> result: { "msg" : "created new user `<username>`", "user": <user_obj> } error: null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "user `<username>` already exist" or "email `<email>` already exist" or "failed to create user `<username>`" } """ if db.User.get_by_username(username): raise JSONRPCError("user `%s` already exist" % (username,)) if db.User.get_by_email(email): raise JSONRPCError("email `%s` already exist" % (email,)) try: user = UserModel().create_or_update( username=username, password=password, email=email, firstname=firstname, lastname=lastname, active=active, admin=admin, extern_type=extern_type, extern_name=extern_name ) meta.Session().commit() return dict( msg='created new user `%s`' % username, user=user.get_api_data() ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError('failed to create user `%s`' % (username,)) @HasPermissionAnyDecorator('hg.admin') def update_user(self, userid, username=None, email=None, password=None, 
firstname=None, lastname=None, active=None, admin=None, extern_type=None, extern_name=None): """ updates given user if such user exists. This command can be executed only using api_key belonging to user with admin rights. :param userid: userid to update :type userid: str or int :param username: new username :type username: str or int :param email: email :type email: str :param password: password :type password: Optional(str) :param firstname: firstname :type firstname: Optional(str) :param lastname: lastname :type lastname: Optional(str) :param active: active :type active: Optional(bool) :param admin: admin :type admin: Optional(bool) :param extern_name: :type extern_name: Optional(str) :param extern_type: :type extern_type: Optional(str) OUTPUT:: id : <id_given_in_input> result: { "msg" : "updated user ID:<userid> <username>", "user": <user_object>, } error: null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "failed to update user `<username>`" } """ user = get_user_or_error(userid) # only non optional arguments will be stored in updates updates = {} try: store_update(updates, username, 'username') store_update(updates, password, 'password') store_update(updates, email, 'email') store_update(updates, firstname, 'name') store_update(updates, lastname, 'lastname') store_update(updates, active, 'active') store_update(updates, admin, 'admin') store_update(updates, extern_name, 'extern_name') store_update(updates, extern_type, 'extern_type') user = UserModel().update_user(user, **updates) meta.Session().commit() return dict( msg='updated user ID:%s %s' % (user.user_id, user.username), user=user.get_api_data() ) except DefaultUserException: log.error(traceback.format_exc()) raise JSONRPCError('editing default user is forbidden') except Exception: log.error(traceback.format_exc()) raise JSONRPCError('failed to update user `%s`' % (userid,)) @HasPermissionAnyDecorator('hg.admin') def delete_user(self, userid): """ deletes given user if such user exists. 
This command can be executed only using api_key belonging to user with admin rights. :param userid: user to delete :type userid: str or int OUTPUT:: id : <id_given_in_input> result: { "msg" : "deleted user ID:<userid> <username>", "user": null } error: null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "failed to delete user ID:<userid> <username>" } """ user = get_user_or_error(userid) try: UserModel().delete(userid) meta.Session().commit() return dict( msg='deleted user ID:%s %s' % (user.user_id, user.username), user=None ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError('failed to delete user ID:%s %s' % (user.user_id, user.username)) # permission check inside def get_user_group(self, usergroupid): """ Gets an existing user group. This command can be executed only using api_key belonging to user with admin rights or user who has at least read access to user group. :param usergroupid: id of user_group to edit :type usergroupid: str or int OUTPUT:: id : <id_given_in_input> result : None if group not exist { "users_group_id" : "<id>", "group_name" : "<groupname>", "active": "<bool>", "members" : [<user_obj>,...] } error : null """ user_group = get_user_group_or_error(usergroupid) if not HasPermissionAny('hg.admin')(): if not HasUserGroupPermissionLevel('read')(user_group.users_group_name): raise JSONRPCError('user group `%s` does not exist' % (usergroupid,)) data = user_group.get_api_data() return data # permission check inside def get_user_groups(self): """ Lists all existing user groups. This command can be executed only using api_key belonging to user with admin rights or user who has at least read access to user group. OUTPUT:: id : <id_given_in_input> result : [<user_group_obj>,...] 
error : null """ return [ user_group.get_api_data() for user_group in UserGroupList(db.UserGroup.query().all(), perm_level='read') ] @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') def create_user_group(self, group_name, description='', owner=None, active=True): """ Creates new user group. This command can be executed only using api_key belonging to user with admin rights or an user who has create user group permission :param group_name: name of new user group :type group_name: str :param description: group description :type description: str :param owner: owner of group. If not passed apiuser is the owner :type owner: Optional(str or int) :param active: group is active :type active: Optional(bool) OUTPUT:: id : <id_given_in_input> result: { "msg": "created new user group `<groupname>`", "user_group": <user_group_object> } error: null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "user group `<group name>` already exist" or "failed to create group `<group name>`" } """ if UserGroupModel().get_by_name(group_name): raise JSONRPCError("user group `%s` already exist" % (group_name,)) try: if owner is None: owner = request.authuser.user_id owner = get_user_or_error(owner) ug = UserGroupModel().create(name=group_name, description=description, owner=owner, active=active) meta.Session().commit() return dict( msg='created new user group `%s`' % group_name, user_group=ug.get_api_data() ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError('failed to create group `%s`' % (group_name,)) # permission check inside def update_user_group(self, usergroupid, group_name=None, description=None, owner=None, active=None): """ Updates given usergroup. 
This command can be executed only using api_key belonging to user with admin rights or an admin of given user group :param usergroupid: id of user group to update :type usergroupid: str or int :param group_name: name of new user group :type group_name: str :param description: group description :type description: str :param owner: owner of group. :type owner: Optional(str or int) :param active: group is active :type active: Optional(bool) OUTPUT:: id : <id_given_in_input> result : { "msg": 'updated user group ID:<user group id> <user group name>', "user_group": <user_group_object> } error : null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "failed to update user group `<user group name>`" } """ user_group = get_user_group_or_error(usergroupid) if not HasPermissionAny('hg.admin')(): if not HasUserGroupPermissionLevel('admin')(user_group.users_group_name): raise JSONRPCError('user group `%s` does not exist' % (usergroupid,)) if owner is not None: owner = get_user_or_error(owner) updates = {} store_update(updates, group_name, 'users_group_name') store_update(updates, description, 'user_group_description') store_update(updates, owner, 'owner') store_update(updates, active, 'users_group_active') try: UserGroupModel().update(user_group, updates) meta.Session().commit() return dict( msg='updated user group ID:%s %s' % (user_group.users_group_id, user_group.users_group_name), user_group=user_group.get_api_data() ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError('failed to update user group `%s`' % (usergroupid,)) # permission check inside def delete_user_group(self, usergroupid): """ Delete given user group by user group id or name. 
This command can be executed only using api_key belonging to user with admin rights or an admin of given user group :param usergroupid: :type usergroupid: int OUTPUT:: id : <id_given_in_input> result : { "msg": "deleted user group ID:<user_group_id> <user_group_name>" } error : null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "failed to delete user group ID:<user_group_id> <user_group_name>" or "RepoGroup assigned to <repo_groups_list>" } """ user_group = get_user_group_or_error(usergroupid) if not HasPermissionAny('hg.admin')(): if not HasUserGroupPermissionLevel('admin')(user_group.users_group_name): raise JSONRPCError('user group `%s` does not exist' % (usergroupid,)) try: UserGroupModel().delete(user_group) meta.Session().commit() return dict( msg='deleted user group ID:%s %s' % (user_group.users_group_id, user_group.users_group_name), user_group=None ) except UserGroupsAssignedException as e: log.error(traceback.format_exc()) raise JSONRPCError(str(e)) except Exception: log.error(traceback.format_exc()) raise JSONRPCError('failed to delete user group ID:%s %s' % (user_group.users_group_id, user_group.users_group_name) ) # permission check inside def add_user_to_user_group(self, usergroupid, userid): """ Adds a user to a user group. If user exists in that group success will be `false`. 
This command can be executed only using api_key belonging to user with admin rights or an admin of given user group :param usergroupid: :type usergroupid: int :param userid: :type userid: int OUTPUT:: id : <id_given_in_input> result : { "success": True|False # depends on if member is in group "msg": "added member `<username>` to user group `<groupname>` | User is already in that group" } error : null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "failed to add member to user group `<user_group_name>`" } """ user = get_user_or_error(userid) user_group = get_user_group_or_error(usergroupid) if not HasPermissionAny('hg.admin')(): if not HasUserGroupPermissionLevel('admin')(user_group.users_group_name): raise JSONRPCError('user group `%s` does not exist' % (usergroupid,)) try: ugm = UserGroupModel().add_user_to_group(user_group, user) success = True if ugm is not True else False msg = 'added member `%s` to user group `%s`' % ( user.username, user_group.users_group_name ) msg = msg if success else 'User is already in that group' meta.Session().commit() return dict( success=success, msg=msg ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError( 'failed to add member to user group `%s`' % ( user_group.users_group_name, ) ) # permission check inside def remove_user_from_user_group(self, usergroupid, userid): """ Removes a user from a user group. If user is not in given group success will be `false`. 
This command can be executed only using api_key belonging to user with admin rights or an admin of given user group :param usergroupid: :param userid: OUTPUT:: id : <id_given_in_input> result: { "success": True|False, # depends on if member is in group "msg": "removed member <username> from user group <groupname> | User wasn't in group" } error: null """ user = get_user_or_error(userid) user_group = get_user_group_or_error(usergroupid) if not HasPermissionAny('hg.admin')(): if not HasUserGroupPermissionLevel('admin')(user_group.users_group_name): raise JSONRPCError('user group `%s` does not exist' % (usergroupid,)) try: success = UserGroupModel().remove_user_from_group(user_group, user) msg = 'removed member `%s` from user group `%s`' % ( user.username, user_group.users_group_name ) msg = msg if success else "User wasn't in group" meta.Session().commit() return dict(success=success, msg=msg) except Exception: log.error(traceback.format_exc()) raise JSONRPCError( 'failed to remove member from user group `%s`' % ( user_group.users_group_name, ) ) # permission check inside def get_repo(self, repoid, with_revision_names=False, with_pullrequests=False): """ Gets an existing repository by it's name or repository_id. Members will return either users_group or user associated to that repository. This command can be executed only using api_key belonging to user with admin rights or regular user that have at least read access to repository. 
        :param repoid: repository name or repository id
        :type repoid: str or int

        OUTPUT::

          id : <id_given_in_input>
          result : {
              {
                "repo_id" : "<repo_id>",
                "repo_name" : "<reponame>"
                "repo_type" : "<repo_type>",
                "clone_uri" : "<clone_uri>",
                "enable_downloads": "<bool>",
                "enable_statistics": "<bool>",
                "private": "<bool>",
                "created_on" : "<date_time_created>",
                "description" : "<description>",
                "landing_rev": "<landing_rev>",
                "last_changeset": {
                  "author": "<full_author>",
                  "date": "<date_time_of_commit>",
                  "message": "<commit_message>",
                  "raw_id": "<raw_id>",
                  "revision": "<numeric_revision>",
                  "short_id": "<short_id>"
                }
                "owner": "<repo_owner>",
                "fork_of": "<name_of_fork_parent>",
                "members" : [
                  {
                    "name": "<username>",
                    "type" : "user",
                    "permission" : "repository.(read|write|admin)"
                  },
                  …
                  {
                    "name": "<usergroup name>",
                    "type" : "user_group",
                    "permission" : "usergroup.(read|write|admin)"
                  },
                  …
                ]
                "followers": [<user_obj>, ...],
                <if with_revision_names == True>
                "tags": {
                  "<tagname>": "<raw_id>",
                  ...
                },
                "branches": {
                  "<branchname>": "<raw_id>",
                  ...
                },
                "bookmarks": {
                  "<bookmarkname>": "<raw_id>",
                  ...
                },
              }
          }
          error : null
        """
        repo = get_repo_or_error(repoid)
        # Non-admins need at least 'read' access; a permission failure is
        # reported as "does not exist".
        if not HasPermissionAny('hg.admin')():
            if not HasRepoPermissionLevel('read')(repo.repo_name):
                raise JSONRPCError('repository `%s` does not exist' % (repoid,))

        members = []
        # collect direct per-user permissions on the repository
        for user in repo.repo_to_perm:
            perm = user.permission.permission_name
            user = user.user
            user_data = {
                'name': user.username,
                'type': "user",
                'permission': perm
            }
            members.append(user_data)

        # collect per-user-group permissions on the repository
        for user_group in repo.users_group_to_perm:
            perm = user_group.permission.permission_name
            user_group = user_group.users_group
            user_group_data = {
                'name': user_group.users_group_name,
                'type': "user_group",
                'permission': perm
            }
            members.append(user_group_data)

        followers = [
            uf.user.get_api_data()
            for uf in repo.followers
        ]

        data = repo.get_api_data(with_revision_names=with_revision_names,
                                 with_pullrequests=with_pullrequests)
        data['members'] = members
        data['followers'] = followers
        return data

    # permission check inside
    def get_repos(self):
        """
        Lists all existing repositories.

        This command can be executed only using api_key belonging to user
        with admin rights or regular user that have admin, write or read access
        to repository.

        OUTPUT::

          id : <id_given_in_input>
          result: [
                    {
                      "repo_id" : "<repo_id>",
                      "repo_name" : "<reponame>"
                      "repo_type" : "<repo_type>",
                      "clone_uri" : "<clone_uri>",
                      "private": : "<bool>",
                      "created_on" : "<datetimecreated>",
                      "description" : "<description>",
                      "landing_rev": "<landing_rev>",
                      "owner": "<repo_owner>",
                      "fork_of": "<name_of_fork_parent>",
                      "enable_downloads": "<bool>",
                      "enable_statistics": "<bool>",
                    },
                    …
                  ]
          error: null
        """
        # Admins see every repository; regular users only see the repos
        # returned for them by get_all_user_repos().
        if not HasPermissionAny('hg.admin')():
            repos = request.authuser.get_all_user_repos()
        else:
            repos = db.Repository.query()

        return [
            repo.get_api_data()
            for repo in repos
        ]

    # permission check inside
    def get_repo_nodes(self, repoid, revision, root_path, ret_type='all'):
        """
        returns a list of nodes and it's children in a flat list for a given
        path at given revision. It's possible to specify ret_type to show only
        `files` or `dirs`.
        This command can be executed only using api_key belonging to user
        with admin rights or regular user that have at least read access to
        repository.

        :param repoid: repository name or repository id
        :type repoid: str or int
        :param revision: revision for which listing should be done
        :type revision: str
        :param root_path: path from which start displaying
        :type root_path: str
        :param ret_type: return type 'all|files|dirs' nodes
        :type ret_type: Optional(str)

        OUTPUT::

          id : <id_given_in_input>
          result: [
                    {
                      "name" : "<name>"
                      "type" : "<type>",
                    },
                    …
                  ]
          error: null
        """
        repo = get_repo_or_error(repoid)
        if not HasPermissionAny('hg.admin')():
            if not HasRepoPermissionLevel('read')(repo.repo_name):
                raise JSONRPCError('repository `%s` does not exist' % (repoid,))

        _map = {}
        try:
            _d, _f = ScmModel().get_nodes(repo, revision, root_path,
                                          flat=False)
            _map = {
                'all': _d + _f,
                'files': _f,
                'dirs': _d,
            }
            return _map[ret_type]
        except KeyError:
            # an unknown ret_type was requested
            raise JSONRPCError('ret_type must be one of %s'
                               % (','.join(sorted(_map))))
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to get repo: `%s` nodes' % repo.repo_name
            )

    # permission check inside
    def create_repo(self, repo_name, owner=None,
                    repo_type=None, description='',
                    private=False, clone_uri=None,
                    landing_rev='rev:tip',
                    enable_statistics=None,
                    enable_downloads=None,
                    copy_permissions=False):
        """
        Creates a repository. The repository name contains the full path, but
        the parent repository group must exist. For example "foo/bar/baz"
        require the groups "foo" and "bar" (with "foo" as parent), and create
        "baz" repository with "bar" as group.

        This command can be executed only using api_key belonging to user
        with admin rights or regular user that have create repository
        permission. Regular users cannot specify owner parameter

        :param repo_name: repository name
        :type repo_name: str
        :param owner: user_id or username
        :type owner: Optional(str)
        :param repo_type: 'hg' or 'git'
        :type repo_type: Optional(str)
        :param description: repository description
        :type description: Optional(str)
        :param private:
        :type private: bool
        :param clone_uri:
        :type clone_uri: str
        :param landing_rev: <rev_type>:<rev>
        :type landing_rev: str
        :param enable_downloads:
        :type enable_downloads: bool
        :param enable_statistics:
        :type enable_statistics: bool
        :param copy_permissions: Copy permission from group that repository is
            being created.
        :type copy_permissions: bool

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg": "Created new repository `<reponame>`",
                    "success": true,
                    "task": "<celery task id or None if done sync>"
                  }
          error: null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            'failed to create repository `<repo_name>`
          }
        """
        # Creating inside a group needs 'write' on that group (or hg.admin);
        # creating at top level needs the global create-repository permission.
        group_name = None
        repo_name_parts = repo_name.split('/')
        if len(repo_name_parts) > 1:
            group_name = '/'.join(repo_name_parts[:-1])
            repo_group = db.RepoGroup.get_by_group_name(group_name)
            if repo_group is None:
                raise JSONRPCError("repo group `%s` not found" % group_name)
            if not(HasPermissionAny('hg.admin')() or HasRepoGroupPermissionLevel('write')(group_name)):
                raise JSONRPCError("no permission to create repo in %s" % group_name)
        else:
            if not HasPermissionAny('hg.admin', 'hg.create.repository')():
                raise JSONRPCError("no permission to create top level repo")

        if not HasPermissionAny('hg.admin')():
            if owner is not None:  # forbid setting owner for non-admins
                raise JSONRPCError(
                    'Only Kallithea admin can specify `owner` param'
                )
        if owner is None:
            owner = request.authuser.user_id

        owner = get_user_or_error(owner)

        if RepoModel().get_by_repo_name(repo_name):
            raise JSONRPCError("repo `%s` already exist" % repo_name)

        # fill unspecified flags from the instance-wide default repo settings
        defs = db.Setting.get_default_repo_settings(strip_prefix=True)
        if private is None:
            private = defs.get('repo_private') or False
        if repo_type is None:
            repo_type = defs.get('repo_type')
        if enable_statistics is None:
            enable_statistics = defs.get('repo_enable_statistics')
        if enable_downloads is None:
            enable_downloads = defs.get('repo_enable_downloads')

        try:
            data = dict(
                repo_name=repo_name_parts[-1],
                repo_name_full=repo_name,
                repo_type=repo_type,
                repo_description=description,
                repo_private=private,
                clone_uri=clone_uri,
                repo_group=group_name,
                repo_landing_rev=landing_rev,
                enable_statistics=enable_statistics,
                enable_downloads=enable_downloads,
                repo_copy_permissions=copy_permissions,
            )

            RepoModel().create(form_data=data, cur_user=owner.username)
            # no commit, it's done in RepoModel, or async via celery
            return dict(
                msg="Created new repository `%s`" % (repo_name,),
                success=True,  # cannot return the repo data here since fork
                # can be done async
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to create repository `%s`' % (repo_name,))

    # permission check inside
    def update_repo(self, repoid, name=None, owner=None,
                    group=None, description=None, private=None,
                    clone_uri=None, landing_rev=None,
                    enable_statistics=None,
                    enable_downloads=None):
        """
        Updates repo

        :param repoid: repository name or repository id
        :type repoid: str or int
        :param name:
        :param owner:
        :param group:
        :param description:
        :param private:
        :param clone_uri:
        :param landing_rev:
        :param enable_statistics:
        :param enable_downloads:
        """
        repo = get_repo_or_error(repoid)
        if not HasPermissionAny('hg.admin')():
            if not HasRepoPermissionLevel('admin')(repo.repo_name):
                raise JSONRPCError('repository `%s` does not exist' % (repoid,))

            # renaming a top-level repo is effectively creating a new
            # top-level name, so it needs the create-repository permission
            if (name != repo.repo_name and repo.group_id is None and
                not HasPermissionAny('hg.create.repository')()
            ):
                raise JSONRPCError('no permission to create (or move) top level repositories')

            if owner is not None:  # forbid setting owner for non-admins
                raise JSONRPCError(
                    'Only Kallithea admin can specify `owner` param'
                )

        updates = {}
        repo_group = group
        if repo_group is not None:
            repo_group = get_repo_group_or_error(repo_group)  # TODO: repos can thus currently not be moved to root
            if repo_group.group_id != repo.group_id:
                if not(HasPermissionAny('hg.admin')() or HasRepoGroupPermissionLevel('write')(repo_group.group_name)):
                    raise JSONRPCError("no permission to create (or move) repo in %s" % repo_group.group_name)
            repo_group = repo_group.group_id
        try:
            # store_update only records parameters that were actually given
            store_update(updates, name, 'repo_name')
            store_update(updates, repo_group, 'repo_group')
            store_update(updates, owner, 'owner')
            store_update(updates, description, 'repo_description')
            store_update(updates, private, 'repo_private')
            store_update(updates, clone_uri, 'clone_uri')
            store_update(updates, landing_rev, 'repo_landing_rev')
            store_update(updates, enable_statistics, 'repo_enable_statistics')
            store_update(updates, enable_downloads, 'repo_enable_downloads')

            RepoModel().update(repo, **updates)
            meta.Session().commit()
            return dict(
                msg='updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
                repository=repo.get_api_data()
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError('failed to update repo `%s`' % repoid)

    # permission check inside
    @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
    def fork_repo(self, repoid, fork_name, owner=None,
                  description='', copy_permissions=False, private=False,
                  landing_rev='rev:tip'):
        """
        Creates a fork of given repo. In case of using celery this will
        immediately return success message, while fork is going to be created
        asynchronous.

        This command can be executed only using api_key belonging to user with
        admin rights or regular user that have fork permission, and at least
        read access to forking repository.

        Regular users cannot specify owner parameter.
        :param repoid: repository name or repository id
        :type repoid: str or int
        :param fork_name:
        :param owner:
        :param description:
        :param copy_permissions:
        :param private:
        :param landing_rev:

        INPUT::

          id : <id_for_response>
          api_key : "<api_key>"
          args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
          }

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg": "Created fork of `<reponame>` as `<forkname>`",
                    "success": true,
                    "task": "<celery task id or None if done sync>"
                  }
          error: null
        """
        repo = get_repo_or_error(repoid)
        repo_name = repo.repo_name

        _repo = RepoModel().get_by_repo_name(fork_name)
        if _repo:
            type_ = 'fork' if _repo.fork else 'repo'
            raise JSONRPCError("%s `%s` already exist" % (type_, fork_name))

        # same group rules as create_repo: forking into a group needs 'write'
        # on that group, forking to top level needs create-repository
        group_name = None
        fork_name_parts = fork_name.split('/')
        if len(fork_name_parts) > 1:
            group_name = '/'.join(fork_name_parts[:-1])
            repo_group = db.RepoGroup.get_by_group_name(group_name)
            if repo_group is None:
                raise JSONRPCError("repo group `%s` not found" % group_name)
            if not(HasPermissionAny('hg.admin')() or HasRepoGroupPermissionLevel('write')(group_name)):
                raise JSONRPCError("no permission to create repo in %s" % group_name)
        else:
            if not HasPermissionAny('hg.admin', 'hg.create.repository')():
                raise JSONRPCError("no permission to create top level repo")

        if HasPermissionAny('hg.admin')():
            pass
        elif HasRepoPermissionLevel('read')(repo.repo_name):
            if owner is not None:  # forbid setting owner for non-admins
                raise JSONRPCError(
                    'Only Kallithea admin can specify `owner` param'
                )
        else:
            raise JSONRPCError('repository `%s` does not exist' % (repoid,))

        if owner is None:
            owner = request.authuser.user_id

        owner = get_user_or_error(owner)

        try:
            form_data = dict(
                repo_name=fork_name_parts[-1],
                repo_name_full=fork_name,
                repo_group=group_name,
                repo_type=repo.repo_type,
                description=description,
                private=private,
                copy_permissions=copy_permissions,
                landing_rev=landing_rev,
                update_after_clone=False,
                fork_parent_id=repo.repo_id,
            )
            RepoModel().create_fork(form_data, cur_user=owner.username)
            # no commit, it's done in RepoModel, or async via celery
            return dict(
                msg='Created fork of `%s` as `%s`' % (repo.repo_name,
                                                      fork_name),
                success=True,  # cannot return the repo data here since fork
                # can be done async
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to fork repository `%s` as `%s`' % (repo_name,
                                                            fork_name)
            )

    # permission check inside
    def delete_repo(self, repoid, forks=''):
        """
        Deletes a repository.

        This command can be executed only using api_key belonging to user
        with admin rights or regular user that have admin access to repository.

        When `forks` param is set it's possible to detach or delete forks of
        deleting repository

        :param repoid: repository name or repository id
        :type repoid: str or int
        :param forks: `detach` or `delete`, what do do with attached forks for
            repo
        :type forks: Optional(str)

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg": "Deleted repository `<reponame>`",
                    "success": true
                  }
          error: null
        """
        repo = get_repo_or_error(repoid)
        if not HasPermissionAny('hg.admin')():
            if not HasRepoPermissionLevel('admin')(repo.repo_name):
                raise JSONRPCError('repository `%s` does not exist' % (repoid,))

        try:
            handle_forks = forks
            _forks_msg = ''
            _forks = [f for f in repo.forks]
            if handle_forks == 'detach':
                _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
            elif handle_forks == 'delete':
                _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
            elif _forks:
                # refuse to delete while forks exist and no policy was given
                raise JSONRPCError(
                    'Cannot delete `%s` it still contains attached forks' %
                    (repo.repo_name,)
                )

            RepoModel().delete(repo, forks=forks)
            meta.Session().commit()
            return dict(
                msg='Deleted repository `%s`%s' % (repo.repo_name, _forks_msg),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to delete repository `%s`' % (repo.repo_name,)
            )

    @HasPermissionAnyDecorator('hg.admin')
    def grant_user_permission(self, repoid, userid, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found.

        This command can be executed only using api_key belonging to user
        with admin rights.

        :param repoid: repository name or repository id
        :type repoid: str or int
        :param userid:
        :param perm: (repository.(none|read|write|admin))
        :type perm: str

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
                    "success": true
                  }
          error: null
        """
        repo = get_repo_or_error(repoid)
        user = get_user_or_error(userid)
        perm = get_perm_or_error(perm)

        try:
            RepoModel().grant_user_permission(repo=repo, user=user, perm=perm)
            meta.Session().commit()
            return dict(
                msg='Granted perm: `%s` for user: `%s` in repo: `%s`' % (
                    perm.permission_name, user.username, repo.repo_name
                ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user: `%s` in repo: `%s`' % (
                    userid, repoid
                )
            )

    @HasPermissionAnyDecorator('hg.admin')
    def revoke_user_permission(self, repoid, userid):
        """
        Revoke permission for user on given repository.

        This command can be executed only using api_key belonging to user
        with admin rights.

        :param repoid: repository name or repository id
        :type repoid: str or int
        :param userid:

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
                    "success": true
                  }
          error: null
        """
        repo = get_repo_or_error(repoid)
        user = get_user_or_error(userid)

        try:
            RepoModel().revoke_user_permission(repo=repo, user=user)
            meta.Session().commit()
            return dict(
                msg='Revoked perm for user: `%s` in repo: `%s`' % (
                    user.username, repo.repo_name
                ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user: `%s` in repo: `%s`' % (
                    userid, repoid
                )
            )

    # permission check inside
    def grant_user_group_permission(self, repoid, usergroupid, perm):
        """
        Grant permission for user group on given repository, or update existing
        one if found.

        This command can be executed only using api_key belonging to user
        with admin rights.

        :param repoid: repository name or repository id
        :type repoid: str or int
        :param usergroupid: id of usergroup
        :type usergroupid: str or int
        :param perm: (repository.(none|read|write|admin))
        :type perm: str

        OUTPUT::

          id : <id_given_in_input>
          result : {
                     "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
                     "success": true
                   }
          error : null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
          }
        """
        repo = get_repo_or_error(repoid)
        perm = get_perm_or_error(perm)
        user_group = get_user_group_or_error(usergroupid)
        # non-admins need 'admin' on the repo and at least 'read' on the
        # user group; failures are reported as "does not exist"
        if not HasPermissionAny('hg.admin')():
            if not HasRepoPermissionLevel('admin')(repo.repo_name):
                raise JSONRPCError('repository `%s` does not exist' % (repoid,))

            if not HasUserGroupPermissionLevel('read')(user_group.users_group_name):
                raise JSONRPCError('user group `%s` does not exist' % (usergroupid,))

        try:
            RepoModel().grant_user_group_permission(
                repo=repo, group_name=user_group, perm=perm)
            meta.Session().commit()
            return dict(
                msg='Granted perm: `%s` for user group: `%s` in '
                    'repo: `%s`' % (
                        perm.permission_name, user_group.users_group_name,
                        repo.repo_name
                    ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user group: `%s` in '
                'repo: `%s`' % (
                    usergroupid, repo.repo_name
                )
            )

    # permission check inside
    def revoke_user_group_permission(self, repoid, usergroupid):
        """
        Revoke permission for user group on given repository.

        This command can be executed only using api_key belonging to user
        with admin rights.

        :param repoid: repository name or repository id
        :type repoid: str or int
        :param usergroupid:

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
                    "success": true
                  }
          error: null
        """
        repo = get_repo_or_error(repoid)
        user_group = get_user_group_or_error(usergroupid)
        if not HasPermissionAny('hg.admin')():
            if not HasRepoPermissionLevel('admin')(repo.repo_name):
                raise JSONRPCError('repository `%s` does not exist' % (repoid,))

            if not HasUserGroupPermissionLevel('read')(user_group.users_group_name):
                raise JSONRPCError('user group `%s` does not exist' % (usergroupid,))

        try:
            RepoModel().revoke_user_group_permission(
                repo=repo, group_name=user_group)
            meta.Session().commit()
            return dict(
                msg='Revoked perm for user group: `%s` in repo: `%s`' % (
                    user_group.users_group_name, repo.repo_name
                ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user group: `%s` in '
                'repo: `%s`' % (
                    user_group.users_group_name, repo.repo_name
                )
            )

    @HasPermissionAnyDecorator('hg.admin')
    def get_repo_group(self, repogroupid):
        """
        Returns given repo group together with permissions, and repositories
        inside the group

        :param repogroupid: id/name of repository group
        :type repogroupid: str or int
        """
        repo_group = get_repo_group_or_error(repogroupid)

        members = []
        # direct per-user permissions on the group
        for user in repo_group.repo_group_to_perm:
            perm = user.permission.permission_name
            user = user.user
            user_data = {
                'name': user.username,
                'type': "user",
                'permission': perm
            }
            members.append(user_data)

        # per-user-group permissions on the group
        for user_group in repo_group.users_group_to_perm:
            perm = user_group.permission.permission_name
            user_group = user_group.users_group
            user_group_data = {
                'name': user_group.users_group_name,
                'type': "user_group",
                'permission': perm
            }
            members.append(user_group_data)

        data = repo_group.get_api_data()
        data["members"] = members
        return data

    @HasPermissionAnyDecorator('hg.admin')
    def get_repo_groups(self):
        """
        Returns all repository groups
        """
        return [
            repo_group.get_api_data()
            for repo_group in db.RepoGroup.query()
        ]

    @HasPermissionAnyDecorator('hg.admin')
    def create_repo_group(self, group_name, description='',
                          owner=None, parent=None, copy_permissions=False):
        """
        Creates a repository group. This command can be executed only using
        api_key belonging to user with admin rights.

        :param group_name:
        :type group_name:
        :param description:
        :type description:
        :param owner:
        :type owner:
        :param parent:
        :type parent:
        :param copy_permissions:
        :type copy_permissions:

        OUTPUT::

          id : <id_given_in_input>
          result : {
              "msg": "created new repo group `<repo_group_name>`"
              "repo_group": <repogroup_object>
          }
          error : null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            failed to create repo group `<repogroupid>`
          }
        """
        if db.RepoGroup.get_by_group_name(group_name):
            raise JSONRPCError("repo group `%s` already exist" % (group_name,))

        if owner is None:
            # default owner is the calling user
            owner = request.authuser.user_id
        group_description = description
        parent_group = None
        if parent is not None:
            parent_group = get_repo_group_or_error(parent)

        try:
            repo_group = RepoGroupModel().create(
                group_name=group_name,
                group_description=group_description,
                owner=owner,
                parent=parent_group,
                copy_permissions=copy_permissions
            )
            meta.Session().commit()
            return dict(
                msg='created new repo group `%s`' % group_name,
                repo_group=repo_group.get_api_data()
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError('failed to create repo group `%s`' % (group_name,))

    @HasPermissionAnyDecorator('hg.admin')
    def update_repo_group(self, repogroupid, group_name=None,
                          description=None, owner=None,
                          parent=None):
        """
        Updates given repository group; only the parameters that were actually
        supplied are changed.

        This command can be executed only using api_key belonging to user with
        admin rights.

        :param repogroupid: name or id of repository group
        :type repogroupid: str or int
        :param group_name:
        :param description:
        :param owner:
        :param parent:
        """
        repo_group = get_repo_group_or_error(repogroupid)

        updates = {}
        try:
            store_update(updates, group_name, 'group_name')
            store_update(updates, description, 'group_description')
            store_update(updates, owner, 'owner')
            store_update(updates, parent, 'parent_group')
            repo_group = RepoGroupModel().update(repo_group, updates)
            meta.Session().commit()
            return dict(
                msg='updated repository group ID:%s %s' % (repo_group.group_id,
                                                           repo_group.group_name),
                repo_group=repo_group.get_api_data()
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError('failed to update repository group `%s`'
                               % (repogroupid,))

    @HasPermissionAnyDecorator('hg.admin')
    def delete_repo_group(self, repogroupid):
        """
        :param repogroupid: name or id of repository group
        :type repogroupid: str or int

        OUTPUT::

          id : <id_given_in_input>
          result : {
            'msg': 'deleted repo group ID:<repogroupid> <repogroupname>
            'repo_group': null
          }
          error : null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            "failed to delete repo group ID:<repogroupid> <repogroupname>"
          }
        """
        repo_group = get_repo_group_or_error(repogroupid)

        try:
            RepoGroupModel().delete(repo_group)
            meta.Session().commit()
            return dict(
                msg='deleted repo group ID:%s %s' %
                    (repo_group.group_id, repo_group.group_name),
                repo_group=None
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError('failed to delete repo group ID:%s %s' %
                               (repo_group.group_id, repo_group.group_name)
                               )

    # permission check inside
    def grant_user_permission_to_repo_group(self, repogroupid, userid,
                                            perm, apply_to_children='none'):
        """
        Grant permission for user on given repository group, or update existing
        one if found.

        This command can be executed only using api_key belonging to user
        with admin rights, or user who has admin right to given repository
        group.
        :param repogroupid: name or id of repository group
        :type repogroupid: str or int
        :param userid:
        :param perm: (group.(none|read|write|admin))
        :type perm: str
        :param apply_to_children: 'none', 'repos', 'groups', 'all'
        :type apply_to_children: str

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg" : "Granted perm: `<perm>` (recursive:<apply_to_children>) for user: `<username>` in repo group: `<repo_group_name>`",
                    "success": true
                  }
          error: null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            "failed to edit permission for user: `<userid>` in repo group: `<repo_group_name>`"
          }
        """
        repo_group = get_repo_group_or_error(repogroupid)
        # non-admins need 'admin' level on the group; permission failure is
        # reported as "does not exist"
        if not HasPermissionAny('hg.admin')():
            if not HasRepoGroupPermissionLevel('admin')(repo_group.group_name):
                raise JSONRPCError('repository group `%s` does not exist' % (repogroupid,))
        user = get_user_or_error(userid)
        perm = get_perm_or_error(perm, prefix='group.')

        try:
            RepoGroupModel().add_permission(repo_group=repo_group,
                                            obj=user,
                                            obj_type="user",
                                            perm=perm,
                                            recursive=apply_to_children)
            meta.Session().commit()
            return dict(
                msg='Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % (
                    perm.permission_name, apply_to_children, user.username,
                    repo_group.name
                ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user: `%s` in repo group: `%s`' % (
                    userid, repo_group.name))

    # permission check inside
    def revoke_user_permission_from_repo_group(self, repogroupid, userid,
                                               apply_to_children='none'):
        """
        Revoke permission for user on given repository group.

        This command can be executed only using api_key belonging to user
        with admin rights, or user who has admin right to given repository
        group.

        :param repogroupid: name or id of repository group
        :type repogroupid: str or int
        :param userid:
        :type userid:
        :param apply_to_children: 'none', 'repos', 'groups', 'all'
        :type apply_to_children: str

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg" : "Revoked perm (recursive:<apply_to_children>) for user: `<username>` in repo group: `<repo_group_name>`",
                    "success": true
                  }
          error: null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            "failed to edit permission for user: `<userid>` in repo group: `<repo_group_name>`"
          }
        """
        repo_group = get_repo_group_or_error(repogroupid)
        if not HasPermissionAny('hg.admin')():
            if not HasRepoGroupPermissionLevel('admin')(repo_group.group_name):
                raise JSONRPCError('repository group `%s` does not exist' % (repogroupid,))
        user = get_user_or_error(userid)

        try:
            RepoGroupModel().delete_permission(repo_group=repo_group,
                                               obj=user,
                                               obj_type="user",
                                               recursive=apply_to_children)
            meta.Session().commit()
            return dict(
                msg='Revoked perm (recursive:%s) for user: `%s` in repo group: `%s`' % (
                    apply_to_children, user.username, repo_group.name
                ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user: `%s` in repo group: `%s`' % (
                    userid, repo_group.name))

    # permission check inside
    def grant_user_group_permission_to_repo_group(
            self, repogroupid, usergroupid, perm,
            apply_to_children='none'):
        """
        Grant permission for user group on given repository group, or update
        existing one if found.

        This command can be executed only using api_key belonging to user
        with admin rights, or user who has admin right to given repository
        group.

        :param repogroupid: name or id of repository group
        :type repogroupid: str or int
        :param usergroupid: id of usergroup
        :type usergroupid: str or int
        :param perm: (group.(none|read|write|admin))
        :type perm: str
        :param apply_to_children: 'none', 'repos', 'groups', 'all'
        :type apply_to_children: str

        OUTPUT::

          id : <id_given_in_input>
          result : {
                     "msg" : "Granted perm: `<perm>` (recursive:<apply_to_children>) for user group: `<usersgroupname>` in repo group: `<repo_group_name>`",
                     "success": true
                   }
          error : null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            "failed to edit permission for user group: `<usergroup>` in repo group: `<repo_group_name>`"
          }
        """
        repo_group = get_repo_group_or_error(repogroupid)
        perm = get_perm_or_error(perm, prefix='group.')
        user_group = get_user_group_or_error(usergroupid)
        # non-admins need 'admin' on the repo group and 'read' on the user
        # group; failures are reported as "does not exist"
        if not HasPermissionAny('hg.admin')():
            if not HasRepoGroupPermissionLevel('admin')(repo_group.group_name):
                raise JSONRPCError(
                    'repository group `%s` does not exist' % (repogroupid,))

            if not HasUserGroupPermissionLevel('read')(user_group.users_group_name):
                raise JSONRPCError(
                    'user group `%s` does not exist' % (usergroupid,))

        try:
            RepoGroupModel().add_permission(repo_group=repo_group,
                                            obj=user_group,
                                            obj_type="user_group",
                                            perm=perm,
                                            recursive=apply_to_children)
            meta.Session().commit()
            return dict(
                msg='Granted perm: `%s` (recursive:%s) for user group: `%s` in repo group: `%s`' % (
                    perm.permission_name, apply_to_children,
                    user_group.users_group_name, repo_group.name
                ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user group: `%s` in '
                'repo group: `%s`' % (
                    usergroupid, repo_group.name
                )
            )

    # permission check inside
    def revoke_user_group_permission_from_repo_group(
            self, repogroupid, usergroupid,
            apply_to_children='none'):
        """
        Revoke permission for user group on given repository.
        This command can be executed only using api_key belonging to user
        with admin rights, or user who has admin right to given repository
        group.

        :param repogroupid: name or id of repository group
        :type repogroupid: str or int
        :param usergroupid:
        :param apply_to_children: 'none', 'repos', 'groups', 'all'
        :type apply_to_children: str

        OUTPUT::

          id : <id_given_in_input>
          result: {
                    "msg" : "Revoked perm (recursive:<apply_to_children>) for user group: `<usersgroupname>` in repo group: `<repo_group_name>`",
                    "success": true
                  }
          error: null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            "failed to edit permission for user group: `<usergroup>` in repo group: `<repo_group_name>`"
          }
        """
        repo_group = get_repo_group_or_error(repogroupid)
        user_group = get_user_group_or_error(usergroupid)
        # non-admins need 'admin' on the repo group and 'read' on the user
        # group; failures are reported as "does not exist"
        if not HasPermissionAny('hg.admin')():
            if not HasRepoGroupPermissionLevel('admin')(repo_group.group_name):
                raise JSONRPCError(
                    'repository group `%s` does not exist' % (repogroupid,))

            if not HasUserGroupPermissionLevel('read')(user_group.users_group_name):
                raise JSONRPCError(
                    'user group `%s` does not exist' % (usergroupid,))

        try:
            RepoGroupModel().delete_permission(repo_group=repo_group,
                                               obj=user_group,
                                               obj_type="user_group",
                                               recursive=apply_to_children)
            meta.Session().commit()
            return dict(
                msg='Revoked perm (recursive:%s) for user group: `%s` in repo group: `%s`' % (
                    apply_to_children, user_group.users_group_name,
                    repo_group.name
                ),
                success=True
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError(
                'failed to edit permission for user group: `%s` in repo group: `%s`' % (
                    user_group.users_group_name, repo_group.name
                )
            )

    def get_gist(self, gistid):
        """
        Get given gist by id

        :param gistid: id of private or public gist
        :type gistid: str
        """
        gist = get_gist_or_error(gistid)
        # only admins may read other users' gists
        if not HasPermissionAny('hg.admin')():
            if gist.owner_id != request.authuser.user_id:
                raise JSONRPCError('gist `%s` does not exist' % (gistid,))
        return gist.get_api_data()

    def get_gists(self, userid=None):
        """
        Get all gists for given user. If userid is empty returned gists
        are for user who called the api

        :param userid: user to get gists for
        :type userid: Optional(str or int)
        """
        if not HasPermissionAny('hg.admin')():
            # make sure normal user does not pass someone else userid,
            # he is not allowed to do that
            if userid is not None and userid != request.authuser.user_id:
                raise JSONRPCError(
                    'userid is not the same as your user'
                )

        if userid is None:
            user_id = request.authuser.user_id
        else:
            user_id = get_user_or_error(userid).user_id

        # expired gists are excluded; newest first
        return [
            gist.get_api_data()
            for gist in db.Gist().query()
                .filter_by(is_expired=False)
                .filter(db.Gist.owner_id == user_id)
                .order_by(db.Gist.created_on.desc())
        ]

    def create_gist(self, files, owner=None,
                    gist_type=db.Gist.GIST_PUBLIC, lifetime=-1,
                    description=''):
        """
        Creates new Gist

        :param files: files to be added to gist
            {'filename': {'content':'...', 'lexer': null},
             'filename2': {'content':'...', 'lexer': null}}
        :type files: dict
        :param owner: gist owner, defaults to api method caller
        :type owner: Optional(str or int)
        :param gist_type: type of gist 'public' or 'private'
        :type gist_type: Optional(str)
        :param lifetime: time in minutes of gist lifetime
        :type lifetime: Optional(int)
        :param description: gist description
        :type description: Optional(str)

        OUTPUT::

          id : <id_given_in_input>
          result : {
                     "msg": "created new gist",
                     "gist": {}
                   }
          error : null

        ERROR OUTPUT::

          id : <id_given_in_input>
          result : null
          error : {
            "failed to create gist"
          }
        """
        try:
            if owner is None:
                owner = request.authuser.user_id

            owner = get_user_or_error(owner)

            gist = GistModel().create(description=description,
                                      owner=owner,
                                      ip_addr=request.ip_addr,
                                      gist_mapping=files,
                                      gist_type=gist_type,
                                      lifetime=lifetime)
            meta.Session().commit()
            return dict(
                msg='created new gist',
                gist=gist.get_api_data()
            )
        except Exception:
            log.error(traceback.format_exc())
            raise JSONRPCError('failed to create gist')

    # permission check inside
    def delete_gist(self, gistid):
        """
        Deletes existing gist

        :param
gistid: id of gist to delete :type gistid: str OUTPUT:: id : <id_given_in_input> result : { "deleted gist ID: <gist_id>", "gist": null } error : null ERROR OUTPUT:: id : <id_given_in_input> result : null error : { "failed to delete gist ID:<gist_id>" } """ gist = get_gist_or_error(gistid) if not HasPermissionAny('hg.admin')(): if gist.owner_id != request.authuser.user_id: raise JSONRPCError('gist `%s` does not exist' % (gistid,)) try: GistModel().delete(gist) meta.Session().commit() return dict( msg='deleted gist ID:%s' % (gist.gist_access_id,), gist=None ) except Exception: log.error(traceback.format_exc()) raise JSONRPCError('failed to delete gist ID:%s' % (gist.gist_access_id,)) # permission check inside def get_changesets(self, repoid, start=None, end=None, start_date=None, end_date=None, branch_name=None, reverse=False, with_file_list=False, max_revisions=None): repo = get_repo_or_error(repoid) if not HasRepoPermissionLevel('read')(repo.repo_name): raise JSONRPCError('Access denied to repo %s' % repo.repo_name) format = "%Y-%m-%dT%H:%M:%S" try: return [e.__json__(with_file_list) for e in repo.scm_instance.get_changesets(start, end, datetime.strptime(start_date, format) if start_date else None, datetime.strptime(end_date, format) if end_date else None, branch_name, reverse, max_revisions)] except EmptyRepositoryError as e: raise JSONRPCError('Repository is empty') # permission check inside def get_changeset(self, repoid, raw_id, with_reviews=False): repo = get_repo_or_error(repoid) if not HasRepoPermissionLevel('read')(repo.repo_name): raise JSONRPCError('Access denied to repo %s' % repo.repo_name) changeset = repo.get_changeset(raw_id) if isinstance(changeset, EmptyChangeset): raise JSONRPCError('Changeset %s does not exist' % raw_id) info = dict(changeset.as_dict()) if with_reviews: reviews = ChangesetStatusModel().get_statuses( repo.repo_name, raw_id) info["reviews"] = reviews return info # permission check inside def get_pullrequest(self, pullrequest_id): 
""" Get given pull request by id """ pull_request = db.PullRequest.get(pullrequest_id) if pull_request is None: raise JSONRPCError('pull request `%s` does not exist' % (pullrequest_id,)) if not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name): raise JSONRPCError('not allowed') return pull_request.get_api_data() # permission check inside def comment_pullrequest(self, pull_request_id, comment_msg='', status=None, close_pr=False): """ Add comment, close and change status of pull request. """ apiuser = get_user_or_error(request.authuser.user_id) pull_request = db.PullRequest.get(pull_request_id) if pull_request is None: raise JSONRPCError('pull request `%s` does not exist' % (pull_request_id,)) if (not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name)): raise JSONRPCError('No permission to add comment. User needs at least reading permissions' ' to the source repository.') owner = apiuser.user_id == pull_request.owner_id reviewer = apiuser.user_id in [reviewer.user_id for reviewer in pull_request.reviewers] if close_pr and not (apiuser.admin or owner): raise JSONRPCError('No permission to close pull request. User needs to be admin or owner.') if status and not (apiuser.admin or owner or reviewer): raise JSONRPCError('No permission to change pull request status. 
User needs to be admin, owner or reviewer.') if pull_request.is_closed(): raise JSONRPCError('pull request is already closed') comment = ChangesetCommentsModel().create( text=comment_msg, repo=pull_request.org_repo.repo_id, author=apiuser.user_id, pull_request=pull_request.pull_request_id, f_path=None, line_no=None, status_change=db.ChangesetStatus.get_status_lbl(status), closing_pr=close_pr ) userlog.action_logger(apiuser, 'user_commented_pull_request:%s' % pull_request_id, pull_request.org_repo, request.ip_addr) if status: ChangesetStatusModel().set_status( pull_request.org_repo_id, status, apiuser.user_id, comment, pull_request=pull_request_id ) if close_pr: PullRequestModel().close_pull_request(pull_request_id) userlog.action_logger(apiuser, 'user_closed_pull_request:%s' % pull_request_id, pull_request.org_repo, request.ip_addr) meta.Session().commit() return True # permission check inside def edit_reviewers(self, pull_request_id, add=None, remove=None): """ Add and/or remove one or more reviewers to a pull request, by username or user ID. Reviewers are specified either as a single-user string or as a JSON list of one or more strings. """ if add is None and remove is None: raise JSONRPCError('''Invalid request. Neither 'add' nor 'remove' is specified.''') pull_request = db.PullRequest.get(pull_request_id) if pull_request is None: raise JSONRPCError('pull request `%s` does not exist' % (pull_request_id,)) apiuser = get_user_or_error(request.authuser.user_id) is_owner = apiuser.user_id == pull_request.owner_id is_repo_admin = HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name) if not (apiuser.admin or is_repo_admin or is_owner): raise JSONRPCError('No permission to edit reviewers of this pull request. 
User needs to be admin or pull request owner.') if pull_request.is_closed(): raise JSONRPCError('Cannot edit reviewers of a closed pull request.') if not isinstance(add, list): add = [add] if not isinstance(remove, list): remove = [remove] # look up actual user objects from given name or id. Bail out if unknown. add_objs = set(get_user_or_error(user) for user in add if user is not None) remove_objs = set(get_user_or_error(user) for user in remove if user is not None) new_reviewers = redundant_reviewers = set() if add_objs: new_reviewers, redundant_reviewers = PullRequestModel().add_reviewers(apiuser, pull_request, add_objs) if remove_objs: PullRequestModel().remove_reviewers(apiuser, pull_request, remove_objs) meta.Session().commit() return { 'added': [x.username for x in new_reviewers], 'already_present': [x.username for x in redundant_reviewers], # NOTE: no explicit check that removed reviewers were actually present. 'removed': [x.username for x in remove_objs], }
PypiClean
/Cahoots-0.5.2.zip/Cahoots-0.5.2/cahoots/parsers/programming/lexer.py
from pygments.lexers.agile import PerlLexer, PythonLexer, RubyLexer
from pygments.lexers.compiled import CLexer, CppLexer
from pygments.lexers.dotnet import CSharpLexer, VbNetLexer
from pygments.lexers.jvm import JavaLexer
from pygments.lexers.web import ActionScript3Lexer, PhpLexer, JavascriptLexer
from pygments import lex
from pygments.token import Token
import threading


class ProgrammingLexerThread(threading.Thread):
    """Thread that lexes one data string with one pygments lexer.

    After ``run()`` finishes, ``self.result`` holds the count of
    significant tokens found, or ``False`` when the lexer produced
    errors (or no significant tokens) and the language can be ruled out.
    """

    def __init__(self, lexer_id, lexer, data_string):
        """
        :param lexer_id: the id of the lexer
        :type lexer_id: str
        :param lexer: The lexer this thread is going to use
        :type lexer: pygments.lexer.Lexer
        :param data_string: the string we're going to lex
        :type data_string: str
        """
        # Initialize the Thread machinery before attaching our own state.
        super().__init__()
        self.thread_id = lexer_id
        self.lexer = lexer
        self.data_string = data_string
        self.result = None

    def run(self):
        """Lex the data to see whether this lexer can tokenize it.

        Plain text and bare name tokens carry no signal about the
        language, so they are excluded from the count.
        """
        bad_tokens = (Token.Text, Token.Name, Token.Name.Other)

        tokens = [tok for tok, text
                  in lex(self.data_string, self.lexer)
                  if tok not in bad_tokens and text != '']

        token_count = len(tokens)

        # Errors mean we definitely didn't find the right language
        if Token.Error in tokens or token_count == 0:
            self.result = False
        else:
            self.result = token_count


class ProgrammingLexer(object):
    """Lexes a string with multiple programming lexers and returns tokens."""

    # One shared lexer instance per supported language id. These are
    # created once at import time and reused by every ProgrammingLexer.
    lexers = {
        'actionscript': ActionScript3Lexer(),
        'c': CLexer(),
        'cpp': CppLexer(),
        'cs': CSharpLexer(),
        'java': JavaLexer(),
        'javascript': JavascriptLexer(),
        'perl': PerlLexer(),
        'php': PhpLexer(startinline=True),
        'python': PythonLexer(),
        'ruby': RubyLexer(),
        'vb': VbNetLexer(),
    }

    def __init__(self, matched_langs, data_string):
        """
        :param matched_langs: languages that we've detected tokens for
        :type matched_langs: list
        :param data_string: the data we want to lex for possible langs
        :type data_string: str
        """
        # NOTE: previously these were also class-level attributes
        # (including a shared mutable list); they are instance state and
        # are only ever set here, so they live on the instance alone.
        self.matched_languages = matched_langs
        self.data = data_string

    def lex(self):
        """
        For every possible matched language, we run a lexer to see if we
        can eliminate it as a possible match. If we detect errors, or
        have no lexer matches, we remove it from the list.

        :return: mapping of lexer id -> significant token count
        :rtype: dict
        """
        results = {}
        threads = []

        # Spawn one lexing thread per matched language that has a lexer.
        for lexer_id, lexer in self.lexers.items():
            if lexer_id not in self.matched_languages:
                continue
            thread = ProgrammingLexerThread(lexer_id, lexer, self.data)
            thread.start()
            threads.append(thread)

        for thr in threads:
            thr.join()

        # Only threads with a truthy result (token count) are kept.
        for thr in threads:
            if thr.result:
                results[thr.thread_id] = thr.result

        return results
PypiClean
/CNN4IE-0.1.9-py3-none-any.whl/cnn4ie/channel_spatial_attention_cnn/model.py
import torch
import torch.nn.functional as F
import torch.nn as nn

from cnn4ie.util import crf
from cnn4ie.channel_spatial_attention_cnn.channel_spatial_attention import ChannelSpatialAttention

DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


class Encoder(nn.Module):
    def __init__(self, emb_dim, hid_dim, n_layers, kernel_size, dropout, max_length):
        '''
        Convolutional encoder block with channel/spatial attention.

        :param emb_dim: embedding dimension of the input and output
        :param hid_dim: hidden (channel) dimension inside the conv stack
        :param n_layers: number of attention-convolution layers
        :param kernel_size: convolution kernel size; must be odd so both
            sides of the sequence can be padded symmetrically
        :param dropout: dropout probability applied before each conv layer
        :param max_length: maximum sequence length (used by the attention)
        '''
        super(Encoder, self).__init__()

        # kernel is odd, which is convenient for PAD processing on both
        # sides of the sequence
        assert kernel_size % 2 == 1, 'kernel size must be odd!'

        # Scaling residual sums by sqrt(0.5) keeps the variance of the
        # network roughly constant across layers.
        self.scale = torch.sqrt(torch.FloatTensor([0.5])).to(DEVICE)

        self.emb2hid = nn.Linear(emb_dim, hid_dim)  # fc: emb_dim -> hid_dim
        self.hid2emb = nn.Linear(hid_dim, emb_dim)  # fc: hid_dim -> emb_dim

        # Convolution block: out_channels is doubled so GLU can halve it
        # again after each layer.
        self.convs = nn.ModuleList([
            ChannelSpatialAttention(in_channels=hid_dim,
                                    out_channels=2 * hid_dim,
                                    kernel_size=kernel_size,
                                    max_length=max_length)
            for _ in range(n_layers)])

        self.dropout = nn.Dropout(dropout)

    def forward(self, encoder_output):
        '''
        :param encoder_output: [batch_size, src_len, emb_dim]
        :return: tuple (conved, combined), both [batch_size, src_len, emb_dim];
            ``combined`` is the residual sum of ``conved`` and the input
        '''
        # emb_dim -> hid_dim, as the input of the convolution layers
        conv_input = self.emb2hid(encoder_output)  # [batch_size, src_len, hid_dim]

        # Move channels next to batch so convolution runs over the
        # sequence (last) dimension.
        conv_input = conv_input.permute(0, 2, 1)  # [batch_size, hid_dim, src_len]

        for conv in self.convs:
            conved = conv(self.dropout(conv_input))  # [batch_size, 2*hid_dim, src_len]

            # GLU activation halves the channel dimension again
            conved = F.glu(conved, dim=1)  # [batch_size, hid_dim, src_len]

            # residual connection
            conved = (conved + conv_input) * self.scale  # [batch_size, hid_dim, src_len]

            # input of the next convolution layer
            conv_input = conved

        # hid_dim -> emb_dim, as the output of the convolution block
        conved = self.hid2emb(conved.permute(0, 2, 1))  # [batch_size, src_len, emb_dim]

        # residual connection, the joint output feature of the encoder
        combined = (conved + encoder_output) * self.scale  # [batch_size, src_len, emb_dim]

        return conved, combined


class MultiLayerCSAttCNN(nn.Module):
    def __init__(self, input_dim, output_dim, emb_dim, hid_dim, cnn_layers,
                 encoder_layers, kernel_size, dropout, PAD_IDX,
                 max_length=100, use_crf=True):
        '''
        Multi-layer channel/spatial-attention CNN sequence labeler with an
        optional CRF decoding layer.

        :param input_dim: vocabulary size of the token embedding
        :param output_dim: number of output labels
        :param emb_dim: token/position embedding dimension
        :param hid_dim: hidden dimension inside each Encoder
        :param cnn_layers: number of conv layers per Encoder
        :param encoder_layers: number of stacked Encoder blocks
        :param kernel_size: convolution kernel size (odd)
        :param dropout: dropout probability
        :param PAD_IDX: padding index for both embeddings
        :param max_length: maximum sequence length (position table size)
        :param use_crf: if True, forward() returns CRF-decoded tag ids;
            otherwise it returns raw emission scores
        '''
        super(MultiLayerCSAttCNN, self).__init__()

        self.tok_embedding = nn.Embedding(input_dim, emb_dim, padding_idx=PAD_IDX)  # token embedding
        self.pos_embedding = nn.Embedding(max_length, emb_dim, padding_idx=PAD_IDX)  # position embedding

        self.encoder = nn.ModuleList([
            Encoder(emb_dim, hid_dim, cnn_layers, kernel_size, dropout, max_length)
            for _ in range(encoder_layers)])

        self.dropout = nn.Dropout(dropout)
        self.fc_out = nn.Linear(emb_dim, output_dim)
        self.crf = crf.CRF(output_dim, batch_first=True)
        self.use_crf = use_crf

    def _encode(self, tokens):
        '''
        Shared token+position embedding followed by the encoder stack.
        Used by both forward() and log_likelihood().

        :param tokens: [batch_size, src_len]
        :return: encoder output, [batch_size, src_len, emb_dim]
        '''
        tok_embedded = self.tok_embedding(tokens)  # [batch_size, src_len, emb_dim]

        # Position tensor [batch_size, src_len] with indices 0..src_len-1.
        position = torch.arange(0, tokens.shape[1]).unsqueeze(0).repeat(tokens.shape[0], 1).to(DEVICE)
        pos_embedded = self.pos_embedding(position.long())  # [batch_size, src_len, emb_dim]

        # token embedded + pos embedded
        encoder_output = self.dropout(tok_embedded + pos_embedded)  # [batch_size, src_len, emb_dim]

        # Each Encoder returns (conved, combined); only the combined
        # residual output feeds the next block.
        for encoder in self.encoder:
            _, encoder_output = encoder(self.dropout(encoder_output))  # [batch_size, src_len, emb_dim]

        return encoder_output

    def forward(self, token_tensor):
        '''
        :param token_tensor: [batch_size, src_len]
        :return: CRF-decoded tag id lists when use_crf is True, otherwise
            emission scores [batch_size, src_len, output_dim]
        '''
        output = self.fc_out(self._encode(token_tensor))  # [batch_size, src_len, output_dim]

        if self.use_crf:
            # crf decoding
            output = self.crf.decode(output)

        return output

    def log_likelihood(self, source, target):
        '''
        Negative CRF log-likelihood of ``target`` given ``source``.

        :param source: [batch_size, src_len]
        :param target: [batch_size, src_len]
        :return: scalar training loss
        '''
        outputs = self.fc_out(self._encode(source))  # [batch_size, src_len, output_dim]
        return -self.crf(outputs, target)
PypiClean
/LiBai-0.1.1.tar.gz/LiBai-0.1.1/libai/models/vision_transformer.py
import oneflow as flow
import oneflow.nn as nn
from flowvision.layers.weight_init import trunc_normal_

import libai.utils.distributed as dist
from libai.config.config import configurable
from libai.layers import LayerNorm, Linear, PatchEmbedding, TransformerLayer

from .build import MODEL_ARCH_REGISTRY


@MODEL_ARCH_REGISTRY.register()
class VisionTransformer(nn.Module):
    """Vision Transformer in LiBai.

    LiBai implement of:
    `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale
    <https://arxiv.org/abs/2010.11929>`_

    Args:
        img_size (int, tuple(int)): input image size
        patch_size (int, tuple(int)): patch size
        in_chans (int): number of input channels
        embed_dim (int): embedding dimension
        depth (int): depth of transformer
        num_heads (int): number of attention heads
        mlp_ratio (int): ratio of mlp hidden dim to embedding dim
        drop_rate (float): dropout rate
        attn_drop_rate (float): attention dropout rate
        drop_path_rate (float): stochastic depth rate
        num_classes (int): number of classes for classification head
        loss_func (callable, optional): loss function for computing the total loss
                                        between logits and labels
    """

    @configurable
    def __init__(
        self,
        img_size=224,
        patch_size=16,
        in_chans=3,
        embed_dim=192,
        depth=12,
        num_heads=3,
        mlp_ratio=4.0,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.0,
        num_classes=1000,
        loss_func=None,
    ):
        super().__init__()
        self.num_classes = num_classes
        self.patch_embed = PatchEmbedding(
            img_size=img_size,
            patch_size=patch_size,
            in_chans=in_chans,
            embed_dim=embed_dim,
        )
        ffn_size = int(embed_dim * mlp_ratio)
        num_patches = self.patch_embed.num_patches
        # cls_token and pos_embed are created as global tensors with a
        # broadcast sbp signature, placed on the first pipeline stage.
        self.cls_token = nn.Parameter(
            flow.zeros(
                1,
                1,
                embed_dim,
                sbp=dist.get_nd_sbp([flow.sbp.broadcast, flow.sbp.broadcast]),
                placement=dist.get_layer_placement(0),
            )
        )
        self.pos_embed = nn.Parameter(
            flow.zeros(
                1,
                num_patches + 1,
                embed_dim,
                sbp=dist.get_nd_sbp([flow.sbp.broadcast, flow.sbp.broadcast]),
                placement=dist.get_layer_placement(0),
            )
        )
        self.pos_drop = nn.Dropout(p=drop_rate)
        # Per-block drop-path probabilities increase linearly with depth.
        dpr = [
            x.item() for x in flow.linspace(0, drop_path_rate, depth)
        ]  # stochastic depth decay rule
        self.blocks = nn.Sequential(
            *[
                TransformerLayer(
                    hidden_size=embed_dim,
                    ffn_hidden_size=ffn_size,
                    num_attention_heads=num_heads,
                    attention_dropout_prob=attn_drop_rate,
                    output_dropout_prob=drop_rate,
                    drop_path_prob=dpr[i],
                    layer_idx=i,
                )
                for i in range(depth)
            ]
        )
        # Final norm and classification head live on the last stage (-1).
        self.norm = LayerNorm(embed_dim, layer_idx=-1)
        self.head = Linear(embed_dim, num_classes, layer_idx=-1)

        # Loss func
        self.loss_func = nn.CrossEntropyLoss() if loss_func is None else loss_func

        # weight init
        trunc_normal_(self.pos_embed, std=0.02)
        trunc_normal_(self.cls_token, std=0.02)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        # Truncated-normal init for Linear weights, constants for LayerNorm.
        if isinstance(m, Linear):
            trunc_normal_(m.weight, std=0.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @classmethod
    def from_config(cls, cfg):
        # Maps a LiBai config node to the constructor keyword arguments.
        return {
            "img_size": cfg.img_size,
            "patch_size": cfg.patch_size,
            "in_chans": cfg.in_chans,
            "embed_dim": cfg.embed_dim,
            "depth": cfg.depth,
            "num_heads": cfg.num_heads,
            "mlp_ratio": cfg.mlp_ratio,
            "drop_rate": cfg.drop_rate,
            "attn_drop_rate": cfg.attn_drop_rate,
            "drop_path_rate": cfg.drop_path_rate,
            "num_classes": cfg.num_classes,
        }

    def forward_features(self, x):
        """Embed, run the transformer stack, and return the cls-token feature."""
        # patch embedding
        x = self.patch_embed(x)
        cls_token = self.cls_token.expand(
            x.shape[0], -1, -1
        )  # stole cls_tokens impl from Phil Wang, thanks
        # Align the cls token's sbp signature with x before concatenation.
        cls_token = cls_token.to_global(sbp=x.sbp, placement=cls_token.placement)
        x = flow.cat((cls_token, x), dim=1)

        # position embedding (same sbp alignment as for cls_token)
        pos_embed = self.pos_embed.expand(x.shape[0], -1, -1)
        pos_embed = pos_embed.to_global(sbp=x.sbp, placement=pos_embed.placement)
        x = self.pos_drop(x + pos_embed)

        # transformer block
        x = self.blocks(x)
        x = self.norm(x)

        # Only the cls-token position is used for classification.
        return x[:, 0]

    def forward(self, images, labels=None):
        """

        Args:
            images (flow.Tensor): training samples.
            labels (flow.LongTensor, optional): training targets

        Returns:
            dict:
                A dict containing :code:`loss_value` or :code:`logits`
                depending on training or evaluation mode.
                :code:`{"losses": loss_value}` when training,
                :code:`{"prediction_scores": logits}` when evaluating.
        """
        x = self.forward_features(images)
        x = self.head(x)

        if labels is not None and self.training:
            losses = self.loss_func(x, labels)
            return {"losses": losses}
        else:
            return {"prediction_scores": x}

    @staticmethod
    def set_pipeline_stage_id(model):
        # Assigns each module to a pipeline-parallel stage: embedding and
        # positional parameters on the first stage, transformer layers by
        # their layer index, head/norm/loss on the last stage.
        dist_utils = dist.get_dist_util()

        # Set pipeline parallelism stage_id
        for module_block in model.modules():
            # module.origin can get the original module
            if isinstance(module_block.origin, PatchEmbedding):
                module_block.config.stage_id = dist_utils.get_layer_stage_id(0)
            elif isinstance(module_block.origin, TransformerLayer):
                module_block.config.stage_id = dist_utils.get_layer_stage_id(module_block.layer_idx)

        # Set pos_embed and cls_token stage id
        model.pos_embed.config.stage_id = dist_utils.get_layer_stage_id(0)
        model.cls_token.config.stage_id = dist_utils.get_layer_stage_id(0)
        model.pos_drop.config.stage_id = dist_utils.get_layer_stage_id(0)
        model.norm.config.stage_id = dist_utils.get_layer_stage_id(-1)
        model.head.config.stage_id = dist_utils.get_layer_stage_id(-1)
        model.loss_func.config.stage_id = dist_utils.get_layer_stage_id(-1)
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojo/nls/th/colors.js
define(
"dojo/nls/th/colors", //begin v1.x content
({
// local representation of all CSS3 named colors, companion to dojo.colors.  To be used where descriptive information
// is required for each color, such as a palette widget, and not for specifying color programmatically.
//Note: due to the SVG 1.0 spec additions, some of these are alternate spellings for the same color e.g. grey vs. gray.
//TODO: should we be using unique rgb values as keys instead and avoid these duplicates, or rely on the caller to do the reverse mapping?
aliceblue: "alice blue",
antiquewhite: "antique white",
aqua: "ฟ้าน้ำทะเล",
aquamarine: "aquamarine",
azure: "น้ำเงินฟ้า",
beige: "น้ำตาลเบจ",
bisque: "bisque",
black: "ดำ",
blanchedalmond: "blanched almond",
blue: "น้ำเงิน",
blueviolet: "น้ำเงินม่วง",
brown: "น้ำตาล",
burlywood: "burlywood",
cadetblue: "cadet blue",
chartreuse: "chartreuse",
chocolate: "ช็อกโกแลต",
coral: "coral",
cornflowerblue: "cornflower blue",
cornsilk: "cornsilk",
crimson: "แดงเลือดหมู",
cyan: "เขียวแกมน้ำเงิน",
darkblue: "น้ำเงินเข้ม",
darkcyan: "เขียวแกมน้ำเงินเข้ม",
darkgoldenrod: "dark goldenrod",
darkgray: "เทาเข้ม",
darkgreen: "เขียวเข้ม",
darkgrey: "เทาเข้ม", // same as darkgray
darkkhaki: "dark khaki",
darkmagenta: "แดงแกมม่วงเข้ม",
darkolivegreen: "เขียวโอลีฟเข้ม",
darkorange: "ส้มเข้ม",
darkorchid: "dark orchid",
darkred: "แดงเข้ม",
darksalmon: "dark salmon",
darkseagreen: "dark sea green",
darkslateblue: "dark slate blue",
darkslategray: "dark slate gray",
darkslategrey: "dark slate gray", // same as darkslategray
darkturquoise: "dark turquoise",
darkviolet: "ม่วงเข้ม",
deeppink: "ชมพูเข้ม",
deepskyblue: "deep sky blue",
dimgray: "dim gray",
dimgrey: "dim gray", // same as dimgray
dodgerblue: "dodger blue",
firebrick: "สีอิฐ",
floralwhite: "floral white",
forestgreen: "forest green",
fuchsia: "fuchsia",
gainsboro: "gainsboro",
ghostwhite: "ghost white",
gold: "ทอง",
goldenrod: "goldenrod",
gray: "เทา",
green: "เขียว",
greenyellow: "เขียวแกมเหลือง",
grey: "เทา", // same as gray
honeydew: "honeydew",
hotpink: "hot pink",
indianred: "indian red",
indigo: "indigo",
ivory: "งาช้าง",
khaki: "khaki",
lavender: "ม่วงลาเวนเดอร์",
lavenderblush: "lavender blush",
lawngreen: "lawn green",
lemonchiffon: "lemon chiffon",
lightblue: "น้ำเงินอ่อน",
lightcoral: "light coral",
lightcyan: "เขียวแกมน้ำเงินอ่อน",
lightgoldenrodyellow: "light goldenrod yellow",
lightgray: "เทาอ่อน",
lightgreen: "เขียวอ่อน",
lightgrey: "เทาอ่อน", // same as lightgray
lightpink: "ชมพูอ่อน",
lightsalmon: "light salmon",
lightseagreen: "light sea green",
lightskyblue: "ฟ้าอ่อน",
lightslategray: "light slate gray",
lightslategrey: "light slate gray", // same as lightslategray
lightsteelblue: "light steel blue",
lightyellow: "เหลืองอ่อน",
lime: "เหลืองมะนาว",
limegreen: "เขียวมะนาว",
linen: "linen",
magenta: "แดงแกมม่วง",
maroon: "น้ำตาลแดง",
mediumaquamarine: "medium aquamarine",
mediumblue: "medium blue",
mediumorchid: "medium orchid",
mediumpurple: "medium purple",
mediumseagreen: "medium sea green",
mediumslateblue: "medium slate blue",
mediumspringgreen: "medium spring green",
mediumturquoise: "medium turquoise",
mediumvioletred: "medium violet-red",
midnightblue: "midnight blue",
mintcream: "mint cream",
mistyrose: "misty rose",
moccasin: "ม็อคค่า",
navajowhite: "navajo white",
navy: "น้ำเงินเข้ม",
oldlace: "old lace",
olive: "โอลีฟ",
olivedrab: "olive drab",
orange: "ส้ม",
orangered: "ส้มแกมแดง",
orchid: "orchid",
palegoldenrod: "pale goldenrod",
palegreen: "pale green",
paleturquoise: "pale turquoise",
palevioletred: "pale violet-red",
papayawhip: "papaya whip",
peachpuff: "peach puff",
peru: "peru",
pink: "ชมพู",
plum: "plum",
powderblue: "powder blue",
purple: "ม่วง",
red: "แดง",
rosybrown: "rosy brown",
royalblue: "royal blue",
saddlebrown: "saddle brown",
salmon: "salmon",
sandybrown: "sandy brown",
seagreen: "sea green",
seashell: "seashell",
sienna: "sienna",
silver: "เงิน",
skyblue: "sky blue",
slateblue: "slate blue",
slategray: "slate gray",
slategrey: "slate gray", // same as slategray
snow: "snow",
springgreen: "spring green",
steelblue: "steel blue",
tan: "tan",
teal: "teal",
thistle: "thistle",
tomato: "tomato",
transparent: "สีใส",
turquoise: "turquoise",
violet: "ม่วง",
wheat: "wheat",
white: "ขาว",
whitesmoke: "ขาวควัน",
yellow: "เหลือง",
yellowgreen: "เหลืองแกมเขียว"
})
//end v1.x content
);
PypiClean
/NlvWxPython-4.2.0-cp37-cp37m-win_amd64.whl/wx/lib/mixins/imagelist.py
import wx

#----------------------------------------------------------------------------

class MagicImageList:
    '''
    Mix-in to provide "magic" growing image lists
    By Mike Fletcher
    '''

    ### LAZYTREE and LISTCONTROL Methods
    DEFAULTICONSIZE = 16

    def SetupIcons(self, images=(), size=None):
        """Create the image list (16 or 32 px) and preload the given images.

        :param images: iterable of wx.Icon/wx.Bitmap objects to preload
        :param size: icon edge size (16 or 32); defaults to DEFAULTICONSIZE
        """
        self.__size = size or self.DEFAULTICONSIZE
        self.__magicImageList = wx.ImageList(self.__size, self.__size)
        self.__magicImageListMapping = {}
        self.SetImageList(
            self.__magicImageList,
            {
                16: wx.IMAGE_LIST_SMALL,
                32: wx.IMAGE_LIST_NORMAL,
            }[self.__size]
        )
        for image in images:
            self.AddIcon(image)

    def GetIcons(self, node):
        '''Get icon indexes for a given node, or None if no associated icon'''
        icon = self.GetIcon(node)
        if icon:
            index = self.AddIcon(icon)
            # Same index is used for both the normal and selected state.
            return index, index
        return None

    ### Local methods...
    def AddIcon(self, icon, mask=wx.NullBitmap):
        '''Add an icon to the image list, or get the index if already there

        :param icon: a wx.Icon or wx.Bitmap to register
        :param mask: optional mask bitmap or wx.Colour for colour-key masking
        :raises ValueError: if icon is neither a wx.Icon nor a wx.Bitmap
        '''
        index = self.__magicImageListMapping.get(id(icon))
        if index is None:
            # BUGFIX: the original tested against the nonexistent names
            # wxIconPtr / wx.BitmapPtr (pre-Phoenix SWIG proxy classes),
            # which raised NameError/AttributeError on any icon lookup.
            if isinstance(icon, wx.Icon):
                index = self.__magicImageList.AddIcon(icon)
            elif isinstance(icon, wx.Bitmap):
                if isinstance(mask, wx.Colour):
                    index = self.__magicImageList.AddWithColourMask(icon, mask)
                else:
                    index = self.__magicImageList.Add(icon, mask)
            else:
                raise ValueError("Unexpected icon object %s, "
                                 "expected wx.Icon or wx.Bitmap" % (icon))
            self.__magicImageListMapping[id(icon)] = index
        return index

    ### Customisation point...
    def GetIcon(self, node):
        '''Get the actual icon object for a node, or None if it has no icon'''
        # Nodes advertise their icon through a DIAGRAMICON attribute.
        if hasattr(node, "DIAGRAMICON"):
            return node.DIAGRAMICON
PypiClean
/lektor-3.4.0b6-py3-none-any.whl/lektor/datamodel.py
class PaginationConfig:
    """Per-model pagination settings and helpers.

    ``enabled`` defaults to False, ``per_page`` to 20, ``url_suffix`` to
    "page".  ``items`` is an optional expression string evaluated against a
    record to produce the paginated query.
    """

    def __init__(self, env, enabled=None, per_page=None, url_suffix=None, items=None):
        self.env = env
        if enabled is None:
            enabled = False
        self.enabled = enabled
        if per_page is None:
            per_page = 20
        elif not isinstance(per_page, int):
            raise TypeError(f"per_page must be an int or None, not {per_page!r}")
        if per_page <= 0:
            # BUG FIX: the f prefix was missing, so the error message
            # contained the literal text "{per_page}" instead of the value.
            raise ValueError(f"per_page must be positive, not {per_page}")
        self.per_page = per_page
        if url_suffix is None:
            url_suffix = "page"
        self.url_suffix = url_suffix
        self.items = items
        # Cached (expression_string, compiled Expression) pair.
        self._items_tmpl = None

    def count_total_items(self, record):
        """Counts the number of items over all pages."""
        return self.get_pagination_query(record).count()

    def count_pages(self, record):
        """Returns the total number of pages for the children of a record."""
        total = self.count_total_items(record)
        npages = (total + self.per_page - 1) // self.per_page
        # Even when there are no children, we want at least one page
        return max(npages, 1)

    def slice_query_for_page(self, record, page):
        """Slices the query so it returns the children for a given page."""
        query = self.get_pagination_query(record)
        if not self.enabled or page is None:
            return query
        return query.limit(self.per_page).offset((page - 1) * self.per_page)

    @staticmethod
    def get_record_for_page(record, page_num):
        """Given a normal record this one returns the version specific
        for a page.
        """
        # If we already have the right version, return it.
        if record.page_num == page_num:
            return record

        # Check if we have a cached version
        pad = record.pad
        rv = pad.cache.get(record.path, record.alt, str(page_num))
        if rv is not Ellipsis:
            return rv

        # Make what we need out of what we have and put it into the cache.
        cls = record.__class__
        rv = cls(record.pad, record._data, page_num=page_num)
        pad.cache.remember(rv)
        return rv

    def match_pagination(self, record, url_path):
        """Matches the pagination from the URL path.

        Returns the page-specific record, or None when *url_path* is not
        a valid pagination path for *record*.
        """
        if not self.enabled:
            return None
        suffixes = self.url_suffix.strip("/").split("/")
        if url_path[: len(suffixes)] != suffixes:
            return None
        try:
            page_num = int(url_path[len(suffixes)])
        except (ValueError, IndexError):
            return None
        # It's important we do not allow "1" here as the first page is always
        # on the root.  Changing this would mean the URLs are incorrectly
        # generated if someone manually went to /page/1/.
        if page_num == 1 or len(url_path) != len(suffixes) + 1:
            return None
        # Page needs to have at least a single child.
        rv = self.get_record_for_page(record, page_num)
        if rv.pagination.items.first() is not None:
            return rv
        return None

    def get_pagination_controller(self, record):
        if not self.enabled:
            raise RuntimeError("Pagination is disabled")
        return Pagination(record, self)

    def get_pagination_query(self, record):
        """Return the query of items to paginate for *record*."""
        items_expr = self.items
        if items_expr is None:
            return record.children
        # Recompile the expression only when the config string changed.
        if self._items_tmpl is None or self._items_tmpl[0] != items_expr:
            self._items_tmpl = (items_expr, Expression(self.env, items_expr))
        return self._items_tmpl[1].evaluate(record.pad, this=record)

    def to_json(self):
        return {
            "enabled": self.enabled,
            "per_page": self.per_page,
            "url_suffix": self.url_suffix,
            "items": self.items,
        }
def deserialize_value(self, value, pad=None):
    """Convert a raw string *value* (or None) from a content file into this
    field's typed value.

    The raw value is wrapped in a ``RawValue`` carrying field and pad
    context, and the field type applies its configured default when the
    raw value is missing.
    """
    raw_value = RawValue(self.name, value, field=self, pad=pad)
    return self.type.value_from_raw_with_default(raw_value)
def format_record_label(self, record, lang="en"):
    """Return the human-readable label for *record* in *lang*.

    Returns None when no label template is configured for *lang* or when
    evaluating the template fails.
    """
    # BUG FIX: ``label_i18n`` may be None (DataModel allows it, and the
    # synthetic "none" model is built without one), which made the old
    # ``self.label_i18n.get(lang)`` raise AttributeError.  Guard the same
    # way the ``label`` property does.
    label = (self.label_i18n or {}).get(lang)
    if label is None:
        return None
    # Compile the format expression once per language and cache it.
    tmpl = self._label_tmpls.get(lang)
    if tmpl is None:
        tmpl = (label, FormatExpression(self.env, label))
        self._label_tmpls[lang] = tmpl
    try:
        return tmpl[1].evaluate(record.pad, this=record)
    except Exception:
        # XXX: log
        return None
def process_raw_data(self, raw_data, pad=None):
    """Deserialize *raw_data* (mapping of field name -> raw string value)
    through every field known to this model, system fields included.

    Fields absent from *raw_data* are deserialized from ``None`` so they
    still receive their configured defaults; ``_model`` is forced to this
    model's id regardless of the raw input.
    """
    rv = {}
    for field in self.field_map.values():
        value = raw_data.get(field.name)
        rv[field.name] = field.deserialize_value(value, pad=pad)
    rv["_model"] = self.id
    return rv
def fielddata_from_ini(inifile):
    """Collect ``(field_name, options_dict)`` pairs from every
    ``fields.*`` section of *inifile*, in section order."""
    pairs = []
    for section in inifile.sections():
        if not section.startswith("fields."):
            continue
        # "fields.title" -> "title"
        field_name = section.split(".", 1)[1]
        pairs.append((field_name, inifile.section_as_dict(section)))
    return pairs
def datamodel_from_data(env, model_data, parent=None):
    """Build a DataModel from the dict produced by ``datamodel_data_from_ini``,
    resolving unset values through the single-inheritance *parent* model.
    """

    def get_value(key):
        # Dotted *key* addresses a value inside model_data (e.g.
        # "child_config.enabled"; every intermediate segment is a dict
        # created by datamodel_data_from_ini).  If the model itself leaves
        # the value unset (None), fall back to the same attribute path on
        # the parent model, and finally to None.
        path = key.split(".")
        node = model_data
        for item in path:
            node = node.get(item)
        if node is not None:
            return node
        if parent is not None:
            node = parent
            for item in path:
                node = getattr(node, item)
            return node
        return None

    # Fields inherit structurally: unknown parent fields are prepended to
    # this model's own fields (see fields_from_data).
    fields = fields_from_data(
        env, model_data["fields"], parent and parent.fields or None
    )
    return DataModel(
        env,
        # data that never inherits
        filename=model_data["filename"],
        id=model_data["id"],
        parent=parent,
        name_i18n=model_data["name_i18n"],
        primary_field=model_data["primary_field"],
        # direct data that can inherit
        label_i18n=get_value("label_i18n"),
        hidden=get_value("hidden"),
        protected=get_value("protected"),
        child_config=ChildConfig(
            enabled=get_value("child_config.enabled"),
            slug_format=get_value("child_config.slug_format"),
            model=get_value("child_config.model"),
            order_by=get_value("child_config.order_by"),
            replaced_with=get_value("child_config.replaced_with"),
            hidden=get_value("child_config.hidden"),
        ),
        attachment_config=AttachmentConfig(
            enabled=get_value("attachment_config.enabled"),
            model=get_value("attachment_config.model"),
            order_by=get_value("attachment_config.order_by"),
            hidden=get_value("attachment_config.hidden"),
        ),
        pagination_config=PaginationConfig(
            env,
            enabled=get_value("pagination_config.enabled"),
            per_page=get_value("pagination_config.per_page"),
            url_suffix=get_value("pagination_config.url_suffix"),
            items=get_value("pagination_config.items"),
        ),
        fields=fields,
    )
def iter_inis(path):
    """Yield ``(basename, IniFile)`` pairs for the ini files in *path*.

    Entries that don't end in ``.ini``, that start with ``_`` or ``.``,
    or that aren't regular files are skipped.  A missing directory is
    treated as empty; any other OSError propagates.
    """
    try:
        for entry in os.listdir(path):
            if not entry.endswith(".ini") or entry[:1] in "_.":
                continue
            full_path = os.path.join(path, entry)
            if not os.path.isfile(full_path):
                continue
            # Strip the ".ini" suffix and squash non-ASCII characters so
            # the resulting id is always plain ASCII.
            base = entry[:-4].encode("utf-8").decode("ascii", "replace")
            yield base, IniFile(full_path)
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            raise
def add_system_field(name, **opts):
    """Register a built-in system field (``_id``, ``_slug``, ...) in the
    module-level ``system_fields`` mapping.

    ``opts`` must include a ``type`` key naming one of ``builtin_types``;
    the remaining options are stored and later turned into a ``Field`` in
    every model's field map.
    """
    # Expand *_i18n keyword shortcuts into concrete option entries.
    opts = dict(generate_i18n_kvs(**opts))
    ty = builtin_types[opts.pop("type")]
    system_fields[name] = (ty, opts)
PypiClean
/FF-Flask-Security-3.1.0.tar.gz/FF-Flask-Security-3.1.0/docs/models.rst
Models
======

Flask-Security assumes you'll be using libraries such as SQLAlchemy,
MongoEngine, Peewee or PonyORM to define a data model that includes a `User`
and `Role` model. The fields on your models must follow a particular
convention depending on the functionality your app requires. Aside from this,
you're free to add any additional fields to your model(s) if you want.

At the bare minimum your `User` and `Role` model should include the following
fields:

**User**

* ``id``
* ``email``
* ``password``
* ``active``


**Role**

* ``id``
* ``name``
* ``description``


Additional Functionality
------------------------

Depending on the application's configuration, additional fields may need to
be added to your `User` model.

Confirmable
^^^^^^^^^^^

If you enable account confirmation by setting your application's
`SECURITY_CONFIRMABLE` configuration value to `True`, your `User` model will
require the following additional field:

* ``confirmed_at``

Trackable
^^^^^^^^^

If you enable user tracking by setting your application's
`SECURITY_TRACKABLE` configuration value to `True`, your `User` model will
require the following additional fields:

* ``last_login_at``
* ``current_login_at``
* ``last_login_ip``
* ``current_login_ip``
* ``login_count``

Custom User Payload
^^^^^^^^^^^^^^^^^^^

If you want a custom payload returned after a user registers or logs in,
define the method ``get_security_payload`` in your `User` model. The method
must return a serializable object:

.. code-block:: python

    class User(db.Model, UserMixin):
        id = db.Column(db.Integer, primary_key=True)
        email = TextField()
        password = TextField()
        active = BooleanField(default=True)
        confirmed_at = DateTimeField(null=True)
        name = db.Column(db.String(80))

        # Custom User Payload
        def get_security_payload(self):
            return {
                'id': self.id,
                'name': self.name,
                'email': self.email
            }
PypiClean
/FightMan01dc.pymod-2.0.4.tar.gz/FightMan01dc.pymod-2.0.4/discord/opus.py
def _err_lt(result, func, args):
    """ctypes ``errcheck`` hook for libopus functions that signal failure
    with a negative return value; raises OpusError on failure and passes
    the result through otherwise."""
    if result < OK:
        log.info('error has happened in %s', func.__name__)
        raise OpusError(result)
    return result
def libopus_loader(name):
    """Load the opus shared library *name* and wire up ctypes metadata.

    Every entry of ``exported_functions`` is looked up on the loaded
    library and gets its ``argtypes``, ``restype`` and (optionally)
    ``errcheck`` assigned.  Raises OSError when the library cannot be
    loaded and AttributeError when an expected symbol is missing.
    """
    # create the library...
    lib = ctypes.cdll.LoadLibrary(name)

    # register the functions...
    for fname, argtypes, restype, errcheck in exported_functions:
        func = getattr(lib, fname)

        # BUG FIX: these assignments used to be wrapped in
        # ``except KeyError`` handlers, but attribute assignment on a
        # ctypes function never raises KeyError, so both handlers were
        # dead code.  argtypes/restype come from a static table and are
        # assigned directly; errcheck assignment stays best-effort, as
        # the original log message intended.
        if argtypes:
            func.argtypes = argtypes
        func.restype = restype

        try:
            if errcheck:
                func.errcheck = errcheck
        except Exception:
            log.info("Error assigning check function to %s", fname)

    return lib
class OpusError(DiscordException):
    """An exception that is thrown for libopus related errors.

    Attributes
    ----------
    code: :class:`int`
        The error code returned.
    """

    def __init__(self, code):
        # Keep the numeric code for callers; resolve it to a human-readable
        # message through libopus itself (the library must already be
        # loaded when this is raised).
        self.code = code
        msg = _lib.opus_strerror(self.code).decode('utf-8')
        log.info('"%s" has happened', msg)
        super().__init__(msg)
def encode(self, pcm, frame_size):
    """Encode *frame_size* samples of 16-bit interleaved PCM and return
    the opus packet as bytes.

    ``pcm`` is a bytes-like buffer of raw int16 samples.  The output
    buffer is sized to the input length, which bounds the encoded packet
    -- presumably always large enough for compressed output, but not
    verified here.
    """
    max_data_bytes = len(pcm)
    # Reinterpret the byte buffer as an int16 pointer for libopus.
    pcm = ctypes.cast(pcm, c_int16_ptr)
    data = (ctypes.c_char * max_data_bytes)()

    # ret is the number of bytes written into ``data``; negative returns
    # are turned into OpusError by the _err_lt errcheck hook.
    ret = _lib.opus_encode(self._state, pcm, frame_size, data, max_data_bytes)

    return array.array('b', data[:ret]).tobytes()
def decode(self, data, *, fec=False):
    """Decode an opus packet into 16-bit interleaved stereo PCM bytes.

    Parameters
    ----------
    data: Optional[bytes]
        The opus packet, or ``None`` to signal packet loss (the decoder
        then conceals, using the last packet's duration).
    fec: bool
        Decode the in-band forward error correction data; requires
        ``data`` to be present.
    """
    if data is None and fec:
        # BUG FIX: this used to raise
        # ``OpusError("Invalid arguments: FEC cannot be used with null data")``,
        # but OpusError.__init__ treats its argument as an int error code
        # and passes it to opus_strerror, so constructing it with a string
        # blew up inside ctypes before the intended error was ever raised.
        # Raising with OPUS_BAD_ARG yields the proper "invalid argument"
        # message while keeping the exception type callers catch.
        raise OpusError(BAD_ARG)

    if data is None:
        # Concealment: synthesize as many samples as the last packet carried.
        frame_size = self._get_last_packet_duration() or self.SAMPLES_PER_FRAME
    else:
        frames = self.packet_get_nb_frames(data)
        samples_per_frame = self.packet_get_samples_per_frame(data)
        frame_size = frames * samples_per_frame

    pcm = (ctypes.c_int16 * (frame_size * self.CHANNELS))()
    pcm_ptr = ctypes.cast(pcm, ctypes.POINTER(ctypes.c_int16))

    # NOTE(review): opus_decode returns the number of samples actually
    # decoded, but the whole buffer is returned here assuming a full
    # frame -- presumably safe for well-formed packets; unverified.
    result = _lib.opus_decode(self._state, data, len(data) if data else 0,
                              pcm_ptr, frame_size, fec)
    return array.array('h', pcm).tobytes()
def reset(self):
    """Return the buffered decoder to a pristine state: fresh Decoder,
    cleared jitter buffer and sequence/timestamp tracking, primed flag
    cleared, and the main loop parked until new packets arrive."""
    with self._lock:
        self._decoder = Decoder()  # TODO: Add a reset function to Decoder itself
        self._last_seq = self._last_ts = 0
        self._buffer.clear()
        self._primed.clear()
        self._end_main_loop.set()  # XXX: racy with _push?
        # Restore the per-frame delay in case stop(flush=True) zeroed it.
        self.DELAY = self.__class__.DELAY
if self._end_main_loop.is_set() and not self._end_thread.is_set(): self._end_main_loop.clear() if not self._primed.is_set(): self._primed.set() # Fake packet loss # import random # if random.randint(1, 100) <= 10 and isinstance(item, RTPPacket): # return with self._lock: existing_packet = utils.get(self._buffer, timestamp=item.timestamp) if isinstance(existing_packet, SilencePacket): # Replace silence packets with rtp packets self._buffer[self._buffer.index(existing_packet)] = item return elif isinstance(existing_packet, RTPPacket): return # duplicate packet bisect.insort(self._buffer, item) # Optional diagnostics, will probably remove later bufsize = len(self._buffer) # indent intentional if bufsize >= self.buffer_size * self._overflow_mult: print(f"[router:push] Warning: rtp heap size has grown to {bufsize}") self._overflow_mult += self._overflow_incr elif bufsize <= self.buffer_size * (self._overflow_mult - self._overflow_incr) \ and self._overflow_mult > self._overflow_base: print(f"[router:push] Info: rtp heap size has shrunk to {bufsize}") self._overflow_mult = max(self._overflow_base, self._overflow_mult - self._overflow_incr) def _pop(self): packet = nextpacket = None with self._lock: try: if not self._finalizing: self._buffer.append(SilencePacket(self.ssrc, self._buffer[-1].timestamp + Decoder.SAMPLES_PER_FRAME)) packet = self._buffer.pop(0) nextpacket = self._buffer[0] except IndexError: pass # empty buffer return packet, nextpacket def _initial_fill(self): """Artisanal hand-crafted function for buffering packets and clearing discord's stupid fucking rtp buffer.""" if self._end_main_loop.is_set(): return # Very small sleep to check if there's buffered packets time.sleep(0.001) if len(self._buffer) > 3: # looks like there's some old packets in the buffer # we need to figure out where the old packets stop and where the fresh ones begin # for that we need to see when we return to the normal packet accumulation rate last_size = len(self._buffer) # wait until 
we have the correct rate of packet ingress while len(self._buffer) - last_size > 1: last_size = len(self._buffer) time.sleep(0.001) # collect some fresh packets time.sleep(0.06) # generate list of differences between packet sequences with self._lock: diffs = [self._buffer[i+1].sequence-self._buffer[i].sequence for i in range(len(self._buffer)-1)] sdiffs = sorted(diffs, reverse=True) # decide if there's a jump jump1, jump2 = sdiffs[:2] if jump1 > jump2 * 3: # remove the stale packets and keep the fresh ones self.truncate(size=len(self._buffer[diffs.index(jump1)+1:])) else: # otherwise they're all stale, dump 'em (does this ever happen?) with self._lock: self._buffer.clear() # fill buffer to at least half full while len(self._buffer) < self.buffer_size // 2: time.sleep(0.001) # fill the buffer with silence aligned with the first packet # if an rtp packet already exists for the given silence packet ts, the silence packet is ignored with self._lock: start_ts = self._buffer[0].timestamp for x in range(1, 1 + self.buffer_size - len(self._buffer)): self._push(SilencePacket(self.ssrc, start_ts + x * Decoder.SAMPLES_PER_FRAME)) # now fill the rest while len(self._buffer) < self.buffer_size: time.sleep(0.001) # TODO: Maybe only wait at most for about as long we we're supposed to? # 0.02 * (buffersize - len(buffer)) def _packet_gen(self): while True: packet, nextpacket = self._pop() self._last_ts = getattr(packet, 'timestamp', self._last_ts + Decoder.SAMPLES_PER_FRAME) self._last_seq += 1 # self._last_seq = packet.sequence? 
if isinstance(packet, RTPPacket): pcm = self._decoder.decode(packet.decrypted_data) elif isinstance(nextpacket, RTPPacket): pcm = self._decoder.decode(packet.decrypted_data, fec=True) fec_packet = FECPacket(self.ssrc, nextpacket.sequence - 1, nextpacket.timestamp - Decoder.SAMPLES_PER_FRAME) yield fec_packet, pcm packet, _ = self._pop() self._last_ts += Decoder.SAMPLES_PER_FRAME self._last_seq += 1 pcm = self._decoder.decode(packet.decrypted_data) elif packet is None: self._finalizing = False break else: pcm = self._decoder.decode(None) yield packet, pcm def _do_run(self): self._primed.wait() self._initial_fill() self._loops = 0 packet_gen = self._packet_gen() start_time = time.perf_counter() try: while not self._end_main_loop.is_set() or self._finalizing: packet, pcm = next(packet_gen) try: self.output_func(pcm, packet.decrypted_data, packet) except: log.exception("Sink raised exception") traceback.print_exc() next_time = start_time + self.DELAY * self._loops self._loops += 1 time.sleep(max(0, self.DELAY + (next_time - time.perf_counter()))) except StopIteration: time.sleep(0.001) # just in case, so we don't slam the cpu finally: packet_gen.close() def run(self): try: while not self._end_thread.is_set(): self._do_run() except Exception as e: log.exception("Error in decoder %s", self.name) traceback.print_exc() class BasePacketDecoder: DELAY = Decoder.FRAME_LENGTH / 1000.0 def feed_rtp(self, packet): raise NotImplementedError def feed_rtcp(self, packet): raise NotImplementedError def truncate(self, *, size=None): raise NotImplementedError def reset(self): raise NotImplementedError class BufferedPacketDecoder(BasePacketDecoder): """Buffers and decodes packets from a single ssrc""" def __init__(self, ssrc, *, buffer=200): if buffer < 40: # technically 20 works but then FEC is useless raise ValueError("buffer size of %s is invalid; cannot be lower than 40" % buffer) self.ssrc = ssrc self._decoder = Decoder() self._buffer = [] self._rtcp_buffer = {} # TODO: Add RTCP 
queue self._last_seq = self._last_ts = 0 # Optional diagnostic state stuff self._overflow_mult = self._overflow_base = 2.0 self._overflow_incr = 0.5 # minimum (lower bound) size of the jitter buffer (n * 20ms per packet) self.buffer_size = buffer // self._decoder.FRAME_LENGTH self._lock = threading.RLock() self._gen = None def __iter__(self): if self._gen is None: self._gen = self._packet_gen() return self._gen def __next__(self): return next(iter(self)) def feed_rtp(self, packet): if self._last_ts < packet.timestamp: self._push(packet) def feed_rtcp(self, packet): with self._lock: if not self._buffer: return # ignore for now, handle properly later self._rtcp_buffer[self._buffer[-1]] = packet def truncate(self, *, size=None): size = self.buffer_size if size is None else size with self._lock: self._buffer = self._buffer[-size:] def reset(self): with self._lock: self._decoder = Decoder() # TODO: Add a reset function to Decoder itself self.DELAY = self.__class__.DELAY self._last_seq = self._last_ts = 0 self._buffer.clear() self._rtcp_buffer.clear() self._gen.close() self._gen = None def _push(self, item): if not isinstance(item, (RTPPacket, SilencePacket)): raise TypeError(f"item should be an RTPPacket, not {item.__class__.__name__}") # Fake packet loss # import random # if random.randint(1, 100) <= 10 and isinstance(item, RTPPacket): # return with self._lock: existing_packet = utils.get(self._buffer, timestamp=item.timestamp) if isinstance(existing_packet, SilencePacket): # Replace silence packets with rtp packets self._buffer[self._buffer.index(existing_packet)] = item return elif isinstance(existing_packet, RTPPacket): return # duplicate packet bisect.insort(self._buffer, item) # Optional diagnostics, will probably remove later bufsize = len(self._buffer) # indent intentional if bufsize >= self.buffer_size * self._overflow_mult: print(f"[router:push] Warning: rtp heap size has grown to {bufsize}") self._overflow_mult += self._overflow_incr elif bufsize <= 
self.buffer_size * (self._overflow_mult - self._overflow_incr) \ and self._overflow_mult > self._overflow_base: print(f"[router:push] Info: rtp heap size has shrunk to {bufsize}") self._overflow_mult = max(self._overflow_base, self._overflow_mult - self._overflow_incr) def _pop(self): packet = nextpacket = None with self._lock: try: self._buffer.append(SilencePacket(self.ssrc, self._buffer[-1].timestamp + Decoder.SAMPLES_PER_FRAME)) packet = self._buffer.pop(0) nextpacket = self._buffer[0] except IndexError: pass # empty buffer return packet, nextpacket # return rtcp packets as well? def _packet_gen(self): # Buffer packets # do I care about dumping buffered packets on reset? # Ok yes this section is going to look weird. To keep everything consistant I need to # wait for a specific number of iterations instead of on the actual buffer size. These # objects are supposed to be time naive. The class handling these is responsible for # keeping the time synchronization. # How many packets we already have pre_fill = len(self._buffer) # How many packets we need to get to half full half_fill = max(0, self.buffer_size//2 - 1 - pre_fill) # How many packets we need to get to full full_fill = self.buffer_size - half_fill print(f"Starting with {pre_fill}, collecting {half_fill}, then {full_fill}") while not self._buffer: yield None, None for x in range(half_fill-1): yield None, None with self._lock: start_ts = self._buffer[0].timestamp for x in range(1, 1 + self.buffer_size - len(self._buffer)): self._push(SilencePacket(self.ssrc, start_ts + x * Decoder.SAMPLES_PER_FRAME)) for x in range(full_fill): yield None, None while True: packet, nextpacket = self._pop() self._last_ts = getattr(packet, 'timestamp', self._last_ts + Decoder.SAMPLES_PER_FRAME) self._last_seq += 1 # self._last_seq = packet.sequence? 
if isinstance(packet, RTPPacket): pcm = self._decoder.decode(packet.decrypted_data) elif isinstance(nextpacket, RTPPacket): pcm = self._decoder.decode(packet.decrypted_data, fec=True) fec_packet = FECPacket(self.ssrc, nextpacket.sequence - 1, nextpacket.timestamp - Decoder.SAMPLES_PER_FRAME) yield fec_packet, pcm packet, _ = self._pop() self._last_ts += Decoder.SAMPLES_PER_FRAME self._last_seq += 1 pcm = self._decoder.decode(packet.decrypted_data) elif packet is None: break else: pcm = self._decoder.decode(None) yield packet, pcm class BufferedDecoder(threading.Thread): """Ingests rtp packets and dispatches to decoders and sink output function.""" def __init__(self, reader, *, decodercls=BufferedPacketDecoder): super().__init__(daemon=True, name='DecoderBuffer') self.reader = reader self.decodercls = decodercls self.output_func = reader._write_to_sink self.decoders = {} self.initial_buffer = [] self.queue = deque() self._end_thread = threading.Event() self._has_decoder = threading.Event() self._lock = threading.Lock() def _get_decoder(self, ssrc): dec = self.decoders.get(ssrc) if not dec and self.reader.client._get_ssrc_mapping(ssrc=ssrc)[1]: # and get_user(ssrc)? dec = self.decoders[ssrc] = self.decodercls(ssrc) dec.start_time = time.perf_counter() # :thinking: dec.loops = 0 # :thinking::thinking::thinking: self.queue.append((dec.start_time, dec)) self._has_decoder.set() return dec def _feed_rtp_initial(self, packet): with self._lock: self.initial_buffer.append(packet) def feed_rtp(self, packet): dec = self._get_decoder(packet.ssrc) if dec: return dec.feed_rtp(packet) def feed_rtcp(self, packet): # RTCP packets themselves don't really belong to a decoder # I could split the reports up or send to all idk its weird dec = self._get_decoder(packet.ssrc) if dec: print(f"RTCP packet: {packet}") return dec.feed_rtcp(packet) def drop_ssrc(self, ssrc): dec = self.decoders.pop(ssrc, None) if dec: # dec/self.flush()? 
dec.reset() if not self.decoders: self._has_decoder.clear() def reset(self, *ssrcs): with self._lock: if not ssrcs: ssrcs = tuple(self.decoders.keys()) for ssrc in ssrcs: dec = self.decoders.get(ssrc) if dec: dec.reset() def flush(self, *ssrcs): ... # The new idea is to call a special flush event function on the sink with the # rest of the audio buffer when exiting so the user can use or ignore it def stop(self, **kwargs): for decoder in tuple(self.decoders.values()): # decoder.stop(**kwargs) decoder.reset() def _initial_fill(self): # Fill a single buffer first then dispense into the actual buffers try: normal_feed_rtp = self.feed_rtp self.feed_rtp = self._feed_rtp_initial buff = self.initial_buffer # Very small sleep to check if there's buffered packets time.sleep(0.002) if len(buff) > 3: # looks like there's some old packets in the buffer # we need to figure out where the old packets stop and where the fresh ones begin # for that we need to see when we return to the normal packet accumulation rate last_size = len(buff) # wait until we have the correct rate of packet ingress while len(buff) - last_size > 1: last_size = len(buff) time.sleep(0.001) # collect some fresh packets time.sleep(0.06) # generate list of differences between packet sequences with self._lock: diffs = [buff[i+1].sequence - buff[i].sequence for i in range(len(buff)-1)] sdiffs = sorted(diffs, reverse=True) # decide if there's a jump jump1, jump2 = sdiffs[:2] if jump1 > jump2 * 3: # remove the stale packets and keep the fresh ones with self._lock: size = len(buff[diffs.index(jump1)+1:]) buff = buff[-size:] else: # otherwise they're all stale, dump 'em (does this ever happen?) with self._lock: buff.clear() # The old version of this code backfilled buffers based on the buffer size. # We dont have that here but we can just have the individual buffer objects # backfill themselves. 
# Dump initial buffer into actual buffers with self._lock: for packet in buff: normal_feed_rtp(packet) self.feed_rtp = normal_feed_rtp finally: self.feed_rtp = normal_feed_rtp def decode(self, decoder): data = next(decoder) if any(data): packet, pcm = data try: self.output_func(pcm, packet.decrypted_data, packet) except: log.exception("Sink raised exception") traceback.print_exc() decoder.loops += 1 decoder.next_time = decoder.start_time + decoder.DELAY * decoder.loops self.queue.append((decoder.next_time, decoder)) def _do_run(self): while not self._end_thread.is_set(): self._has_decoder.wait() next_time, decoder = self.queue.popleft() remaining = next_time - time.perf_counter() if remaining >= 0: insort(self.queue, (next_time, decoder)) time.sleep(max(0.002, remaining/2)) # sleep accuracy tm continue self.decode(decoder) def run(self): try: self._do_run() except Exception as e: log.exception("Error in decoder %s", self.name) traceback.print_exc()
PypiClean
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/place_digest_ghg_monthly_response_py3.py
from msrest.serialization import Model


class PlaceDigestGHGMonthlyResponse(Model):
    """Monthly greenhouse-gas digest for a single place.

    :param place_code: The place code
    :type place_code: str
    :param place_info: The place info
    :type place_info: str
    :param place_id: The place identifier
    :type place_id: int
    :param updated: The date and time the data was updated
    :type updated: datetime
    :param equivalent_co2_emissions_unit:
    :type equivalent_co2_emissions_unit: ~energycap.sdk.models.UnitChild
    :param ghg_types: List of greenhouse gas types with use by month
    :type ghg_types:
     list[~energycap.sdk.models.PlaceDigestGHGMonthlyResponseGHGType]
    :param ghg_scopes: List of greenhouse gas scopes with use by month
    :type ghg_scopes:
     list[~energycap.sdk.models.PlaceDigestGHGMonthlyResponseGHGScope]
    :param commodities: List of greenhouse gas commodities with use by month
    :type commodities:
     list[~energycap.sdk.models.PlaceDigestGHGMonthlyResponseGHGCommodity]
    """

    # msrest (de)serialization table: python attribute -> wire key/type.
    _attribute_map = {
        'place_code': {'key': 'placeCode', 'type': 'str'},
        'place_info': {'key': 'placeInfo', 'type': 'str'},
        'place_id': {'key': 'placeId', 'type': 'int'},
        'updated': {'key': 'updated', 'type': 'iso-8601'},
        'equivalent_co2_emissions_unit': {'key': 'equivalentCO2EmissionsUnit', 'type': 'UnitChild'},
        'ghg_types': {'key': 'ghgTypes', 'type': '[PlaceDigestGHGMonthlyResponseGHGType]'},
        'ghg_scopes': {'key': 'ghgScopes', 'type': '[PlaceDigestGHGMonthlyResponseGHGScope]'},
        'commodities': {'key': 'commodities', 'type': '[PlaceDigestGHGMonthlyResponseGHGCommodity]'},
    }

    def __init__(self, *, place_code: str=None, place_info: str=None, place_id: int=None, updated=None, equivalent_co2_emissions_unit=None, ghg_types=None, ghg_scopes=None, commodities=None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Assign every keyword straight onto the instance; the pairs
        # mirror _attribute_map one for one.
        for attr_name, attr_value in (
                ('place_code', place_code),
                ('place_info', place_info),
                ('place_id', place_id),
                ('updated', updated),
                ('equivalent_co2_emissions_unit', equivalent_co2_emissions_unit),
                ('ghg_types', ghg_types),
                ('ghg_scopes', ghg_scopes),
                ('commodities', commodities),
                ):
            setattr(self, attr_name, attr_value)
PypiClean
/Moar-3.0.0.tar.gz/Moar-3.0.0/moar/engines/base.py
from abc import ABCMeta, abstractmethod
import inspect
from math import ceil


class BaseEngine(object):
    """Abstract base class for image thumbnailing engines.

    Concrete engines implement the abstract methods (open/measure/scale an
    engine-specific image object); this base class provides the geometry
    arithmetic (`set_geometry`) and the filter pipeline (`apply_filters`).
    """

    # Python 2 style metaclass declaration, kept for backward
    # compatibility.  NOTE(review): on Python 3 this attribute is inert,
    # so abstract-method enforcement only happens under Python 2.
    __metaclass__ = ABCMeta

    @abstractmethod
    def open_image(self, path_or_stream):
        """Open and return an engine-specific image object."""
        pass

    def close_image(self, im):
        """Release resources held by ``im`` (no-op by default)."""
        pass

    @abstractmethod
    def get_size(self, im):
        """Return the ``(width, height)`` of ``im`` in pixels."""
        pass

    @abstractmethod
    def get_data(self, im, options):
        """Return the raw image data for ``im``."""
        pass

    @abstractmethod
    def scale(self, im, width, height):
        """Return ``im`` rescaled to ``width`` x ``height`` pixels."""
        return im

    @abstractmethod
    def set_orientation(self, im):
        """Return ``im`` rotated according to its orientation metadata."""
        return im

    def set_geometry(self, im, geometry, options=None):
        """Rescale the image to the new geometry.

        ``geometry`` is a ``(width, height)`` pair; either member may be
        ``None`` to mean "derive from the aspect ratio".  ``options`` may
        contain:

        - ``upscale``: allow enlarging an image smaller than the target
          (default: falsy, i.e. do not upscale);
        - ``resize``: one of ``'fill'`` (default), ``'fit'`` or
          ``'stretch'`` when both dimensions are given.

        Returns ``im`` unchanged when no resizing is needed.
        """
        if not geometry:
            return im
        options = options or {}
        width, height = geometry
        if not width and not height:
            return im

        imw, imh = self.get_size(im)
        # Geometry matches the current size?  Nothing to do then.
        if (width is None) or (imw == width):
            if (height is None) or (imh == height):
                return im

        # BUGFIX: this used to read options['upscale'], raising KeyError
        # whenever the image was smaller than the target and the caller
        # did not pass an 'upscale' option.  Default to "do not upscale".
        upscale = options.get('upscale')
        ratio = float(imw) / imh

        if width and height:
            # Smaller than the target?
            smaller = (imw <= width) and (imh <= height)
            if smaller and not upscale:
                return im
            resize = options.get('resize', 'fill')
            if resize == 'fill':
                # Cover the whole target box, cropping may be needed.
                new_width = width
                new_height = int(ceil(width / ratio))
                if new_height < height:
                    new_height = height
                    new_width = int(ceil(height * ratio))
            elif resize == 'fit':
                # Fit entirely inside the target box.
                new_width = int(ceil(height * ratio))
                new_height = height
                if new_width > width:
                    new_width = width
                    new_height = int(ceil(width / ratio))
            elif resize == 'stretch':
                # Ignore the aspect ratio.
                new_width = width
                new_height = height
        elif height:
            # Only the height is constrained; width follows the ratio.
            smaller = imh <= height
            if smaller and not upscale:
                return im
            new_width = int(ceil(height * ratio))
            new_height = height
        else:
            # Only the width is constrained; height follows the ratio.
            smaller = imw <= width
            if smaller and not upscale:
                return im
            new_width = width
            new_height = int(ceil(width / ratio))

        im = self.scale(im, new_width, new_height)
        return im

    def apply_filters(self, im, filters, custom_filters, options):
        """Apply each ``(name, *args)`` filter to ``im``, in order."""
        for f in filters:
            fname = f[0]
            args = f[1:]
            ff = self.get_filter(fname, custom_filters)
            im = ff.apply(im, *args, **options)
        return im

    def get_filter(self, fn, custom_filters):
        """Look up a filter by name (custom first), instantiating classes."""
        ff = custom_filters.get(fn)
        if ff is None:
            # Imported lazily so this module can be loaded (and the
            # geometry logic used) without the filters package available.
            from moar import filters as available_filters
            ff = getattr(available_filters, fn)
        if inspect.isclass(ff):
            ff = ff()
        return ff
PypiClean
/Allegra-0.63.zip/Allegra-0.63/lib/async_client.py
"http://laurentszyster.be/blog/async_client/" import time, socket, collections try: SOCKET_FAMILIES = (socket.AF_INET, socket.AF_UNIX) except: SOCKET_FAMILIES = (socket.AF_INET, ) from allegra import loginfo, async_loop, async_limits def connect (dispatcher, addr, timeout=3.0, family=socket.AF_INET): "create a socket, try to connect it and schedule a timeout" assert ( not dispatcher.connected and timeout > 0 and family in SOCKET_FAMILIES ) dispatcher.client_when = time.time () dispatcher.client_timeout = timeout try: dispatcher.create_socket (family, socket.SOCK_STREAM) dispatcher.connect (addr) except: dispatcher.loginfo_traceback () dispatcher.handle_error () return False assert None == dispatcher.log ('connect', 'debug') def connect_timeout (when): "if not connected and not closing yet, handle close" if not dispatcher.connected and not dispatcher.closing: assert None == dispatcher.log ( 'connect-timeout %f seconds' % ( when - dispatcher.client_when ), 'debug' ) dispatcher.handle_close () async_loop.schedule ( dispatcher.client_when + timeout, connect_timeout ) return True def reconnect (dispatcher): if dispatcher.addr: dispatcher.closing = False return connect ( dispatcher, dispatcher.addr, dispatcher.client_timeout, dispatcher.family_and_type[0] ) return False class Connections (loginfo.Loginfo): "a connection manager for async_client.Dispatcher instances" ac_in_meter = ac_out_meter = 0 client_errors = client_when = client_dispatched = 0 def __init__ ( self, timeout=3.0, precision=1.0, family=socket.AF_INET ): "initialize a new client manager" assert ( timeout > 0 and precision > 0 and family in SOCKET_FAMILIES ) self.client_managed = {} self.client_timeout = timeout self.client_precision = precision self.client_family = family resolved (self) inactive (self, timeout) def __call__ (self, dispatcher, name): "registed, decorate and connect a new dispatcher" if self.client_connect (dispatcher, name): now = time.time () dispatcher.async_client = self 
self.client_decorate (dispatcher, now) key = id (dispatcher) self.client_managed[key] = dispatcher dispatcher.client_key = key if len (self.client_managed) == 1: self.client_start (now) else: self.client_errors += 1 return dispatcher def client_connect (self, dispatcher, name): "resolve and/or connect a dispatcher" dispatcher.client_name = name addr = self.client_resolved (name) if addr != None: return connect ( dispatcher, addr, self.client_timeout, self.client_family ) if self.client_resolve == None: self.client_unresolved (dispatcher, addr) return False def resolve (addr): if addr == None: self.client_unresolved (dispatcher, name) return if not connect ( dispatcher, addr, self.client_timeout, self.client_family ): self.client_errors += 1 self.client_resolve (name, resolve) return True def client_reconnect (self, dispatcher): dispatcher.closing = False self (dispatcher, dispatcher.client_name) return dispatcher.closing def client_unresolved (self, dispatcher, name): "assert debug log and close an unresolved dispatcher" assert None == dispatcher.log ( '%r unresolved' % (name, ), 'debug' ) self.client_errors += 1 dispatcher.handle_close () def client_start (self, when): "handle the client management startup" self.client_when = when async_loop.schedule ( when + self.client_precision, self.client_manage ) assert None == self.log ('start', 'debug') def client_manage (self, when): "test limits overflow, recure or stop" for dispatcher in self.client_dispatchers (): if self.client_limit (dispatcher, when): self.client_overflow (dispatcher) if self.client_managed: return ( when + self.client_precision, self.client_manage ) # continue to defer self.client_stop (when) return None def client_dispatchers (self): "return a list of managed dispatchers" return self.client_managed.values () def client_overflow (self, dispatcher): "assert debug log and close an overflowed dispatcher" assert None == dispatcher.log ('limit overflow', 'debug') dispatcher.handle_close () def 
client_meter (self, dispatcher): "assert debug log and account I/O meters of a dispatcher" assert None == dispatcher.log ( 'in="%d" out="%d"' % ( dispatcher.ac_in_meter, dispatcher.ac_out_meter ), 'debug' ) self.ac_in_meter += dispatcher.ac_in_meter self.ac_out_meter += dispatcher.ac_out_meter self.client_dispatched += 1 def client_close (self, dispatcher): "remove the dispatcher from cache and meter dispatched" del self.client_managed[dispatcher.client_key] self.client_meter (dispatcher) dispatcher.async_client = None def client_stop (self, when): "handle the client management stop" assert None == self.log ( 'stop errors="%d" dispatched="%d"' ' seconds="%f" in="%d" out="%d"' % ( self.client_errors, self.client_dispatched, (when - self.client_when), self.ac_in_meter, self.ac_out_meter ), 'debug') self.client_errors = self.client_dispatched = \ self.ac_in_meter = self.ac_out_meter = 0 def close_when_done (self): "close all client dispatchers when done" for dispatcher in self.client_dispatchers (): dispatcher.close_when_done () class Cache (Connections): "a cache of managed connections" def __init__ ( self, timeout=3.0, precision=1.0, family=socket.AF_INET ): "initialize a new client cache" assert ( timeout > 0 and precision > 0 and family in SOCKET_FAMILIES ) self.client_managed = {} self.client_timeout = timeout self.client_precision = precision self.client_family = family resolved (self) inactive (self, timeout) def __call__ (self, Dispatcher, name): """return a cached or a new dispatcher, maybe resolving and connecting it first, closing it on connection error or if it's socket address cannot be resolved""" try: return self.client_managed[name] except KeyError: pass dispatcher = Dispatcher () if self.client_connect (dispatcher, name): now = time.time () dispatcher.async_client = self self.client_decorate (dispatcher, now) self.client_managed[name] = dispatcher dispatcher.client_key = name if len (self.client_managed) == 1: self.client_start (now) else: 
self.client_errors += 1 return dispatcher class Pool (Connections): "a pool of managed connections" def __init__ ( self, Dispatcher, name, size=2, timeout=3.0, precision=1.0, family=socket.AF_INET ): "initialize a new client pool" assert ( type (size) == int and size > 1 and timeout > 0 and precision > 0 and family in SOCKET_FAMILIES ) self.client_managed = [] self.client_pool = size self.client_name = name self.client_called = 0 self.Client_dispatcher = Dispatcher self.client_timeout = timeout self.client_precision = precision self.client_family = family resolved (self) inactive (self, timeout) def __call__ (self): """return the next dispatcher pooled or instanciate a new one, maybe resolving and connecting it first, closing it on connection error or if it's socket address cannot be resolved""" size = len (self.client_managed) if size >= self.client_pool: self.client_called += 1 return self.client_managed[self.client_called % size] now = time.time () dispatcher = self.Client_dispatcher () if self.client_connect (dispatcher, self.client_name): dispatcher.async_client = self self.client_decorate (dispatcher, now) self.client_managed.append (dispatcher) if len (self.client_managed) == 1: self.client_start (now) else: self.client_errors += 1 return dispatcher def client_reconnect (self, dispatcher): return False # useless for a cached dispatcher! 
def client_dispatchers (self): "return a list of dispatchers pooled" return list (self.client_managed) def client_close (self, dispatcher): "remove the dispatcher from pool and increment dispatched" self.client_meter (dispatcher) self.client_managed.remove (dispatcher) dispatcher.async_client = None def resolved (connections): "allways resolved for unresolved dispatcher address" connections.client_resolved = (lambda addr: addr) connections.client_resolve = None return connections def meter (dispatcher, when): "decorate a client dispatcher with stream meters" async_limits.meter_recv (dispatcher, when) async_limits.meter_send (dispatcher, when) def close (): del ( dispatcher.recv, dispatcher.send, dispatcher.close ) dispatcher.close () dispatcher.async_client.client_close (dispatcher) dispatcher.close = close def no_limit (dispatcher, when): return False def unlimited (connections): "meter I/O for unlimited client streams" connections.client_decorate = meter connections.client_limit = no_limit return connections def inactive (connections, timeout): "meter I/O and limit inactivity for client streams" assert timeout > 0 def decorate (dispatcher, when): meter (dispatcher, when) dispatcher.limit_inactive = connections.client_inactive connections.client_decorate = decorate connections.client_inactive = timeout connections.client_limit = async_limits.inactive return connections def limited (connections, timeout, inBps, outBps): "throttle I/O and limit inactivity for managed client streams" assert ( timeout > 0 and type (inBps ()) == int and inBps () > 0 and type (outBps ()) == int and outBps () > 0 ) def throttle (dispatcher, when): "decorate a client dispatcher with stream limits" async_limits.meter_recv (dispatcher, when) async_limits.meter_send (dispatcher, when) dispatcher.limit_inactive = timeout async_limits.throttle_readable ( dispatcher, when, connections.ac_in_throttle_Bps ) async_limits.throttle_writable ( dispatcher, when, connections.ac_out_throttle_Bps ) def 
close (): del ( dispatcher.recv, dispatcher.send, dispatcher.readable, dispatcher.writable, dispatcher.close ) dispatcher.close () dispatcher.async_client.client_close (dispatcher) dispatcher.close = close connections.client_decorate = throttle connections.ac_in_throttle_Bps = inBps connections.ac_out_throttle_Bps = outBps connections.client_limit = async_limits.limit return connections def rationed (connections, timeout, inBps, outBps): "ration I/O and limit inactivity for managed client streams" assert ( timeout > 0 and type (inBps) == int and inBps > 0 and type (outBps) == int and outBps > 0 ) connections.ac_in_ration_Bps = inBps connections.ac_out_ration_Bps = outBps def throttle_in (): return int (connections.ac_in_ration_Bps / max (len ( connections.client_managed ), 1)) def throttle_out (): return int (connections.ac_out_ration_Bps / max (len ( connections.client_managed ), 1)) return limited (connections, timeout, throttle_in, throttle_out) class Pipeline (object): "a pipeline mix-in for dispatcher" #pipeline_sleeping = False pipeline_pipelining = False pipeline_keep_alive = False def pipeline_set (self, requests=None, responses=None): "set new requests and responses deque" self.pipeline_requests = requests or collections.deque () self.pipeline_responses = responses or collections.deque () #def pipeline (self, request): # "pipeline a new request, wake up if sleeping" # self.pipeline_requests.append (request) # if self.pipeline_sleeping: # self.pipeline_sleeping = False # self.pipeline_wake_up () def pipeline_wake_up (self): requests = self.pipeline_requests if self.pipeline_pipelining and len (requests) > 1: self.pipeline_requests = deque () self.output_fifo.extend (( request[0] for request in requests )) self.pipeline_responses.extend (requests) else: request = self.pipeline_requests.popleft () self.output_fifo.append (request[0]) self.pipeline_responses.append (request)
PypiClean
/FuzzyClassificator-1.3.84-py3-none-any.whl/pybrain/rl/environments/twoplayergames/tasks/relativetask.py
__author__ = 'Tom Schaul, tom@idsia.ch'

from capturetask import CaptureGameTask
from pybrain.rl.environments.twoplayergames.capturegameplayers import ModuleDecidingPlayer
from pybrain.rl.environments.twoplayergames import CaptureGame
from pybrain.rl.environments.twoplayergames.capturegameplayers.captureplayer import CapturePlayer
from pybrain.structure.networks.custom.capturegame import CaptureGameNetwork


class RelativeCaptureTask(CaptureGameTask):
    """ returns the (anti-symmetric) relative score of p1 with respect to p2.
    (p1 and p2 are CaptureGameNetworks)
    The score depends on:
    - greedy play
    - play with fixed starting positions (only first stone)
    - moves-until-win or moves-until-defeat (winning faster is better)
    - play with noisy moves (e.g. adjusting softmax temperature)
    """

    # are networks provided?
    useNetworks = False

    # maximal number of games per evaluation
    maxGames = 3

    # proportion of games played from a preset first stone
    presetGamesProportion = 0.5

    minTemperature = 0
    maxTemperature = 0.2

    verbose = False

    # coefficient determining the importance of long vs. short games
    # w.r. to winning/losing
    numMovesCoeff = 0.5

    def __init__(self, size, **args):
        self.setArgs(**args)
        self.size = size
        self.task = CaptureGameTask(self.size)
        self.env = self.task.env
        if self.presetGamesProportion > 0:
            self.sPos = self._fixedStartingPos()
            self.cases = int(len(self.sPos) / self.presetGamesProportion)
        else:
            self.cases = 1
        self.maxmoves = self.size * self.size
        self.minmoves = 3

    def __call__(self, p1, p2):
        """Play up to maxGames between p1 and p2 and return the weighted
        relative score of p1 (positive means p1 is better)."""
        self.temp = self.minTemperature
        if self.useNetworks:
            p1 = ModuleDecidingPlayer(p1, self.task.env, temperature=self.temp)
            p2 = ModuleDecidingPlayer(p2, self.task.env, temperature=self.temp)
        else:
            assert isinstance(p1, CapturePlayer)
            assert isinstance(p2, CapturePlayer)
            p1.game = self.task.env
            p2.game = self.task.env
        p1.color = CaptureGame.BLACK
        p2.color = -p1.color
        self.player = p1
        self.opponent = p2

        # the games with increasing temperatures and lower coefficients
        coeffSum = 0.
        score = 0.
        np = int(self.cases * (1 - self.presetGamesProportion))
        for i in range(self.maxGames):
            coeff = 1 / (10 * self.temp + 1)
            preset = None
            if self.cases > 1:
                if i % self.cases >= np:
                    preset = self.sPos[(i - np) % self.cases]
                elif i < self.cases:
                    # greedy, no need to repeat, just increase the coefficient
                    if i == 0:
                        coeff *= np
                    else:
                        continue
            res = self._oneGame(preset)
            score += coeff * res
            coeffSum += coeff
            if self.cases == 1 or (i % self.cases == 0 and i > 0):
                self._globalWarming()
        return score / coeffSum

    def _globalWarming(self):
        """ increase temperature """
        if self.temp == 0:
            self.temp = 0.02
        else:
            self.temp *= 1.5
        if self.temp > self.maxTemperature:
            return False
        elif self._setTemperature() == False:
            # not adjustable, keep it fixed then.
            self.temp = self.minTemperature
            return False
        return True

    def _setTemperature(self):
        """Propagate the current temperature to both players; return
        False when the players are not adjustable."""
        if self.useNetworks:
            self.opponent.temperature = self.temp
            self.player.temperature = self.temp
            return True
        elif hasattr(self.opponent, 'randomPartMoves'):
            # an approximate conversion of temperature into random proportion:
            randPart = self.temp / (self.temp + 1)
            self.opponent.randomPartMoves = randPart
            self.player.randomPartMoves = randPart
            return True
        else:
            return False

    def _fixedStartingPos(self):
        """ a list of starting positions, not along the border, and
        respecting symmetry. """
        res = []
        if self.size < 3:
            return res
        # BUGFIX: use floor division -- on Python 3 the original true
        # division produced a float and range() raised TypeError (the
        # integer semantics are pinned by the asserts in __main__).
        for x in range(1, (self.size + 1) // 2):
            for y in range(x, (self.size + 1) // 2):
                res.append((x, y))
        return res

    def _oneGame(self, preset=None):
        """ a single black stone can be set as the first move. """
        self.env.reset()
        if preset is not None:
            self.env._setStone(CaptureGame.BLACK, preset)
            self.env.movesDone += 1
            self.env.playToTheEnd(self.opponent, self.player)
        else:
            self.env.playToTheEnd(self.player, self.opponent)
        moves = self.env.movesDone
        win = self.env.winner == self.player.color
        if self.verbose:
            print('Preset:', preset, 'T:', self.temp, 'Win:', win, 'after', moves, 'moves.')
        # shorter wins (and longer losses) score closer to +/- 1
        res = 1 - self.numMovesCoeff * (moves - self.minmoves) / (self.maxmoves - self.minmoves)
        if win:
            return res
        else:
            return -res


if __name__ == '__main__':
    assert RelativeCaptureTask(5)._fixedStartingPos() == [(1, 1), (1, 2), (2, 2)]
    assert RelativeCaptureTask(8)._fixedStartingPos() == [(1, 1), (1, 2), (1, 3), (2, 2), (2, 3), (3, 3)]
    net1 = CaptureGameNetwork(hsize=1)
    net2 = CaptureGameNetwork(hsize=1)
    r = RelativeCaptureTask(5, maxGames=40, useNetworks=True,
                            presetGamesProportion=0.5)
    print(r(net1, net2))
    print(r(net2, net1))
    r.maxGames = 200
    print(r(net1, net2))
    print(r(net2, net1))
PypiClean
/Camelot-13.04.13-gpl-pyqt.tar.gz/Camelot-13.04.13-gpl-pyqt/camelot/view/controls/delegates/delegatemanager.py
import logging

logger = logging.getLogger('camelot.view.controls.delegates.delegatemanager')

from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import Qt


class DelegateManager(QtGui.QItemDelegate):
    """Manages custom delegates, should not be used by the application
    developer.

    Keeps a mapping from column index to a delegate instance and forwards
    every QItemDelegate operation (painting, editor creation, data
    transfer, size hints) to the delegate registered for the column of
    the index being handled.
    """

    def __init__(self, parent=None, **kwargs):
        QtGui.QItemDelegate.__init__(self, parent)
        #: maps a column index to the delegate responsible for that column
        self.delegates = {}

    def set_columns_desc(self, columnsdesc):
        """Store the column descriptions for later use."""
        self.columnsdesc = columnsdesc

    def get_column_delegate(self, column):
        """Return the delegate registered for ``column``.

        :param column: integer column index
        :raises KeyError: when no delegate was registered for the column
        """
        try:
            return self.delegates[column]
        except KeyError:
            # log enough context to identify the offending column, then
            # re-raise the original KeyError so the key and traceback are kept
            # (the old code raised a fresh, argument-less KeyError)
            logger.error('Programming Error, no delegate available for column %s', column)
            logger.error('Available columns : %s', unicode(self.delegates.keys()))
            raise

    def insertColumnDelegate(self, column, delegate):
        """Inserts a custom column delegate"""
        logger.debug('inserting delegate for column %s', column)
        assert delegate is not None
        delegate.setParent(self)
        self.delegates[column] = delegate
        # forward the child delegate's editing signals through this manager,
        # so views connected to the manager see them as its own
        delegate.commitData.connect(self._commit_data)
        delegate.closeEditor.connect(self._close_editor)

    def _commit_data(self, editor):
        # relay slot for the child delegates' commitData signal
        self.commitData.emit(editor)

    @QtCore.pyqtSlot(QtGui.QWidget, QtGui.QAbstractItemDelegate.EndEditHint)
    def _close_editor(self, editor, hint):
        # relay slot for the child delegates' closeEditor signal
        self.closeEditor.emit(editor, hint)

    def removeColumnDelegate(self, column):
        """Removes custom column delegate"""
        logger.debug('removing a custom column delegate')
        if column in self.delegates:
            del self.delegates[column]

    def paint(self, painter, option, index):
        """Use a custom delegate paint method if it exists"""
        delegate = self.get_column_delegate(index.column())
        delegate.paint(painter, option, index)

    def createEditor(self, parent, option, index):
        """Use a custom delegate createEditor method if it exists"""
        try:
            delegate = self.get_column_delegate(index.column())
            editor = delegate.createEditor(parent, option, index)
        except Exception as e:
            logger.error('Programming Error : could not createEditor editor data for editor at column %s' % (index.column()), exc_info=e)
            # fall back to an empty widget so editing fails gracefully
            return QtGui.QWidget(parent=parent)
        return editor

    def setEditorData(self, editor, index):
        """Use a custom delegate setEditorData method if it exists"""
        logger.debug('setting editor data for column %s', index.column())
        try:
            delegate = self.get_column_delegate(index.column())
            delegate.setEditorData(editor, index)
        except Exception as e:
            logger.error('Programming Error : could not set editor data for editor at column %s' % (index.column()), exc_info=e)
            logger.error('value that could not be set : %s' % unicode(index.model().data(index, Qt.EditRole)))

    def setModelData(self, editor, model, index):
        """Use a custom delegate setModelData method if it exists"""
        logger.debug('setting model data for column %s', index.column())
        delegate = self.get_column_delegate(index.column())
        delegate.setModelData(editor, model, index)

    def sizeHint(self, option, index):
        # a fresh default option object is used on purpose: the delegate
        # computes its own styling for the hint
        option = QtGui.QStyleOptionViewItem()
        delegate = self.get_column_delegate(index.column())
        return delegate.sizeHint(option, index)
PypiClean
/EuroPython2006_PyQt4_Examples-1.0.zip/EuroPython2006_PyQt4_Examples-1.0/Main Window Classes/mdi/mdi_rc.py
from PyQt4 import QtCore qt_resource_data = "\ \x00\x00\x03\x54\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\ \x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\ \x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\ \x79\x71\xc9\x65\x3c\x00\x00\x02\xe6\x49\x44\x41\x54\x58\xc3\xd5\ \x97\xcd\x4e\x13\x61\x14\x86\xeb\x35\x94\x95\x7b\x71\xe1\xd2\xc4\ \xe0\x05\xb8\xe2\x0e\x5c\xb8\xf4\x02\x5c\xb1\x30\xea\x05\x18\x96\ \x26\x62\x58\xb8\xb0\x91\x58\x20\xd1\x9d\xbf\x89\xa4\x14\xb1\x52\ \xa4\x48\x45\x94\xfe\xd0\x02\x43\xff\xa6\x9d\x19\xa6\x65\x80\xe3\ \x79\x7b\xfa\x85\x51\x4a\x82\xc9\x21\x86\x49\xde\x9c\x33\xa7\xf3\ \xcd\xfb\x9c\xf3\x4d\x9b\x4e\x84\x88\x22\xff\x53\x91\x73\x01\xc0\ \xc7\xd5\x90\x6e\xff\xa5\xfb\xac\xc7\x3d\x3d\x64\x0d\xa9\x02\xf0\ \x31\x32\x3c\x3c\xbc\x6a\x34\x3a\x3a\xba\x19\x56\x3c\x1e\xaf\x26\ \x93\xc9\x56\x3a\x9d\x76\x13\x89\x44\x6b\x60\x60\x20\xcd\x6b\x6e\ \x68\x02\xa4\x38\xd2\xe1\xe1\x71\x99\xba\xef\xb7\xc9\xb2\x2c\xda\ \xdf\xdf\x27\x86\xf1\x78\xcd\x18\xeb\x8a\x1a\x40\x3f\xf3\xb0\x1c\ \xc7\xa5\x4c\x66\xb9\x0b\x14\x04\x01\xc5\x62\xb1\x3a\xaf\x7b\x70\ \x1a\x88\x53\x01\x1c\x1c\x10\x77\x77\xb2\x6c\xdb\xa1\xf9\xf9\xcf\ \x64\x0e\xd7\x75\xe9\xf9\xc4\x44\x17\x42\x05\x00\x26\x7b\xc1\xc9\ \xaa\x37\x1c\x4a\xce\xcd\x53\xf8\x70\x5d\x0f\x8b\x17\x54\x00\x82\ \x10\x40\x67\x4f\x14\xce\xed\xa6\x47\x1f\x67\x66\xe9\xf5\x9b\xb7\ \x14\x9f\x9c\xa4\xa9\xa9\x69\x7a\xf7\xfe\x03\x45\xa3\xd1\x65\x5e\ \x7f\x41\x05\xc0\xef\x10\xed\xb6\x25\x86\x85\x9a\xe3\x05\x94\x5d\ \xcd\xd1\xe4\xf4\x2b\x7a\x32\xfe\x94\x9e\xc5\x5e\xd0\x4c\x62\x0e\ \x8b\x17\x55\x00\xda\x81\x18\xf5\x13\x20\x3c\xff\x90\x6a\xcd\x36\ \x15\x37\xab\x94\x2f\x6e\x53\x89\x63\x8d\xb7\x85\xd7\x7e\x51\x01\ \xf0\x79\xcc\xcd\x5d\x1e\xb5\xc7\x7b\xdb\xee\x9f\x3b\xbe\xe4\x88\ \x5d\xb8\xbd\xee\xe2\x94\xca\x33\xe0\x75\xe4\xc6\x75\x57\x62\xd8\ 
\x10\x39\xea\xe6\x33\x44\xd4\x01\xa7\x06\xe0\xf4\x3a\xad\x39\x22\ \x98\x98\x68\x72\x80\x98\x6b\x50\x53\x9d\x00\x00\x2a\x2d\xb9\x31\ \xe2\x4e\x53\x8c\x10\x0d\x04\xf2\x6d\xfb\x28\xb6\x7c\x45\x00\x9b\ \x3b\xdb\x6a\xfc\x69\x8e\x3c\x6c\x88\x1a\xae\x39\x13\x80\x3a\x8f\ \xb7\x54\x23\x2a\xd7\xc5\x04\x06\x06\x00\x35\x28\x9c\x17\xab\xbc\ \x25\xbb\xca\x13\xc0\x4d\x61\x0e\x15\x2a\x72\x6e\xcc\x7e\x5a\x02\ \x68\x6a\xdd\xad\xf1\x94\x27\x00\x53\xdc\x1c\x71\x6d\x5b\x40\x60\ \x9a\xab\x1c\x75\x9e\xeb\x81\x41\x15\x47\x11\xc0\x6a\x89\x31\x0c\ \xd6\x77\x04\x20\x0c\x64\x26\x62\xb6\x69\x75\x8b\xa8\xaa\x09\x50\ \xb6\xc5\xbc\xd0\x03\xf8\xbe\x29\x63\x87\x29\x60\x0c\x18\x84\x1c\ \x00\x5b\x4d\x45\x00\x74\x03\x53\x98\xad\x94\xc5\x1c\xe7\x46\xe6\ \x1c\x00\xc8\x71\x5d\xa9\xa1\x08\x80\xfd\xfc\x56\x12\x73\x33\x01\ \x08\x35\x18\x42\xe8\xda\x7c\x8e\x29\xa8\x4e\x00\x5b\x00\x03\xc8\ \x98\x67\x36\x04\x00\x32\xe6\x85\xde\xf8\x17\x0b\xfc\x2c\xd8\x8a\ \x00\x18\x67\x3a\x4f\xb4\x54\x14\x23\x98\x02\x00\x02\x0c\x3e\xfb\ \xc5\x53\x28\xf0\x43\xb8\x66\x49\xf7\x6b\xf9\x52\x87\xd7\xbe\x54\ \x01\xc8\x55\x8f\xba\x4e\xad\x4b\x0e\x90\xaf\x85\xde\xb7\xc2\x92\ \x3d\x4f\xa6\xb3\xde\xa3\xb1\x71\xeb\xda\xd0\xf5\x15\x98\xb3\x6e\ \xa9\x00\x6c\x34\xa4\x6b\x18\xff\xe0\x11\x7f\x5a\x17\x53\xd4\x13\ \x0b\x59\x6f\xe4\xee\xbd\xe2\xa5\xc1\xcb\x4b\x7c\x6d\x8c\x75\x87\ \x35\xa8\xfa\xb7\x1c\xdd\x65\xd9\x3c\x8f\x1f\x19\xfe\x9e\xcf\x1e\ \x37\xbd\xc9\xba\x78\x26\x6f\x46\x00\x68\xf2\xff\x81\x99\x94\x9e\ \xe9\x3f\xbf\x19\x01\x42\xd3\xf4\xfc\xbd\x9c\x9e\xa5\x7e\x03\x51\ \x6c\x25\xa1\x92\x95\x0a\x77\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ \x42\x60\x82\ \x00\x00\x05\x3b\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\ \x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\ \x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\ 
\x79\x71\xc9\x65\x3c\x00\x00\x04\xcd\x49\x44\x41\x54\x58\xc3\xb5\ \x97\x5d\x4c\x5b\x65\x1c\xc6\x77\x6f\xbc\xd9\xe5\x12\x49\x20\x71\ \xd7\x26\xe3\x4e\x13\xb8\x70\xd1\x85\x44\xbd\x50\xe3\x10\x18\xe5\ \x2b\x2e\x26\x4a\x04\x27\x86\xaa\x8b\x99\xb0\xa1\x45\xd9\x32\x0c\ \x73\x2e\xb8\x35\x2c\x30\x01\x81\xd8\x4c\x87\x94\x41\x56\x06\x3d\ \x50\x98\x5d\x47\x5b\x4a\x4b\x2b\xa7\x9f\x50\x0a\xfd\x6e\xe1\xf1\ \xfd\xbf\xdd\x71\xcc\xe0\x46\xe5\x70\x92\x27\xe7\x14\xd2\x3e\xbf\ \xe7\x79\x3f\xfa\xf6\x00\x80\x03\xd9\xe8\x85\xd7\x3f\x7f\x9f\x49\ \xcb\x24\xec\x42\x0d\x4c\x39\x4f\xfa\xbc\x6c\xcd\x9f\x2d\x28\x39\ \x6b\x70\xfa\xe3\xe9\xd5\x30\x10\x8c\x00\xa1\x68\x46\xeb\xb1\x8c\ \xe2\x49\x20\x96\xc8\xdc\x6f\xdc\x9c\xf6\x12\x84\x9c\x00\x47\xde\ \x3b\xad\x36\x07\x36\x32\xa6\x04\x10\x61\x66\xe1\x78\x46\x64\x4c\ \x8a\xc6\x52\x58\x09\x25\x11\x8e\xa6\xd0\xdc\xa1\xb1\x3f\x09\xe2\ \x7f\x01\x48\x89\xc9\x3c\x92\x78\x64\x2c\xc9\xe7\x0f\xc2\xee\x89\ \xc2\xe5\x0e\x22\x12\x4b\xa3\xf4\xe3\x1f\x4c\x34\x74\x59\x01\xb0\ \xeb\xc8\x36\xd5\x90\x9e\x39\xf8\xdc\x99\x17\x5f\x2d\xf1\x7c\xaa\ \x3c\x8d\x8a\xaa\x1a\x54\x32\x7d\x58\x5b\x87\xdb\x63\x13\x8f\x01\ \x58\x6d\x4b\xb8\xbf\xe8\x87\x6b\x15\x58\x76\x07\x10\x08\xc6\xd2\ \xaf\x9d\x3c\x7f\x8f\x41\x1c\x7a\x2a\x00\xbb\x0e\x32\x35\x14\x15\ \x15\x99\x25\xb5\xb6\xb6\x8a\xa4\x6f\x54\xad\xfe\xaf\x55\xdf\xa5\ \x06\x06\x06\x31\x39\x35\x85\xd9\xb9\x39\xe8\x26\x26\x50\x50\x50\ \x80\x21\xcd\xef\x7c\xdc\x49\xa6\xf9\x05\xcc\x98\x5c\x1c\xc0\xe1\ \x4f\xc1\xe3\x0b\xe1\xc7\x5e\x9d\xc8\x00\x6a\x76\x03\x50\x43\x66\ \x78\xca\x15\x8f\xc7\xe1\xf5\x7a\x91\x4e\xa7\x31\x39\x39\x89\xca\ \xca\x6a\xcc\xce\x1a\x79\x03\x0f\xcc\x36\xe8\xa6\x2d\x1c\x80\x64\ \x5a\x70\xe3\xfb\xeb\xa3\x6e\x59\x01\xc2\xe1\x30\x8c\x46\x23\x7f\ \x4e\xa5\x52\xf8\xb9\xb7\x17\x8d\x9f\x7d\x81\x19\x06\xe1\x58\x12\ \xa1\xee\xea\xc6\xa9\x86\x46\x9c\x69\x3a\x87\x5b\x23\xe3\xb8\x70\ \x6d\x58\x5e\x80\x8d\x8d\x0d\xe8\xf5\xfa\xc7\x80\xfa\xfa\xfa\xa1\ \x64\x10\xa1\x8d\x38\xcc\x36\x17\x04\x83\x11\x53\x33\x26\x58\x1d\ 
\x1e\x5c\xec\x1a\x95\x1f\x60\x8a\xcd\x83\xed\x57\x24\x12\x41\xb9\ \xa2\x02\xe3\x3a\x01\xb4\x4f\xd0\x72\xa5\xa5\x4a\xcf\x97\x6e\xc8\ \x3c\x07\xc8\x4c\xa7\xd3\x61\x78\x78\x18\xbd\xac\xfe\xfe\xfe\x7e\ \x68\xb5\x5a\x54\x55\xd7\x60\xec\x8e\x1e\xd2\x46\x25\x06\xd9\x4a\ \x60\xf3\xa0\xbd\x5b\x66\x00\x1a\x77\xbb\xdd\x8e\xa1\xa1\x21\x74\ \x76\x76\xa2\xa7\xa7\x87\x03\x95\x94\x96\xe1\xa6\x56\x0f\xcf\x1a\ \xe0\x0d\x01\x36\x1f\xe0\x0c\x00\xe7\xbb\x64\x06\xd8\xda\xda\xe2\ \x2b\x21\x10\x08\xc0\xe3\xf1\xc0\xef\xf7\x63\x6d\x6d\x0d\x27\x14\ \x95\xd0\xfc\xa1\xe7\xa9\xc9\x7c\xc1\x0b\x98\xdd\x40\x9b\xdc\x00\ \xdb\x41\x36\x37\x37\xf9\x72\xa4\x56\x14\x15\xd5\xe8\xfb\x4d\xe0\ \xa9\x1d\x81\x47\x00\xe7\x3a\xf7\x01\x80\xcc\x25\x80\x24\x33\x4f\ \x24\x12\x28\x2b\xaf\xe2\x00\x7f\xba\x00\x8b\x27\x03\x40\x6d\xb4\ \xaa\xf7\x61\x08\xfe\x31\x4f\x26\x79\x7a\x1a\x92\x92\x13\x95\x1c\ \x40\x6f\x67\x1b\x12\xfb\x14\xe3\x5f\xc0\xc4\x02\xf0\xd5\x95\x7d\ \x00\x90\xaa\x27\x63\x52\x2c\x16\x43\x99\xa2\x0a\xea\x21\x01\x63\ \x66\xe0\x8e\x15\x98\x73\x66\xee\x5f\x5e\x96\x11\x40\x4a\x4f\xa9\ \x09\x20\x1a\x8d\x72\xf3\x50\x28\x84\xe2\xd2\x0a\x5c\x1d\x14\x70\ \x8b\x6d\x92\x83\x06\x70\x90\x11\x13\xa0\xec\x90\x19\x20\xf5\x70\ \xcc\x49\x64\x4e\xfb\x02\x81\x14\x97\x55\xe1\x42\xb7\x80\xfe\xe9\ \x8c\xb9\xf6\x41\xa6\x81\x4f\xda\x65\x02\xf8\x77\xf5\x94\x9a\xb4\ \xba\xba\xca\x21\xde\x29\xa9\xc0\xa5\x5e\x01\x77\x6d\xe0\xfa\x65\ \x06\xb8\x3d\x0f\x9c\xba\xb8\x47\x00\xa9\x76\x12\x4d\x3a\x4a\x4e\ \xdb\x31\x7d\x07\xac\xac\xac\x60\x7d\x7d\x9d\x43\x1c\x67\x0d\x7c\ \xdb\x25\x40\x3d\x01\xfc\x3a\x97\x69\xe0\xf2\x18\xd0\xb0\x97\x06\ \x24\x63\x69\xad\x93\x39\xa5\xa5\xe4\xb4\x11\x11\x00\xd5\x4f\x7f\ \x7b\xfb\x5d\x05\x5a\xae\x09\x3c\xf9\x95\x71\xe0\xfa\x5d\xe0\xa7\ \xb1\x78\xfa\xcd\x3a\xb5\x99\x4e\x54\x59\x01\xec\x64\x2c\x2d\x37\ \x32\x25\x00\x8f\xdb\xc3\x93\x4b\x30\x6f\x1d\x57\xa0\xbd\x47\x80\ \xf2\xaa\x6d\xfd\x03\xd5\x88\xf3\xd8\x49\x7e\x1c\xd3\xee\xfa\x48\ \x26\x01\x90\xa9\x24\x69\xb6\x53\x42\x32\x0f\x06\x83\x70\xb9\x5c\ 
\xdc\x90\x0e\x25\xf4\xda\xed\x76\x43\x14\x45\x1c\x3d\xf6\x46\xf2\ \xf9\x97\xca\xc9\xb4\x83\xa9\x98\xe9\x70\x56\xa7\x62\x02\x50\xa9\ \x54\x1c\x40\x9a\xe1\xd2\x78\x93\x21\x25\xa5\x7d\x9f\x52\x3b\x1c\ \x0e\x2e\x32\x5e\x5c\x5c\xe4\x50\x2f\x1f\x7d\xc5\x45\xe7\xc8\xdd\ \x1e\x74\x77\x04\x68\x69\x69\x11\xa5\x8d\x85\x52\x4b\x5f\x36\xf4\ \xbc\xbc\xbc\x0c\x9f\xcf\x07\x9b\xcd\xc6\xef\x4b\x4b\x4b\x1c\x82\ \x00\xe8\xb9\xb0\xb0\xd0\x2c\x0b\x00\x4d\x2a\x29\x35\x19\xd1\x33\ \xdd\xe9\x1b\x8f\x6a\xa7\x06\xac\x56\x2b\xaf\x9e\x5a\xa1\xff\x19\ \x0c\x86\x44\x5e\x5e\xde\x3d\xf6\x19\x87\xf6\x04\xd0\xd4\xd4\x24\ \x4a\xe3\x4c\xe9\x49\x64\x42\x55\xd3\xb3\xc5\x62\xe6\x0d\xd0\xeb\ \xd1\xd1\xd1\x48\x5b\x5b\x9b\x37\x3f\x3f\xdf\xc4\xde\x3b\xc8\x54\ \x96\xcd\x6f\x8d\x1d\x01\x9a\x9b\x9b\xf9\x10\x50\x3a\x82\xa0\xfa\ \xa9\xfa\xf9\xf9\x79\x5e\xb7\x46\xa3\x89\xd4\xd5\xd5\x39\x1f\xa6\ \xed\x62\xfa\x88\xe9\x70\xb6\xbf\x33\xff\x13\x40\xa9\x54\x8a\x4e\ \xa7\x93\x57\x4c\x49\xe9\xe4\x43\x49\xeb\xeb\xeb\x9d\x39\x39\x39\ \x92\x69\x71\x36\x55\x67\x05\xd0\xd8\xd8\x28\x5a\x2c\x16\x0c\x0c\ \x0c\x44\x6a\x6b\x6b\x9d\xb9\xb9\xb9\xb2\x9a\xee\xea\x97\x11\x93\ \xb0\x5f\xa6\xdb\xf5\x37\x43\x39\x56\x18\x68\x8b\xcd\x60\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x05\x2b\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\ \x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\ \x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\ \x79\x71\xc9\x65\x3c\x00\x00\x04\xbd\x49\x44\x41\x54\x58\xc3\xed\ \x57\x6b\x4c\x93\x57\x18\x3e\x23\x71\xc9\x32\xe9\x16\x97\xa8\x54\ \x65\x38\x9d\x02\x15\xf6\x03\x87\x32\x93\x01\x66\x2c\x5b\x70\xc4\ \x30\xff\x60\xa2\x2e\x1a\x3a\x1d\x4e\x03\xba\x31\x89\x5b\xb3\x80\ \xd9\x0c\x84\x02\x19\x58\x1c\x14\x8b\x85\xb2\x82\x95\x5e\xe4\x66\ \x0b\x8e\x31\xf8\xc3\x46\xcb\x2d\x81\x15\xdc\xa8\xc2\x1c\x1b\xb7\ 
\x6a\x69\x91\xf2\xee\xbc\x87\xaf\x0c\xdc\xb8\x0d\x61\xd9\xb2\x93\ \x3c\xed\x97\xf3\x7d\xfd\xde\xe7\xbc\xef\xf3\x5e\x4a\x00\x80\xfc\ \x93\x20\xff\x0a\x02\x74\x09\x28\x44\x14\xd9\x14\x71\x14\x01\x2b\ \x46\x80\xae\xdd\x64\xdd\xc6\x66\x22\x4c\xf8\x95\xc4\x8b\x47\xc8\ \xa1\xd3\xf7\xc8\x8e\x97\x3b\x38\x32\x61\x2b\x41\x20\x85\x9c\xbe\ \x30\x48\x2e\xdd\x80\x19\x40\x32\xab\x79\x4d\xf4\xbe\xfb\x72\x13\ \x68\x64\x06\x91\x04\x5e\xa3\x51\xf4\x06\xee\x85\x47\xf5\xd0\xbd\ \x83\xcb\x4d\x20\x9b\x9d\xf6\x40\x74\x2f\xbd\x16\x32\x3d\x20\x89\ \x3f\x48\xa5\x2c\x1b\x01\x8c\x31\x79\xc1\xbb\x9d\x88\x4b\xc6\xd7\ \xc6\x26\x0e\xa0\x10\xb9\xfd\x42\xfe\xc5\x2b\x36\x46\x8c\x12\x5c\ \x4e\x02\x93\xa7\xa7\xa7\x0d\xcc\xd3\x39\xb9\x98\x63\x36\x14\x0a\ \xd2\xe4\xa3\x2b\x41\x20\x8c\x29\x9e\x2a\xdf\x37\x47\xeb\xdc\x7b\ \xb5\xcc\x89\x9e\x40\x44\x96\x54\x83\x2b\x2c\x0b\x36\x46\x48\x08\ \x13\xf5\x64\x2a\x7b\x2e\x54\x03\x01\xf8\x03\x37\xbf\xc0\x0e\x34\ \x2a\x54\xdf\x62\x88\x52\xd5\x2c\x58\x03\x74\x1d\x16\x08\x04\x7a\ \x45\x55\xf5\xc8\xa0\x6d\x74\xc2\xd4\x73\xf7\x21\xbe\x73\x51\x95\ \x90\xae\x8f\xd0\x13\xcf\xe5\x94\x83\x87\xb4\x02\x9e\xcc\x2e\x03\ \xd4\x06\xdd\xaf\x99\xcb\xb0\xaf\xaf\xaf\x3e\xbf\xd2\x60\xb5\xdb\ \xed\x80\xf8\x79\xe4\x3e\xc4\x5e\xab\xb4\xb9\x88\x2f\x86\x80\x27\ \xd3\xc0\x67\xf9\x8e\x19\xf5\x60\xd7\x5e\x33\xba\x76\xda\x73\xee\ \x68\xd8\xc7\xc7\x47\x9f\xab\xab\xb0\x0e\x0f\x0d\xc1\x10\x87\xb2\ \xf6\x2e\xe7\x96\x37\xf7\x77\x73\x61\xd8\xbd\xe8\x5e\x80\x2f\x66\ \x9a\xa0\x86\xdf\xa9\x36\x42\xf7\xf0\x03\xd8\x19\x9f\xd4\xcf\xa5\ \xe7\x1a\x8a\x98\x2d\x7e\xfe\x6d\x97\x54\x1a\x6b\x5f\x5f\x1f\xb8\ \xd0\xd1\x73\x07\x62\x72\x15\x56\x4e\xc4\x87\x97\xd4\x8c\x30\x14\ \xe9\x15\xb7\x1e\x38\x1c\x0e\x40\xa4\xd6\x19\x31\x9e\x85\x9b\x05\ \x7e\x6d\xa9\x25\x1a\x5b\x97\xd9\x0c\xe6\x2e\x0a\xf3\x24\x14\xdf\ \x36\x8e\x7b\xbd\x1e\xd1\xcd\x42\xc8\x09\x6f\xa9\x04\x3c\xd1\xbd\ \x56\xab\x15\x10\x77\x7f\x1b\x84\xf3\x92\x5c\xbb\x52\xa9\x84\xfa\ \xfa\x7a\x30\x99\x4c\x0c\x75\xdf\x35\xc1\x51\xb1\x64\x18\xc9\x51\ 
\x44\x3e\xb6\x76\xcc\xb4\x40\x4f\x93\x5f\x7e\xd3\xd6\xdf\xdf\x0f\ \x32\x99\x0c\x44\x22\x11\xa8\x54\x2a\x90\x4a\xa5\xa0\xd1\x68\x20\ \x4b\x5b\x39\xbe\xe9\x95\xe0\x1f\xb8\x53\xaf\x79\x2c\xf3\x00\x97\ \x8e\x22\x9e\xc7\x86\xe6\x53\x29\x19\xf6\x82\x82\x02\xe6\xe2\xa0\ \xa0\x20\xe0\xf1\x78\x60\xb1\x58\x40\x5b\x5e\x01\xfb\xcf\x26\x0c\ \x2d\xa6\x53\xce\x67\x94\xcf\x09\x4c\x83\xe2\x5b\x7b\xe6\xc2\x60\ \x9a\xb2\x14\x14\x0a\x05\x88\xc5\x62\xc8\xcc\xcc\x84\xa2\xa2\x22\ \x50\xab\xd5\xd0\xd9\xd9\xc9\x60\xec\xfe\xc9\xb9\xc9\xdb\xa7\x75\ \x2e\xb7\xcf\x4b\x80\xae\xb7\xd8\x29\x70\x0e\xc0\x6a\x97\xac\x78\ \x88\xca\x7f\x82\xe2\x29\x89\x0e\x3e\x97\x2b\x21\x5b\x96\x0f\x07\ \x63\xe3\x47\x84\x1f\x26\xd8\x92\x72\x64\x8e\x6f\x1a\xbf\x07\xa3\ \xd1\x08\x2d\xad\x2d\xf0\xcb\xc0\x20\x1c\x38\xf1\xbe\x05\xb3\x62\ \xc1\x04\x5c\x69\x84\x85\x85\x84\x46\xdc\x26\xe7\x32\xac\x2c\xcf\ \x33\xb5\x13\xec\x3b\xe3\xba\xd3\x33\xaf\x82\xe5\xfe\x7a\x89\x06\ \x9e\xde\xfc\x62\x1b\xf7\x3c\x92\x8d\x7b\x66\xab\x4f\x5b\xca\x35\ \xed\x58\x43\x43\x3d\x34\x34\x34\x80\xa5\xb7\x17\x32\x14\xc5\xc3\ \xf3\xe9\xc0\x65\x3c\x92\xe5\x28\x9e\x36\x5d\xe5\x9c\x2a\x32\x78\ \x7d\xf4\x83\x2e\x5a\x6c\x12\x31\x0c\x1b\x25\xea\x71\xf7\x2f\xcb\ \x27\xef\x05\x87\x5f\xfe\xd3\xe4\x44\x0b\x4c\x68\xf4\xc9\x3e\x75\ \x95\x1e\x0c\x06\x03\xb4\xb7\xb7\xc3\xd7\xc6\x96\x31\xae\x81\x09\ \x66\xf1\x36\x6d\x38\x68\x3c\x49\x3a\x3a\x65\xf8\x62\x81\x83\x44\ \xbd\x57\x43\xb6\x0a\x5e\x9b\x2a\xc3\x94\x5c\xb0\x42\x0f\xab\x24\ \xb4\x04\x9f\x4a\xaa\x9b\x43\x37\x31\x28\xd4\x4f\xf2\x0a\xc7\x74\ \x3a\x1d\xd4\xd6\xd6\x82\xc9\x7c\xdb\xb9\x61\x9b\xf7\x5f\xea\x62\ \xb2\xe5\x7e\x9c\x75\x1f\x0d\xf3\xb2\xd4\x4e\xf2\xf6\xb1\xeb\x2e\ \xb6\xae\x94\xc3\x90\x6c\x97\x55\xc1\x4b\x57\xab\x80\x9c\x4d\x6e\ \x5a\xd0\x1c\x49\xbd\xb1\xe7\x88\xb0\xef\xca\x57\xc5\x50\x5a\x5a\ \x0a\x1d\x3f\xf6\x4c\x04\x06\x87\x74\x3c\xaa\x0b\xc2\x84\x46\x8d\ \x07\xc8\x6f\x02\xd9\xf9\xaa\x7e\x9a\xf1\x30\x46\x8e\x36\x20\xaf\ \xbc\x4a\x78\x43\x69\x00\x92\x28\x1d\x98\xcd\x95\xb3\x79\xc3\x7d\ 
\x3d\xbf\xf9\x44\x6a\xa6\x5d\x2e\x97\x43\x53\x4b\x2b\x44\x1c\x7b\ \xf7\xce\xf4\x14\x25\xae\xf1\x8a\xf5\x77\x9c\xf5\x70\x02\xc2\xd9\ \x0f\x89\xd1\x81\x03\x4f\x8e\xf7\xdc\xd2\x69\xe7\xf3\xdf\x75\xfc\ \x6f\x14\x2e\x36\xd2\xef\xd8\x17\x69\x49\xbe\x2c\x9d\xc8\xd3\x96\ \x3b\xa7\x0f\x31\x8c\x25\xc6\xdf\x9f\xba\x77\x5f\x71\x35\xa0\x41\ \x6c\xb5\x08\x8c\xf9\x94\xf1\xe0\xf0\x33\x4b\x9a\x7c\x68\x13\x5a\ \xbd\xce\xa3\xd9\x6b\x4f\x48\xf7\x0c\x0f\xb0\x0f\xfe\xf3\x87\xc8\ \xf9\x2f\xee\xb9\x49\x6e\x00\xf6\x7b\x3e\xed\xf7\x08\x1e\x2a\x3e\ \x5d\xe5\x58\xaa\xf1\x47\x5a\xf5\xb6\x59\x0b\x11\x1d\xb3\x43\xc9\ \x91\x38\x09\x39\xf9\xa9\x96\x21\xfa\x5c\x1a\x0d\xcf\xb3\xff\xff\ \x37\xfc\x4f\x13\xf8\x1d\xe7\x87\x19\xb9\x44\xc3\x01\xcf\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x04\xa3\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\ \x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\ \x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\ \x79\x71\xc9\x65\x3c\x00\x00\x04\x35\x49\x44\x41\x54\x58\xc3\xe5\ \x97\xcd\x8f\x54\x45\x14\xc5\x7f\xb7\xea\xd6\x7b\xaf\xdb\x6e\xc7\ \xf9\x40\x9d\x89\x46\x4d\x34\x99\x44\x8d\x1a\x48\x98\xc4\x8c\x1f\ \x1b\xfe\x02\x4c\x5c\xf1\x07\x18\x16\x2e\x4d\x5c\x6b\x58\xc3\x8e\ \xc4\x8d\x1b\x17\xce\x82\x68\x74\x41\x5c\x18\x0d\xe2\xc4\xc6\x00\ \x3d\x60\x50\x51\x19\x60\x02\xa2\x0e\x0c\x83\xd3\xfd\x5e\xf7\x94\ \x8b\xaa\xee\xf9\x60\xe6\x0d\x84\x51\x16\x56\x52\xa9\xce\x7b\xb7\ \xeb\x9e\x3a\xf7\xd4\xa9\x7a\xea\xbd\xe7\x7e\x36\xe5\x3e\xb7\x3e\ \x80\x5d\xbb\x76\xbd\x03\xec\xfd\x8f\xf2\x4e\x35\x1a\x8d\x03\xeb\ \x19\xd8\xbb\xef\xbd\xa3\x3b\x1f\x1f\x76\x00\x9c\x3c\x3a\xcf\xcc\ \x97\x37\x58\x9c\xef\xdc\x53\xa6\xda\xa0\xf2\xdc\x6b\x03\xbc\xb8\ \x67\x10\x80\x8b\x7f\x16\x7c\xf8\xee\x1e\x80\xdb\x00\x70\xfc\xec\ \x1c\xdf\x3f\x30\x04\x78\x2e\xfd\xb8\xc0\xfe\xb7\xce\x6f\xcb\x72\ 
\x0f\x1d\x79\x9a\x0b\x23\x96\xd3\x9f\x1f\x64\xfc\xd5\x7d\x9b\x6b\ \x40\x45\xb0\x16\x40\x78\x70\x2c\x23\xcb\xb2\x6d\x01\x30\x30\x96\ \x61\x8d\x50\x1b\x7c\x14\x23\x25\x22\x14\x2b\xd8\x18\x91\xd5\x95\ \x73\xe7\xce\x83\x2a\xb8\x04\xd2\x14\xb2\x0c\xd2\x2c\x8c\x49\x0a\ \x49\x12\xde\x77\x3a\x90\xe7\x90\xb7\xa1\xd5\x82\x76\x2b\x8e\x6d\ \x28\x72\xb2\xfa\x38\xd6\x0a\xe3\xaf\xbc\x49\x6b\xf1\xfa\xe6\x00\ \xac\x15\xac\x15\x04\xb0\x46\xd8\xbd\x7b\xe7\x16\x6b\xeb\x86\xae\ \x80\x5a\xa8\x56\x81\xea\x6d\x51\x8d\xaf\x04\xb5\x82\xf7\xa0\xa6\ \x84\x01\x67\x05\x35\x82\x08\xa8\x0a\x95\x2c\xc3\x23\x20\x1e\x08\ \xc0\xf0\x1e\x2f\x02\xde\x23\x12\x26\x15\x7c\x88\x23\xc4\x21\x1e\ \x3c\x21\x5e\x40\x4d\x58\x18\x40\xd7\x4a\x89\x06\xac\xa0\xda\x63\ \x00\x9a\x33\xbf\x05\x8a\x53\x07\x69\x02\x95\x04\xb2\x34\xf6\x04\ \x12\x07\x4e\xa1\xe8\x40\x5e\x40\x2b\x8f\xbd\x05\x4b\x39\xb4\x73\ \xc8\x0b\x54\x87\x71\x3d\x00\x2a\xe5\x25\x70\x31\x40\xd5\x30\x39\ \xf9\xd2\xd6\x0a\xf3\x3e\xd0\xaf\x16\xaa\x1b\x8b\xf6\xd8\x27\x61\ \x61\xbd\x1c\x25\x25\x20\x00\xf0\x81\x8d\x34\x4d\xa3\x3a\xc3\xb3\ \x98\x11\x89\x6c\x07\xda\x63\x09\x56\x98\x5f\x29\x46\xfc\x61\xcd\ \x72\x7f\x61\x1d\x2d\xd1\x80\x3a\x09\x54\x49\x18\x4f\x34\x2f\xe0\ \x9d\x85\xc4\x21\x89\xc3\x67\x09\x92\x69\xd8\x11\x89\xe2\x13\x87\ \x58\x8b\xef\x76\x91\xbc\x80\xbc\x03\xed\x02\xdf\x6a\x23\xed\x02\ \xf2\x02\x9f\x77\x50\x1d\x45\xd5\x20\x78\x3a\xeb\x54\x78\x9b\x06\ \x9c\x33\x78\x0f\x03\x8f\x24\xbc\xfe\xf2\xf3\x77\x68\xe8\x36\x68\ \xa4\xbe\xf1\xeb\xc6\xfc\xdf\xb1\x04\x52\x5e\x82\x44\x4d\x5f\x84\ \x8f\x0d\xa5\x38\xe7\xb6\xc5\x88\x9e\x18\x4b\xb9\x76\xb3\x03\x08\ \x9d\x52\x11\xaa\x90\xb8\x50\xef\x5a\xc5\x30\x7d\xb1\xcb\x40\xc5\ \xb0\x0e\xf4\x26\xad\x57\xf9\x55\x2e\xe1\xe1\xc6\xd2\x32\xf5\xcc\ \x70\x7d\xc9\x84\x2d\xe9\x4a\x19\x10\x9c\x1a\xc0\x73\xe5\x66\x97\ \x2b\x37\xbb\xac\x51\x57\x3f\xd7\xaa\x64\x7e\xc5\x27\xa2\x29\xac\ \x05\x15\xc3\x9c\x0b\xb5\x77\xa6\x6c\x17\xa8\xc1\xa9\x20\xc8\x1a\ \x35\xaf\x9b\x35\x1a\x8f\x59\x31\x9e\xfe\x7b\xe9\xef\x14\x00\xf1\ 
\x82\xef\x9b\x58\x30\x2b\x57\x56\x02\x55\x21\xd1\x90\xfc\xe7\x53\ \xdf\xf2\xeb\x99\x13\x2c\x2d\xde\xb8\xa7\xfa\x57\x6a\x03\x3c\xf5\ \xec\x4e\x9e\x79\x61\x02\x0f\xa8\x33\x5b\x31\x10\x03\x7c\x87\xf7\ \xf7\xbf\xc1\xc2\xc2\x02\xb7\x6e\xdd\xa2\x28\x0a\x44\x04\x6b\x2d\ \xd6\x5a\x54\x15\x55\xc5\x39\x87\xaa\x62\xad\xc5\x98\xf0\xdf\xe5\ \xe5\x65\xf2\x3c\xef\xf7\x23\xcd\xf9\xb8\xf2\x2d\x18\x70\x56\x50\ \x17\x18\xdc\x31\x3a\xb6\x72\x4f\x38\x7e\x9c\xe9\xe9\x69\x8c\x31\ \x78\xef\x99\x98\x98\x60\x72\x72\xf2\x8e\x59\xd8\x31\x3a\xd6\xdf\ \x86\xae\xd4\x09\x55\x70\x36\xac\xa2\x56\xaf\xf7\x6b\x39\x33\x33\ \xc3\xd0\xd0\x10\xd6\x5a\xbc\xf7\x34\x9b\xcd\xbb\x02\x50\xab\xd7\ \x70\xd1\x88\xb4\xd4\x88\x14\x9c\x0b\x27\x5c\xa0\x2a\x00\xa8\x56\ \xab\x64\x59\xd6\xa7\xb8\x37\xde\x69\x73\x1a\xa9\x17\x41\x4b\xad\ \x38\x1e\xc7\xbd\x23\xb4\xd7\x8c\x31\x88\x44\xdf\x8f\x3a\xb8\xab\ \x9b\xaf\x35\xa8\x0d\xf3\xf6\x18\x2e\x3d\x8e\x83\x29\x6d\xe3\xd5\ \xdb\x12\xa9\xf7\xe5\x56\x6c\xad\xf4\x91\x0e\x8e\x0c\xc3\xf2\xef\ \xdb\x02\xe0\xa1\x91\x61\xd4\xc2\xb5\x2b\x97\x59\x9c\xbf\xbe\x05\ \x03\x36\xf8\xc0\x60\xad\x02\x0b\xdb\xc3\xc0\x50\xad\xc2\xec\xc5\ \x4b\x9c\xfd\xee\x1b\xce\x9f\x9c\x9e\x03\xa6\x36\x04\x60\x24\x5e\ \x4a\x05\x12\x0b\xed\x91\x27\xa9\x3d\x0c\x6f\x1f\x38\xc8\x66\xc7\ \x81\x27\x3a\xf1\x2a\xe7\x35\x1e\x32\x81\x14\x28\xba\x70\xf9\xea\ \x55\xce\x34\x8e\xd1\xfc\xfa\x8b\xb9\xd9\x1f\x4e\x1d\x02\x0e\x6f\ \x08\xe0\xb3\x8f\x3e\xe0\xa7\xd3\x27\x57\x99\xe9\xda\xa3\x86\x55\ \xe6\xbb\x1e\x04\x1b\x3c\x5f\x1d\x6f\x7c\x77\xee\x8f\xd9\x5f\x0e\ \x01\x87\x1b\x8d\xc6\x5f\x1b\x01\x98\x9a\xfe\xf4\xe3\x7f\xf5\x73\ \x6c\x7d\xf2\x35\x00\xe2\xb7\xda\x81\xff\xdd\xd7\xf1\x3f\x4d\xf0\ \x4b\xb9\xe8\x46\x89\xaf\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\ \x60\x82\ \x00\x00\x06\xd1\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\ 
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\ \x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\ \x79\x71\xc9\x65\x3c\x00\x00\x06\x63\x49\x44\x41\x54\x58\xc3\xad\ \x57\xdb\x4f\x14\x57\x1c\xfe\xe6\xb2\x17\x2e\xbb\xdc\x0a\x94\xe5\ \x22\x52\x56\x2b\x1a\x44\xad\x5a\xb5\xc1\xb6\x91\xd8\x17\x7d\xa9\ \x6d\x93\xda\xb4\x69\x52\x11\xd2\xf4\xa9\x4d\xd3\x3f\xa0\x89\x0f\ \xa4\x3c\xd9\x3e\x10\x7d\xb1\x4d\x7c\x21\xa9\x45\x34\xa9\x56\x84\ \x22\x62\x62\xd5\x62\x51\x0a\x45\x50\x50\x90\x95\x85\xd9\x1d\xf6\ \x3a\x3b\x33\xfd\x9d\xd9\x5d\x2e\x71\x00\x01\xcf\xe6\x97\x33\x7b\ \xce\x9c\xf3\x7d\xbf\xeb\x39\x23\xea\xba\x0e\xb3\xc6\x71\xdc\x01\ \xea\xb6\x62\xf9\xd6\x43\x7b\xfc\x81\x55\x36\xd1\x04\xf8\x10\x89\ \x8b\x1e\x6b\x34\x4d\x7b\x7f\xb9\x0d\x78\x9e\xbf\x41\xef\x7f\x40\ \x8f\xcd\x44\xe4\xf2\x9a\x08\xd0\x46\xdf\xd5\x14\x70\xc7\x73\x6c\ \x9c\xed\xbe\x8f\x4b\xbf\x73\xe7\x0e\x32\xb3\x32\xc1\xd1\x8f\x35\ \x9d\x7e\x9c\x6e\xbc\x48\x7f\x74\x4c\x4b\x12\x0e\x14\x70\xdb\xf6\ \xe6\x72\x9b\x9b\xfe\xd3\xf6\xd0\xfa\xf7\x88\xc4\xd8\x5a\x2c\x70\ \xe4\xe3\x32\xa1\x54\xd3\x00\x55\xd7\xd0\xd7\xd7\x07\x87\xc3\xb1\ \xe8\x62\x59\x96\xc1\xc8\x96\xa5\xf3\xb6\x7c\xbb\x6e\x7b\x1a\xd2\ \x0b\x68\x78\x6c\x2d\x16\x30\x14\xd4\x48\x41\xe2\x80\xb6\xab\x57\ \x11\x0a\x85\xa0\xc6\x62\x60\xb1\x12\x0e\x87\x11\x89\x44\x10\x8d\ \x46\x11\xa3\xb1\xec\xec\x6c\xa4\x18\x7e\x60\x8b\x5f\x52\x0c\x18\ \x04\x48\xf2\xed\x1c\x6e\x77\x5e\x42\x38\x38\x43\xfb\x6b\xc6\x38\ \xc3\x70\x0a\x1a\x04\x9e\x83\x85\xdc\x20\x3e\xd3\x51\xec\x14\x8d\ \x15\x9a\x6e\x4c\x6f\xe2\x38\x83\xc9\x46\x92\x74\x12\x66\x11\x57\ \x62\xeb\xfe\x44\x9c\x8c\x98\x12\x68\x6a\x6a\xe2\x0c\x0c\xd2\x46\ \x53\x81\xf2\xf2\xf5\xb0\x04\x05\x02\x4e\x35\xb4\x67\xc9\xc2\xf6\ \xce\x75\xd8\x61\x13\x05\xd8\x2c\x3c\xfa\xc6\x7c\xb8\xfa\x60\x0c\ \xcd\x43\x21\x84\x6c\xce\x92\x9a\x9a\x5d\xdf\x53\x50\x46\xaa\xaa\ \xaa\x52\x32\x32\x32\xc4\xc2\xc2\x42\x2b\x13\x97\xcb\x65\xe9\xe8\ \xe8\x90\xeb\xeb\xeb\x6d\x84\x71\x62\x49\x0b\x20\x11\x63\x19\x69\ 
\x36\x6c\xca\x4c\x43\x58\x51\x11\x8d\x31\x0d\x75\xf0\x34\x61\x15\ \x79\xd8\x2d\x02\x7c\xa1\x28\x46\xb8\x1c\xfc\x7c\xf9\x57\xb8\x8a\ \x8a\xd9\x32\xb6\xf9\xba\xf9\xfb\xa8\xaa\x0a\x45\x51\x90\x99\x99\ \x49\x0a\x95\xdb\xef\xde\xbd\x5b\xc7\x2c\x44\x0a\x9d\x58\x94\x00\ \x2f\xc4\x4d\x2d\x08\x1c\x34\x12\x81\x02\x42\xa4\x11\x4d\x27\xe3\ \x70\xf1\x71\x26\xfd\x13\x32\xde\x3e\x78\x18\x05\x85\x45\x58\xac\ \x96\x30\x70\xbf\xdf\x8f\xd4\xd4\x54\x23\x98\x1b\x1b\x1b\x4b\x28\ \x7e\x8e\xcf\x27\x31\x4b\xa0\xb6\xb6\x56\xaf\xab\xab\xd3\x05\x9a\ \x24\xeb\xc2\x2a\xf0\x10\xc9\xd4\x3c\x81\xc7\x04\xc3\xc7\x06\x01\ \x91\xe7\xc9\x05\x34\xc7\x27\x52\x73\x11\xf0\x24\x01\x9f\xcf\x87\ \xac\xac\x2c\x83\x80\xdd\x6e\x47\x43\x43\x03\xb3\x12\x23\x71\x8f\ \xd6\xb6\x3c\x67\x01\x41\x88\x47\x35\xf3\x31\x67\x13\x88\x04\x8f\ \x99\x48\x0c\x52\x20\x82\x50\x34\x06\x45\xa5\xa8\xb0\x0a\xf0\xd3\ \x58\x88\xb2\x62\xa9\xc6\x5c\x10\x0c\x06\x8d\xac\x99\xdb\x5f\xc0\ \xd1\xa3\x47\xf3\x4e\x9f\x3e\x7d\x88\xfe\x2e\x24\x40\x8c\x74\x52\ \xdc\x70\x81\xd5\xc2\x21\x48\xda\x0d\x4d\xca\x78\xa6\x3b\x50\xb4\ \x71\x27\x8a\xf2\x8a\x61\x49\xcd\x46\x24\x20\xc1\x35\xd0\x8f\x07\ \x8f\xc6\xd1\xde\xde\x8e\x2d\x5b\xb6\x20\x27\x27\x07\x14\x68\xe8\ \xed\xed\x35\xfe\xef\xdf\xbf\x7f\x76\xdf\x8b\x17\x2f\x42\xa2\xa2\ \xc5\xc8\xb8\xdd\x6e\x46\x88\x25\x1a\xcf\xd2\xde\xd4\x02\x2c\xad\ \x1f\x4e\x87\xf0\x30\x66\xc7\x9b\x87\xbf\x44\xc5\x6b\xef\xe0\xa9\ \x2c\x62\xdc\x1b\x86\x4f\x8e\x22\x82\x18\x04\xf7\xbb\x70\xa7\xa9\ \x08\x6b\x32\xce\xfd\xd6\x8a\xed\xdb\x2a\xf1\x68\x74\x02\xd5\x07\ \x6b\xf1\xe7\xef\x4d\xa8\xae\xae\x36\xb4\x65\xfe\x4f\x4b\x4b\x33\ \x8a\x96\xd3\xe9\x24\x8b\x8a\x46\x2d\x49\x56\x8e\xe7\x83\x90\x7c\ \x3b\x16\xd0\x21\xb9\xdc\x38\xf2\x55\x03\x64\x31\x17\x92\x1c\x01\ \x27\x28\xd0\x69\x2e\x48\xe4\x9f\x4d\x45\x20\xcf\x44\xa1\xd2\x73\ \x6f\xba\x15\xdb\xdd\x6f\xa1\xb7\x7f\x00\xe3\xa1\x0a\x4c\xdd\x78\ \x0a\xd1\x48\x5b\xdd\x20\xc0\x40\x4b\x4b\x4b\x8d\xa2\xc5\x82\x8f\ \xc5\xc2\xe8\xe8\x68\xb2\xdc\x98\x1c\x46\xa4\xfe\x23\x59\xc7\x47\ 
\x5f\x7c\x0b\x87\xab\x18\x23\xad\x67\xe0\x19\xfc\x07\xa2\x2d\x05\ \x1b\x4a\xab\xb0\xae\xbc\x12\xff\xa6\x3a\x30\x34\x42\xae\x99\x0a\ \x63\xd4\x13\xc4\x18\x89\xc8\xe5\xc3\x33\xe9\xc3\xdb\x3b\xed\xc8\ \x9b\x47\x80\x69\xcf\xb4\x66\x2e\x32\x00\xe9\xd9\xeb\xf5\x1a\x04\ \x98\xcb\x4d\x09\x94\x66\x72\xb8\x72\xa6\x91\x8a\x10\x8f\x14\xdf\ \x30\xf2\x9d\x76\xc4\xe8\x80\x78\xd2\xd7\x86\x49\x2d\x1d\x9b\x6b\ \x3e\x43\x56\xc5\x3e\xdc\xee\x93\x30\xae\x06\x20\xfb\x15\x04\x66\ \x48\x02\x4a\xdc\xb0\xfa\x5c\x76\x58\x2c\x16\x03\x34\x51\xea\x67\ \xc7\x12\x45\xd7\xcc\x05\xa0\xf2\xca\x51\xf1\x19\x82\x35\xcd\x89\ \x9c\xb2\xec\xd9\xb9\xf5\x79\xe9\x90\x82\x0a\x06\xdb\x7f\x82\xc7\ \xd1\x8d\xf2\xca\xe3\x90\x03\x56\x28\xe4\x8a\x18\x15\x2b\x85\xc4\ \x42\x59\xa3\x47\x74\xd3\xf4\x4c\x8e\xcd\x9f\x33\x25\xc0\x5a\x9e\ \xd3\x42\x11\x6f\x7b\x6e\x93\xec\x34\x0b\x5e\x71\x58\x71\xe5\xef\ \x2e\xec\xd9\xf1\x39\x52\xed\x22\x42\x29\x22\x11\xd0\x0d\x12\x02\ \x11\xa0\x28\x8d\x83\x24\x70\xf4\xc4\x41\x95\x3c\xaf\x34\x6d\x09\ \x02\x1c\x3f\x57\x8e\x93\xcf\xf3\xdb\xcd\xa1\x29\x0c\x4f\x2b\xf8\ \xf0\x9b\x1f\x31\xe0\xcf\xa2\xd8\x90\x61\xa7\x7a\xa1\x28\x64\x05\ \x45\x8c\x5b\x80\xc0\x35\x4d\x5b\x78\xc2\xcd\x75\x0b\x9e\x4c\x08\ \xc4\xaf\x1f\x46\x6f\x42\x60\xd0\x23\xe3\xeb\x5f\x7a\x30\xf8\x24\ \x00\x81\x82\xb0\x24\x37\x85\x8e\x69\x4a\xc7\x70\xcc\xe8\x77\xbc\ \x9e\x85\xee\x4b\xfa\x92\x15\xf2\x85\x5c\xc0\xc0\x39\x93\x33\xde\ \xfd\xaa\x03\x3f\x7c\xb2\x15\xbd\x8f\xfd\xb0\x17\x6e\xc6\x8e\xdd\ \xfb\x16\xcc\x77\xd3\x61\x5b\x51\x51\xb1\x36\x02\x3a\xe2\x75\x9f\ \xe3\x30\x1b\xd5\xc9\x7e\xb7\x3b\x1b\xbb\xca\x9c\x68\xbe\x16\x44\ \xc6\xae\xed\xf8\xf4\xd8\xb1\x65\x41\x56\x44\xc0\xf0\x7d\xb2\x4f\ \x5a\x80\xdd\x90\x94\x30\xa2\xb2\x17\xca\xcc\x14\x62\x41\x09\x51\ \xbf\x0a\x9d\x2e\x0e\x4b\x01\xad\x8e\xc0\xec\x29\xa7\x42\x0d\xfb\ \x11\x9d\x21\x50\x02\x66\x04\x16\xc4\x09\xb7\xfc\x69\xb8\xca\x6b\ \x79\xbc\x0f\x8e\xdd\x37\x1d\x37\xe6\x14\xa8\x03\x5e\x2d\x5c\xa9\ \x70\x76\x76\x7e\xbd\x64\x0b\x98\x2f\x62\xa0\x7f\x3d\x51\xa5\xeb\ 
\x23\x9a\x74\xf9\x81\x3a\xca\x2e\xc5\x95\x82\x75\x2f\x6d\x96\xb3\ \x52\x02\x74\x30\xa9\xac\x5b\xd2\x02\xac\x4d\x06\xf5\xe8\xcd\xc7\ \x9a\x74\x61\x40\xf5\xde\xf7\x68\x13\x34\xd4\x4e\xd2\x41\xa0\xed\ \xe4\x82\x5a\xaa\xf5\x6f\xac\xc4\x05\x81\x40\x40\xed\xec\xec\x94\ \x4e\x9d\x3a\xe5\x61\x17\x54\x33\x02\xb7\xfa\x27\xb5\x0d\xc3\xd3\ \x7a\xe8\x42\xbf\xea\xbd\xe7\xd1\x06\x13\xa0\xad\x04\x34\xb0\xd2\ \x68\x9f\xfd\x76\xeb\xe9\x91\x5b\x5a\x5a\xbc\x24\xd2\xf4\xf4\xf4\ \xb9\xc4\xed\xf8\x9e\x19\x81\xe6\xfa\x96\x68\xf2\x0a\xbd\x28\xe8\ \x7c\xf0\xc5\x08\x4c\x4c\x4c\x44\xe9\x22\xe2\x3d\x7b\xf6\xec\xe4\ \xf0\xf0\x70\x37\xdb\x8f\x29\x43\xef\xcb\x8b\x06\x61\xe2\xdb\xee\ \x85\xbf\xef\xd8\xa7\x1a\x23\xc0\x25\x6a\x04\x33\xf1\xb5\xae\x2e\ \xa9\xb9\xb9\xd9\x4b\x37\xa5\xfe\x04\x68\xeb\x52\x9f\x6b\xe2\x1a\ \x32\x68\x46\xf6\xcb\x31\xf6\xd0\xd5\x75\x5d\x6a\x6b\x6b\x93\xce\ \x9f\x3f\x9f\x34\x31\x03\xbd\xb5\xaa\x34\x5c\x41\x6b\x3e\x79\xf2\ \x64\x09\xc9\x91\x64\x70\x9a\x99\x78\xb9\xf6\x3f\xd3\x41\x2e\xed\ \x55\x74\x72\x95\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \ \x00\x00\x08\x19\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\ \x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\ \x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\ \x79\x71\xc9\x65\x3c\x00\x00\x07\xab\x49\x44\x41\x54\x58\xc3\xad\ \x57\x5b\x50\x93\x67\x1a\xf6\xca\xce\xec\xcc\xf6\x62\x2f\xbc\xd9\ \xe9\xce\xec\x6e\xbd\xda\xd9\x9b\xb5\xce\xba\x3b\x7b\xb0\xad\xcc\ \x7a\xb1\xce\xce\x3a\xb3\x76\x54\x70\x75\xdb\xe2\x81\xd6\xb6\x54\ \x04\xbb\xa5\x20\x6d\xc1\x82\x06\x08\x07\x51\x42\x80\x80\x80\x02\ \x21\x81\x10\x92\x40\x48\x10\x73\x24\x21\x67\x72\x80\x04\x42\x20\ \x9c\x09\x47\xb5\x54\x78\xf6\xfb\x7e\x13\x16\x30\x58\x8b\x7d\x67\ \x9e\xf9\x2f\x92\xfc\xcf\xfb\x3e\xcf\xfb\xbe\xdf\x97\x5d\x00\x76\ 
\xfd\x98\x20\xf1\x0b\x82\x14\x02\x03\xc1\x75\x82\x03\xcf\xfd\xfe\ \x8f\x48\xbc\x9b\x20\xe1\x57\xaf\xef\xb5\x2a\x8c\xd6\x65\xdb\x02\ \x60\x19\x1e\x5b\x09\x27\xf1\x33\xfa\x19\x81\x22\xfc\xdc\x3e\x76\ \x48\x7e\x8a\xa0\xb9\xb6\x59\x1c\x32\xcf\xad\x42\x39\xfe\x1d\x44\ \xf6\x51\xd8\xc7\xe6\xe8\x87\x86\x3d\x7b\xf6\x58\x53\x52\xae\x2c\ \xca\x3a\x3a\x10\x4e\xe2\xe5\x49\xc3\xc4\x31\x04\xb7\x3e\x49\xf9\ \x2c\x60\x9b\x5d\x59\x53\x4d\x03\x4d\xb6\x11\x34\xeb\xfb\x20\x31\ \x79\x60\x19\x9d\xc5\xbb\xef\xbe\x3f\xc5\xab\xbe\x83\xf1\x89\x29\ \x4c\x4f\xcf\xae\x92\xef\xd7\xbc\x74\x02\x11\x9f\x0f\xbe\x1d\xe3\ \xb2\x04\x43\x4f\xb4\x33\x40\x8b\x7b\x06\xcd\x3d\x2e\x34\xeb\xec\ \xa8\x57\xf6\x20\x87\x53\x85\x32\x5e\x35\x43\xbc\xb0\xf4\x90\x81\ \xc1\x60\x5c\x26\xbf\x4b\x7c\xe1\x04\x48\x1c\x24\x38\x41\xfd\xdd\ \xea\x73\x27\xf1\xb9\x27\x04\x48\x87\x97\xc1\xd7\xbb\x20\x22\x55\ \x37\xdc\x37\xa2\xb8\x4e\x88\x2c\x56\x3e\xcc\x56\xdb\x3a\x71\x04\ \x2c\x16\x6b\x2c\xfc\xce\xe7\x27\x10\x91\x36\x93\x95\x3f\x46\x7d\ \xa5\xfe\x12\xc4\x6f\xf4\x59\x31\xb6\x02\x7e\xef\x20\x5a\x7b\x9c\ \xe0\x3f\x30\xa1\x4c\x28\x43\x46\x0e\x1b\xb2\x0e\xf9\x26\xd2\xf9\ \xc5\x65\xcc\x2d\x2c\x21\x34\xbf\x88\xbd\x7b\xf7\x5a\xc9\x3b\x7e\ \xba\x6d\x02\x24\x7e\x43\x90\x46\x3d\x35\x13\x69\x75\xb3\x80\xd2\ \x3f\x0f\xcb\xc4\xe2\x9a\x50\xa1\x5a\xb4\x6c\xf1\x59\xa0\xb6\xa0\ \xa6\x5d\x8d\x2f\xb2\x73\x71\xb7\x9e\xff\x0c\x31\x25\x9d\x09\xcd\ \x63\x62\x6a\x06\x83\x43\x81\x27\xe4\xdd\xbc\x2d\xd3\xb0\x3b\x92\ \x03\x33\x26\xd4\x53\xb5\xd3\xfb\x58\x4f\x88\xc5\x03\x21\x88\x2c\ \x43\x50\xba\x46\xd0\xed\x09\x42\xe5\x9b\x42\x9b\x73\xfc\xa9\xcf\ \x5a\x1b\xee\x2a\x74\xc8\xbc\xc9\x45\x09\xa7\x6c\x93\xcf\x9b\x88\ \x27\xa7\x11\x18\x1d\xc3\x80\x6f\x08\xa2\xd6\xd6\x25\xc2\x51\xdb\ \x28\x12\x87\xc6\x1f\xaf\x82\x2f\x62\x94\x4d\x89\x24\x90\x22\xea\ \x52\x2d\x9a\x42\xab\xe8\x18\x79\x04\xa1\xc5\xcf\x10\x53\x74\xf6\ \x0d\xa3\xd3\xe1\x87\xd4\x3c\x80\x16\xbd\x03\x0d\x5d\x06\x14\xd5\ \x0a\x90\x91\x95\x0d\x2f\x79\xf1\xc6\xaa\xa9\xd4\xb3\x73\x0b\x4c\ 
\xc5\x94\xd8\xdd\xef\x85\xc9\x62\x05\xb7\xbc\x12\xa5\xe5\x95\x4b\ \x13\xf3\xcb\xab\x23\x0f\x01\x37\xd9\x11\xe6\xd9\x15\x84\x97\x15\ \x13\x06\xcb\x3c\xd0\x68\xf2\xa3\xdd\xee\x5f\x27\x96\x3b\x86\x20\ \xb3\x78\xd7\x7d\xe6\x08\xa4\xf8\x3c\x33\x1b\x2a\x8d\x36\xaa\xdc\ \x53\x33\x21\x8c\x8e\x8d\x33\x15\xd3\x26\xe4\x37\x09\xf1\xc1\xc5\ \x8f\x51\x73\xaf\x01\xbe\x65\x60\xfc\x11\xa0\x23\x13\x23\xf2\xce\ \xa1\xbe\x5d\xb9\xb8\x51\x01\x83\x81\x74\x74\x4d\xa7\x1e\x0a\x67\ \x80\xa9\xb8\xdd\xea\x83\xd8\xe8\x42\x93\xca\xcc\xf8\x7c\xe5\xcb\ \x2c\x88\xda\x24\x51\x89\xa7\x67\xe7\x18\x1b\x86\x86\x47\x60\x77\ \x38\x49\x82\x3a\x24\x7c\xf8\x21\xae\xb3\x0b\xe1\x99\x5c\x80\x6f\ \x09\xd0\x90\xde\xe1\x0f\x2c\x81\xab\x1f\xc4\x7d\xef\x04\xdd\x07\ \x1d\x61\xeb\xff\x9f\xc0\x1d\xb9\x16\x1d\xf6\x21\x48\xcc\xfd\x4f\ \x7d\xee\xd4\x22\x9d\x55\x84\xaa\x9a\xba\x4d\x3e\x47\xe4\x8e\xf8\ \x3c\x3c\x12\x84\xd3\xdd\x0f\xbd\xc1\x88\xc2\xe2\x62\x9c\x7e\x2f\ \x1e\x3d\x03\x01\xf4\x2f\x02\x83\x84\xbc\xc5\xff\x2d\xee\x3a\x43\ \x28\x51\x91\xf7\xf6\x05\xf1\x4e\xdc\xbf\x7d\x84\x33\x69\xe3\x20\ \x18\xf4\x33\xab\xe0\xc9\x54\x68\x35\x38\xd1\xd8\xdd\x0b\x9e\x58\ \x89\xac\x5c\xf6\x33\x3e\x47\xaa\x9e\x9c\x9e\x65\xe4\xee\xf7\x0e\ \xa2\xd7\x6c\x41\x43\x03\x1f\x27\x62\xe3\x20\xe9\xd6\xc0\x45\xcf\ \x01\x52\x90\x24\xb8\x86\xb2\x9e\x00\x6e\xb4\xdb\x50\xd1\x1b\x44\ \x85\xce\x8b\x4a\x7e\x0b\x6d\xbe\x9b\x5b\x27\xd1\xa0\x99\xf8\x16\ \x65\x22\x05\xee\x29\xf4\x28\x13\xc8\x90\x78\x35\x0b\x1a\xad\x3e\ \xaa\xdc\x63\x13\x93\xf0\x0d\x0d\xc3\x66\xef\x83\xb4\x5d\x8e\xc4\ \x4b\x97\x90\xc3\xca\xc3\xd4\x63\xc0\x4e\x7a\x49\x31\x4e\xfa\x89\ \x94\x7f\x5b\x3b\x84\x7c\x85\x13\x25\x6a\x1f\x4a\xd5\x03\xe8\xf2\ \x30\xa3\x28\x22\xf8\xf9\x33\x09\x74\x8f\x2e\xa1\xa8\xbe\x15\xa5\ \x7c\x09\xb2\x4a\x2a\xf0\xcf\xe3\x71\x51\xe5\xf6\x07\x46\xd1\xe7\ \xf2\x40\xab\x37\x20\xfd\x6a\x06\x92\xbf\x48\x83\xcd\x37\x02\x27\ \xa9\xda\x40\x1a\x4c\xe0\x7b\x88\x52\x9d\x1f\x45\xdd\xfd\x0c\x71\ \x41\x97\x1b\xc5\xdd\x1e\x88\x9c\x41\xfc\xf9\xcd\xb7\x5d\x84\xeb\ 
\x6c\xb4\x43\xd0\x28\xf7\x4e\x23\xa7\xfc\x1e\xb2\x4b\xab\xf1\x51\ \xea\x57\x48\xfe\x6f\xea\xfa\x58\x51\xb9\x47\x82\xe3\xf0\x0c\xf8\ \x60\x34\x99\x51\xc9\xab\xc2\xfb\x67\xcf\x41\xfe\x40\x03\x3f\xe9\ \x6e\xb2\x8d\x19\xb9\x6f\x69\x06\x19\xd2\x9b\x2a\x2f\x72\xe5\x0e\ \xe4\x75\xf6\xa1\xf0\xbe\x1b\x1c\x95\x1b\xf9\x9c\xca\x29\xc2\x53\ \xb8\xdd\x29\xdc\x2b\x76\x04\x90\x51\xc8\xc5\x95\x6b\x79\x38\x11\ \x9f\x80\x9b\xb7\x6e\x33\x63\x15\x91\xdb\x6a\x73\x40\x22\x6d\xc7\ \x85\x84\x0f\x50\x74\xbb\x0c\xf3\x2b\x80\x9f\x34\x58\xf7\x24\x20\ \x1c\x7c\x84\x4a\xd3\x18\x38\xfa\x61\x86\x9c\x56\xfd\x55\xb3\x1e\ \xac\x0e\x3b\xb8\x3a\x1f\xd9\x21\x1e\x7a\x2f\xe0\x13\xbc\xba\x5d\ \x02\x26\xbe\xc1\x83\x94\x6f\xd8\x38\x9f\x9c\x8a\x03\x7f\x3d\x04\ \x63\xaf\x99\xe9\x6e\x2a\xb7\x46\xd7\x83\xa4\xcb\xc9\x48\xff\x3a\ \x8b\x8c\xd5\x3c\x53\xb5\x71\xf6\xa9\xdc\x35\xf6\x69\x5c\x97\x59\ \x19\xd9\xbf\x6e\x21\xa7\xa0\xd4\x82\x74\xbe\x1a\x57\x9b\x34\x60\ \xc9\xcc\x10\xbb\x82\xf8\xe5\xaf\x5f\xa7\x67\xc0\x3b\xe1\x75\x1f\ \x35\xcc\x35\xdd\x66\x7c\x94\x96\x85\xb8\x73\x17\xf1\x97\x43\x31\ \x4c\xd5\x74\x99\xf0\xaa\xaa\x71\xfa\xf4\x19\x68\xcc\x0e\x8c\x92\ \x2d\x36\x14\x1e\xab\x5a\xc7\x0c\x78\xe6\x71\x70\x0d\x23\x4c\xa3\ \x65\x8a\x0c\x8c\xec\xb4\xfa\x9c\xb6\x5e\x94\x74\x39\xd0\x66\xf7\ \xaf\x1e\x3d\x11\x4b\x47\x2e\x6f\xc3\x79\x13\x35\x2c\x5c\x99\x1a\ \xf1\x97\x3e\xc7\xd1\xd8\x33\xf8\x38\x31\x09\x86\x5e\x13\x1a\x9b\ \x04\xf8\xdd\x1b\xfb\x51\x4f\xd4\xf1\x90\x99\xee\x9a\x00\xaa\xad\ \x93\x60\x2b\x5d\x0c\x39\xf5\xbc\xf0\xbe\x67\xbd\xea\xcc\x16\x3d\ \x4a\x55\x1e\x08\x6d\x01\x94\xd4\xf1\x43\xe1\x65\x53\x40\xf0\xca\ \xf7\x25\x60\x2b\x6e\x6a\xc7\xa9\x84\x44\xc4\x1c\x39\x8a\xdc\x7c\ \x36\x5a\x5a\xc5\x38\x14\x13\x83\x2f\x39\x35\xc8\x14\x6a\x98\xe6\ \xa2\xd5\xd2\x27\xf5\x9a\x7a\x4c\x13\xa1\x49\x64\xb7\x99\x90\xdb\ \x6e\x46\xb9\xda\x8d\x06\xa5\x76\x39\x2c\x39\x3d\xf9\x4e\x13\xec\ \xd9\x72\xd4\x47\x0d\x3b\xab\x46\x88\x63\xff\x39\x8f\xdf\xee\xfb\ \x3d\x1a\xf9\x02\x9c\xbf\x90\x80\x93\xf1\x17\x70\xa3\xad\x07\x19\ 
\xc4\x4f\x4a\x14\xe9\x6e\xba\x58\xa8\xef\x2c\xfa\x94\x98\x50\x28\ \xb7\x40\xe9\x0e\x3c\xf9\x57\xec\x29\x2a\x77\x2d\xc1\x67\x04\xfb\ \xb6\xb9\xe4\x44\x8d\xbe\xcc\xb2\x5a\xfc\xe3\xe4\x19\x1c\x3c\xf4\ \x37\xb0\x72\xf3\xb0\xef\xc0\x1f\x50\x20\xd1\x21\x89\x27\x65\x2a\ \xa6\x4b\x85\x3e\xbf\x21\xd5\x46\xe4\x2e\x90\x5b\x21\xb0\x0c\xae\ \xe5\xdc\xe2\xd2\x11\x13\x13\xe4\x87\x6f\x3c\xaf\x3c\xe7\x96\x15\ \x35\x9c\x69\x45\xe5\xf8\xfb\xb1\x58\x1c\x3f\x19\x87\x37\xf6\xef\ \xc7\x8d\x3a\x11\x92\xab\xa4\x0c\x21\xed\x70\xea\x35\x55\x21\x8b\ \x34\x5b\xc9\x03\x37\x2a\x34\x6e\xd4\x49\x3a\x17\xc3\x72\x73\x08\ \x8e\x6d\x95\xfb\x87\x24\xe0\x4a\x65\x73\x70\xe4\xf8\x29\x1c\x3e\ \x7c\x98\x8c\x63\x2e\x32\x05\x2a\x5c\x22\xd5\xd3\x5d\x7e\x4d\xdc\ \x0b\x36\xe9\x74\x76\xa7\x1d\x77\x8c\xe4\x88\xb6\xf9\x9e\x84\xb7\ \x1a\x95\xfb\x22\xbd\x49\xfd\x80\x0b\x6d\xf4\x04\x32\x4a\x78\x4c\ \x0f\x9c\x4b\x49\xc3\xb5\xa6\x2e\x7c\xc2\x6d\x65\x36\x59\xf1\x83\ \x01\x5c\x97\x9a\xc1\x51\x7b\x20\xf3\x04\xd7\xce\x25\x26\x05\x36\ \xc8\xfd\xc7\x9d\xc8\x1d\xd5\x82\xdc\x1a\x01\xce\x5e\x4e\x45\x81\ \x58\x85\x78\xf6\x5d\x5c\xa9\x55\x90\xaa\xfb\xc0\x96\xdb\x50\xad\ \x75\xe3\xae\x54\x41\x2f\x10\xca\x0d\x72\xbf\xba\xd3\x6a\xa3\x05\ \xb7\xa2\x51\xf8\x1d\xaf\x43\x8d\x4f\xb9\x2d\x88\xcb\xe6\xe1\x9a\ \x48\x8f\xaa\x1e\x2f\x9a\x35\xe6\xc7\x7f\x7a\xf3\x2d\x57\x78\xac\ \xa8\xdc\xaf\xbd\xac\xdc\xd1\xe2\x08\xdd\x05\x5c\x75\x1f\xde\xcb\ \xaf\x45\xb9\x76\x00\x32\x67\x60\xf5\xc2\xa7\x97\xa9\xdc\xf7\x08\ \xd2\xa9\xdc\x3b\xf8\x03\xf3\xc2\xf1\x13\x82\xca\x1c\xee\x9d\x50\ \x0b\x39\x94\xb8\x0d\xc2\xc8\x16\xa3\x17\x87\xc3\x2f\x22\xf7\x0e\ \xff\xda\x6d\x8a\xdd\x61\x99\xd5\x1b\xb6\xd8\x6b\xbb\x5e\x32\xbe\ \x2f\x89\xff\x01\x66\xb9\x5f\xfc\x11\x80\x3d\xcf\x00\x00\x00\x00\ \x49\x45\x4e\x44\xae\x42\x60\x82\ " qt_resource_name = "\ \x00\x06\ \x07\x03\x7d\xc3\ \x00\x69\ \x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\ \x00\x07\ \x04\xca\x57\xa7\ \x00\x6e\ \x00\x65\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x08\ \x06\x7c\x5a\x07\ \x00\x63\ 
\x00\x6f\x00\x70\x00\x79\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x07\ \x0a\xc7\x57\x87\ \x00\x63\ \x00\x75\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x08\ \x08\xc8\x58\x67\ \x00\x73\ \x00\x61\x00\x76\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x09\ \x0a\xa8\xba\x47\ \x00\x70\ \x00\x61\x00\x73\x00\x74\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x08\ \x06\xc1\x59\x87\ \x00\x6f\ \x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\ " qt_resource_struct = "\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\x02\ \x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x00\x26\x00\x00\x00\x00\x00\x01\x00\x00\x03\x58\ \x00\x00\x00\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x19\x42\ \x00\x00\x00\x50\x00\x00\x00\x00\x00\x01\x00\x00\x0d\xc6\ \x00\x00\x00\x66\x00\x00\x00\x00\x00\x01\x00\x00\x12\x6d\ \x00\x00\x00\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x08\x97\ " def qInitResources(): QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) def qCleanupResources(): QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) qInitResources()
PypiClean
/Hooke-1.0.0.alpha%20(Ninken).tar.gz/Hooke-1.0.0.alpha (Ninken)/hooke/util/igorbinarywave.py
# Based on WaveMetric's Technical Note 003, "Igor Binary Format" # ftp://ftp.wavemetrics.net/IgorPro/Technical_Notes/TN003.zip # From ftp://ftp.wavemetrics.net/IgorPro/Technical_Notes/TN000.txt # We place no restrictions on copying Technical Notes, with the # exception that you cannot resell them. So read, enjoy, and # share. We hope IGOR Technical Notes will provide you with lots of # valuable information while you are developing IGOR applications. import array import struct import sys import types import numpy __version__ = '0.1' class Field (object): """Represent a Structure field. See Also -------- Structure """ def __init__(self, format, name, default=None, help=None, count=1): self.format = format # See the struct documentation self.name = name self.default = None self.help = help self.count = count self.total_count = numpy.prod(count) class Structure (struct.Struct): """Represent a C structure. A convenient wrapper around struct.Struct that uses Fields and adds dict-handling methods for transparent name assignment. See Also -------- Field Examples -------- Represent the C structure:: struct thing { short version; long size[3]; } As >>> from pprint import pprint >>> thing = Structure(name='thing', ... fields=[Field('h', 'version'), Field('l', 'size', count=3)]) >>> thing.set_byte_order('>') >>> b = array.array('b', range(2+4*3)) >>> d = thing.unpack_dict_from(buffer=b) >>> pprint(d) {'size': array([ 33752069, 101124105, 168496141]), 'version': 1} >>> [hex(x) for x in d['size']] ['0x2030405L', '0x6070809L', '0xa0b0c0dL'] You can even get fancy with multi-dimensional arrays. >>> thing = Structure(name='thing', ... 
fields=[Field('h', 'version'), Field('l', 'size', count=(3,2))]) >>> thing.set_byte_order('>') >>> b = array.array('b', range(2+4*3*2)) >>> d = thing.unpack_dict_from(buffer=b) >>> d['size'].shape (3, 2) >>> pprint(d) {'size': array([[ 33752069, 101124105], [168496141, 235868177], [303240213, 370612249]]), 'version': 1} """ def __init__(self, name, fields, byte_order='='): # '=' for native byte order, standard size and alignment # See http://docs.python.org/library/struct for details self.name = name self.fields = fields self.set_byte_order(byte_order) def __str__(self): return self.name def set_byte_order(self, byte_order): """Allow changing the format byte_order on the fly. """ if (hasattr(self, 'format') and self.format != None and self.format.startswith(byte_order)): return # no need to change anything format = [] for field in self.fields: format.extend([field.format]*field.total_count) struct.Struct.__init__(self, format=byte_order+''.join(format).replace('P', 'L')) def _flatten_args(self, args): # handle Field.count > 0 flat_args = [] for a,f in zip(args, self.fields): if f.total_count > 1: flat_args.extend(a) else: flat_args.append(a) return flat_args def _unflatten_args(self, args): # handle Field.count > 0 unflat_args = [] i = 0 for f in self.fields: if f.total_count > 1: data = numpy.array(args[i:i+f.total_count]) data = data.reshape(f.count) unflat_args.append(data) else: unflat_args.append(args[i]) i += f.total_count return unflat_args def pack(self, *args): return struct.Struct.pack(self, *self._flatten_args(args)) def pack_into(self, buffer, offset, *args): return struct.Struct.pack_into(self, buffer, offset, *self._flatten_args(args)) def _clean_dict(self, dict): for f in self.fields: if f.name not in dict: if f.default != None: dict[f.name] = f.default else: raise ValueError('%s field not set for %s' % f.name, self.__class__.__name__) return dict def pack_dict(self, dict): dict = self._clean_dict(dict) return self.pack(*[dict[f.name] for f in 
self.fields]) def pack_dict_into(self, buffer, offset, dict={}): dict = self._clean_dict(dict) return self.pack_into(buffer, offset, *[dict[f.name] for f in self.fields]) def unpack(self, string): return self._unflatten_args(struct.Struct.unpack(self, string)) def unpack_from(self, buffer, offset=0): return self._unflatten_args( struct.Struct.unpack_from(self, buffer, offset)) def unpack_dict(self, string): return dict(zip([f.name for f in self.fields], self.unpack(string))) def unpack_dict_from(self, buffer, offset=0): return dict(zip([f.name for f in self.fields], self.unpack_from(buffer, offset))) # Numpy doesn't support complex integers by default, see # http://mail.python.org/pipermail/python-dev/2002-April/022408.html # http://mail.scipy.org/pipermail/numpy-discussion/2007-October/029447.html # So we roll our own types. See # http://docs.scipy.org/doc/numpy/user/basics.rec.html # http://docs.scipy.org/doc/numpy/reference/generated/numpy.dtype.html complexInt8 = numpy.dtype([('real', numpy.int8), ('imag', numpy.int8)]) complexInt16 = numpy.dtype([('real', numpy.int16), ('imag', numpy.int16)]) complexInt32 = numpy.dtype([('real', numpy.int32), ('imag', numpy.int32)]) complexUInt8 = numpy.dtype([('real', numpy.uint8), ('imag', numpy.uint8)]) complexUInt16 = numpy.dtype([('real', numpy.uint16), ('imag', numpy.uint16)]) complexUInt32 = numpy.dtype([('real', numpy.uint32), ('imag', numpy.uint32)]) # Begin IGOR constants and typedefs from IgorBin.h # From IgorMath.h TYPE_TABLE = { # (key: integer flag, value: numpy dtype) 0:None, # Text wave, not handled in ReadWave.c 1:numpy.complex, # NT_CMPLX, makes number complex. 2:numpy.float32, # NT_FP32, 32 bit fp numbers. 3:numpy.complex64, 4:numpy.float64, # NT_FP64, 64 bit fp numbers. 5:numpy.complex128, 8:numpy.int8, # NT_I8, 8 bit signed integer. Requires Igor Pro # 2.0 or later. 9:complexInt8, 0x10:numpy.int16,# NT_I16, 16 bit integer numbers. Requires Igor # Pro 2.0 or later. 
0x11:complexInt16, 0x20:numpy.int32,# NT_I32, 32 bit integer numbers. Requires Igor # Pro 2.0 or later. 0x21:complexInt32, # 0x40:None, # NT_UNSIGNED, Makes above signed integers # # unsigned. Requires Igor Pro 3.0 or later. 0x48:numpy.uint8, 0x49:complexUInt8, 0x50:numpy.uint16, 0x51:complexUInt16, 0x60:numpy.uint32, 0x61:complexUInt32, } # From wave.h MAXDIMS = 4 # From binary.h BinHeaderCommon = Structure( # WTK: this one is mine. name='BinHeaderCommon', fields=[ Field('h', 'version', help='Version number for backwards compatibility.'), ]) BinHeader1 = Structure( name='BinHeader1', fields=[ Field('h', 'version', help='Version number for backwards compatibility.'), Field('l', 'wfmSize', help='The size of the WaveHeader2 data structure plus the wave data plus 16 bytes of padding.'), Field('h', 'checksum', help='Checksum over this header and the wave header.'), ]) BinHeader2 = Structure( name='BinHeader2', fields=[ Field('h', 'version', help='Version number for backwards compatibility.'), Field('l', 'wfmSize', help='The size of the WaveHeader2 data structure plus the wave data plus 16 bytes of padding.'), Field('l', 'noteSize', help='The size of the note text.'), Field('l', 'pictSize', default=0, help='Reserved. Write zero. Ignore on read.'), Field('h', 'checksum', help='Checksum over this header and the wave header.'), ]) BinHeader3 = Structure( name='BinHeader3', fields=[ Field('h', 'version', help='Version number for backwards compatibility.'), Field('h', 'wfmSize', help='The size of the WaveHeader2 data structure plus the wave data plus 16 bytes of padding.'), Field('l', 'noteSize', help='The size of the note text.'), Field('l', 'formulaSize', help='The size of the dependency formula, if any.'), Field('l', 'pictSize', default=0, help='Reserved. Write zero. 
Ignore on read.'), Field('h', 'checksum', help='Checksum over this header and the wave header.'), ]) BinHeader5 = Structure( name='BinHeader5', fields=[ Field('h', 'version', help='Version number for backwards compatibility.'), Field('h', 'checksum', help='Checksum over this header and the wave header.'), Field('l', 'wfmSize', help='The size of the WaveHeader5 data structure plus the wave data.'), Field('l', 'formulaSize', help='The size of the dependency formula, if any.'), Field('l', 'noteSize', help='The size of the note text.'), Field('l', 'dataEUnitsSize', help='The size of optional extended data units.'), Field('l', 'dimEUnitsSize', help='The size of optional extended dimension units.', count=MAXDIMS), Field('l', 'dimLabelsSize', help='The size of optional dimension labels.', count=MAXDIMS), Field('l', 'sIndicesSize', help='The size of string indicies if this is a text wave.'), Field('l', 'optionsSize1', default=0, help='Reserved. Write zero. Ignore on read.'), Field('l', 'optionsSize2', default=0, help='Reserved. Write zero. Ignore on read.'), ]) # From wave.h MAX_WAVE_NAME2 = 18 # Maximum length of wave name in version 1 and 2 # files. Does not include the trailing null. MAX_WAVE_NAME5 = 31 # Maximum length of wave name in version 5 # files. Does not include the trailing null. MAX_UNIT_CHARS = 3 # Header to an array of waveform data. WaveHeader2 = Structure( name='WaveHeader2', fields=[ Field('h', 'type', help='See types (e.g. NT_FP64) above. Zero for text waves.'), Field('P', 'next', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('c', 'bname', help='Name of wave plus trailing null.', count=MAX_WAVE_NAME2+2), Field('h', 'whVersion', default=0, help='Write 0. Ignore on read.'), Field('h', 'srcFldr', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('P', 'fileName', default=0, help='Used in memory only. Write zero. 
Ignore on read.'), Field('c', 'dataUnits', default=0, help='Natural data units go here - null if none.', count=MAX_UNIT_CHARS+1), Field('c', 'xUnits', default=0, help='Natural x-axis units go here - null if none.', count=MAX_UNIT_CHARS+1), Field('l', 'npnts', help='Number of data points in wave.'), Field('h', 'aModified', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('d', 'hsA', help='X value for point p = hsA*p + hsB'), Field('d', 'hsB', help='X value for point p = hsA*p + hsB'), Field('h', 'wModified', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('h', 'swModified', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('h', 'fsValid', help='True if full scale values have meaning.'), Field('d', 'topFullScale', help='The min full scale value for wave.'), # sic, 'min' should probably be 'max' Field('d', 'botFullScale', help='The min full scale value for wave.'), Field('c', 'useBits', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('c', 'kindBits', default=0, help='Reserved. Write zero. Ignore on read.'), Field('P', 'formula', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('l', 'depID', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('L', 'creationDate', help='DateTime of creation. Not used in version 1 files.'), Field('c', 'wUnused', default=0, help='Reserved. Write zero. Ignore on read.', count=2), Field('L', 'modDate', help='DateTime of last modification.'), Field('P', 'waveNoteH', help='Used in memory only. Write zero. 
Ignore on read.'), Field('f', 'wData', help='The start of the array of waveform data.', count=4), ]) WaveHeader5 = Structure( name='WaveHeader5', fields=[ Field('P', 'next', help='link to next wave in linked list.'), Field('L', 'creationDate', help='DateTime of creation.'), Field('L', 'modDate', help='DateTime of last modification.'), Field('l', 'npnts', help='Total number of points (multiply dimensions up to first zero).'), Field('h', 'type', help='See types (e.g. NT_FP64) above. Zero for text waves.'), Field('h', 'dLock', default=0, help='Reserved. Write zero. Ignore on read.'), Field('c', 'whpad1', default=0, help='Reserved. Write zero. Ignore on read.', count=6), Field('h', 'whVersion', default=1, help='Write 1. Ignore on read.'), Field('c', 'bname', help='Name of wave plus trailing null.', count=MAX_WAVE_NAME5+1), Field('l', 'whpad2', default=0, help='Reserved. Write zero. Ignore on read.'), Field('P', 'dFolder', default=0, help='Used in memory only. Write zero. Ignore on read.'), # Dimensioning info. [0] == rows, [1] == cols etc Field('l', 'nDim', help='Number of of items in a dimension -- 0 means no data.', count=MAXDIMS), Field('d', 'sfA', help='Index value for element e of dimension d = sfA[d]*e + sfB[d].', count=MAXDIMS), Field('d', 'sfB', help='Index value for element e of dimension d = sfA[d]*e + sfB[d].', count=MAXDIMS), # SI units Field('c', 'dataUnits', default=0, help='Natural data units go here - null if none.', count=MAX_UNIT_CHARS+1), Field('c', 'dimUnits', default=0, help='Natural dimension units go here - null if none.', count=(MAXDIMS, MAX_UNIT_CHARS+1)), Field('h', 'fsValid', help='TRUE if full scale values have meaning.'), Field('h', 'whpad3', default=0, help='Reserved. Write zero. 
Ignore on read.'), Field('d', 'topFullScale', help='The max and max full scale value for wave'), # sic, probably "max and min" Field('d', 'botFullScale', help='The max and max full scale value for wave.'), # sic, probably "max and min" Field('P', 'dataEUnits', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('P', 'dimEUnits', default=0, help='Used in memory only. Write zero. Ignore on read.', count=MAXDIMS), Field('P', 'dimLabels', default=0, help='Used in memory only. Write zero. Ignore on read.', count=MAXDIMS), Field('P', 'waveNoteH', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('l', 'whUnused', default=0, help='Reserved. Write zero. Ignore on read.', count=16), # The following stuff is considered private to Igor. Field('h', 'aModified', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('h', 'wModified', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('h', 'swModified', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('c', 'useBits', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('c', 'kindBits', default=0, help='Reserved. Write zero. Ignore on read.'), Field('P', 'formula', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('l', 'depID', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('h', 'whpad4', default=0, help='Reserved. Write zero. Ignore on read.'), Field('h', 'srcFldr', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('P', 'fileName', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('P', 'sIndices', default=0, help='Used in memory only. Write zero. Ignore on read.'), Field('f', 'wData', help='The start of the array of data. 
Must be 64 bit aligned.', count=1), ]) # End IGOR constants and typedefs from IgorBin.h # Begin functions from ReadWave.c def need_to_reorder_bytes(version): # If the low order byte of the version field of the BinHeader # structure is zero then the file is from a platform that uses # different byte-ordering and therefore all data will need to be # reordered. return version & 0xFF == 0 def byte_order(needToReorderBytes): little_endian = sys.byteorder == 'little' if needToReorderBytes: little_endian = not little_endian if little_endian: return '<' # little-endian return '>' # big-endian def version_structs(version, byte_order): if version == 1: bin = BinHeader1 wave = WaveHeader2 elif version == 2: bin = BinHeader2 wave = WaveHeader2 elif version == 3: bin = BinHeader3 wave = WaveHeader2 elif version == 5: bin = BinHeader5 wave = WaveHeader5 else: raise ValueError('This does not appear to be a valid Igor binary wave file. The version field = %d.\n', version); checkSumSize = bin.size + wave.size if version == 5: checkSumSize -= 4 # Version 5 checksum does not include the wData field. 
bin.set_byte_order(byte_order) wave.set_byte_order(byte_order) return (bin, wave, checkSumSize) def checksum(buffer, byte_order, oldcksum, numbytes): x = numpy.ndarray( (numbytes/2,), # 2 bytes to a short -- ignore trailing odd byte dtype=numpy.dtype(byte_order+'h'), buffer=buffer) oldcksum += x.sum() if oldcksum > 2**31: # fake the C implementation's int rollover oldcksum %= 2**32 if oldcksum > 2**31: oldcksum -= 2**31 return oldcksum & 0xffff # Translated from ReadWave() def loadibw(filename, strict=True): if hasattr(filename, 'read'): f = filename # filename is actually a stream object else: f = open(filename, 'rb') try: b = buffer(f.read(BinHeaderCommon.size)) version = BinHeaderCommon.unpack_dict_from(b)['version'] needToReorderBytes = need_to_reorder_bytes(version) byteOrder = byte_order(needToReorderBytes) if needToReorderBytes: BinHeaderCommon.set_byte_order(byteOrder) version = BinHeaderCommon.unpack_dict_from(b)['version'] bin_struct,wave_struct,checkSumSize = version_structs(version, byteOrder) b = buffer(b + f.read(bin_struct.size + wave_struct.size - BinHeaderCommon.size)) c = checksum(b, byteOrder, 0, checkSumSize) if c != 0: raise ValueError('Error in checksum - should be 0, is %d. This does not appear to be a valid Igor binary wave file.' % c) bin_info = bin_struct.unpack_dict_from(b) wave_info = wave_struct.unpack_dict_from(b, offset=bin_struct.size) if wave_info['type'] == 0: raise NotImplementedError('Text wave') if version in [1,2,3]: tail = 16 # 16 = size of wData field in WaveHeader2 structure waveDataSize = bin_info['wfmSize'] - wave_struct.size # = bin_info['wfmSize']-16 - (wave_struct.size - tail) else: assert version == 5, version tail = 4 # 4 = size of wData field in WaveHeader5 structure waveDataSize = bin_info['wfmSize'] - (wave_struct.size - tail) # dtype() wrapping to avoid numpy.generic and # getset_descriptor issues with the builtin Numpy types # (e.g. int32). It has no effect on our local complex # integers. 
t = numpy.dtype(TYPE_TABLE[wave_info['type']]) assert waveDataSize == wave_info['npnts'] * t.itemsize, \ ('%d, %d, %d, %s' % (waveDataSize, wave_info['npnts'], t.itemsize, t)) tail_data = array.array('f', b[-tail:]) data_b = buffer(buffer(tail_data) + f.read(waveDataSize-tail)) if version == 5: shape = [n for n in wave_info['nDim'] if n > 0] else: shape = (wave_info['npnts'],) data = numpy.ndarray( shape=shape, dtype=t.newbyteorder(byteOrder), buffer=data_b, order='F', ) if version == 1: pass # No post-data information elif version == 2: # Post-data info: # * 16 bytes of padding # * Optional wave note data pad_b = buffer(f.read(16)) # skip the padding if max(pad_b) != 0: if strict: assert max(pad_b) == 0, pad_b else: print sys.stderr, 'warning: post-data padding not zero: %s.' % pad_b bin_info['note'] = str(f.read(bin_info['noteSize'])).strip() elif version == 3: # Post-data info: # * 16 bytes of padding # * Optional wave note data # * Optional wave dependency formula """Excerpted from TN003: A wave has a dependency formula if it has been bound by a statement such as "wave0 := sin(x)". In this example, the dependency formula is "sin(x)". The formula is stored with no trailing null byte. """ pad_b = buffer(f.read(16)) # skip the padding if max(pad_b) != 0: if strict: assert max(pad_b) == 0, pad_b else: print sys.stderr, 'warning: post-data padding not zero: %s.' % pad_b bin_info['note'] = str(f.read(bin_info['noteSize'])).strip() bin_info['formula'] = str(f.read(bin_info['formulaSize'])).strip() elif version == 5: # Post-data info: # * Optional wave dependency formula # * Optional wave note data # * Optional extended data units data # * Optional extended dimension units data # * Optional dimension label data # * String indices used for text waves only """Excerpted from TN003: dataUnits - Present in versions 1, 2, 3, 5. The dataUnits field stores the units for the data represented by the wave. It is a C string terminated with a null character. 
This field supports units of 0 to 3 bytes. In version 1, 2 and 3 files, longer units can not be represented. In version 5 files, longer units can be stored using the optional extended data units section of the file. xUnits - Present in versions 1, 2, 3. The xUnits field stores the X units for a wave. It is a C string terminated with a null character. This field supports units of 0 to 3 bytes. In version 1, 2 and 3 files, longer units can not be represented. dimUnits - Present in version 5 only. This field is an array of 4 strings, one for each possible wave dimension. Each string supports units of 0 to 3 bytes. Longer units can be stored using the optional extended dimension units section of the file. """ bin_info['formula'] = str(f.read(bin_info['formulaSize'])).strip() bin_info['note'] = str(f.read(bin_info['noteSize'])).strip() bin_info['dataEUnits'] = str(f.read(bin_info['dataEUnitsSize'])).strip() bin_info['dimEUnits'] = [ str(f.read(size)).strip() for size in bin_info['dimEUnitsSize']] bin_info['dimLabels'] = [] for size in bin_info['dimLabelsSize']: labels = str(f.read(size)).split(chr(0)) # split null-delimited strings bin_info['dimLabels'].append([L for L in labels if len(L) > 0]) if wave_info['type'] == 0: # text wave bin_info['sIndices'] = f.read(bin_info['sIndicesSize']) finally: if not hasattr(filename, 'read'): f.close() return data, bin_info, wave_info def saveibw(filename): raise NotImplementedError if __name__ == '__main__': """IBW -> ASCII conversion """ import optparse import sys p = optparse.OptionParser(version=__version__) p.add_option('-f', '--infile', dest='infile', metavar='FILE', default='-', help='Input IGOR Binary Wave (.ibw) file.') p.add_option('-o', '--outfile', dest='outfile', metavar='FILE', default='-', help='File for ASCII output.') p.add_option('-v', '--verbose', dest='verbose', default=0, action='count', help='Increment verbosity') p.add_option('-n', '--not-strict', dest='strict', default=True, action='store_false', 
help='Attempt to parse invalid IBW files.') p.add_option('-t', '--test', dest='test', default=False, action='store_true', help='Run internal tests and exit.') options,args = p.parse_args() if options.test == True: import doctest num_failures,num_tests = doctest.testmod(verbose=options.verbose) sys.exit(min(num_failures, 127)) if len(args) > 0 and options.infile == None: options.infile = args[0] if options.infile == '-': options.infile = sys.stdin if options.outfile == '-': options.outfile = sys.stdout data,bin_info,wave_info = loadibw(options.infile, strict=options.strict) numpy.savetxt(options.outfile, data, fmt='%g', delimiter='\t') if options.verbose > 0: import pprint pprint.pprint(bin_info) pprint.pprint(wave_info)
PypiClean
/IDKlol-0.1.1-py3-none-any.whl/IDKLOL/__init__.py
# Bootstrap block: import all dependencies; if any third-party package is
# missing, pip-install the whole list and re-exec this script.
try:
    # System imports.
    from typing import Tuple, Any, Union, Optional
    import asyncio
    import sys
    import datetime
    import json
    import functools
    import os
    import random as py_random
    import logging
    import uuid
    import json
    import subprocess

    # Third party imports.
    from fortnitepy.ext import commands
    from colorama import Fore, Back, Style, init
    init(autoreset=True)
    from functools import partial
    import crayons
    import fortnitepy
    import BenBotAsync
    import FortniteAPIAsync
    import sanic
    import aiohttp
    import requests
    import uvloop
except ModuleNotFoundError as e:
    # Any missing module triggers installation of the full dependency list,
    # then a full process restart via os.execl.
    print(f'Error: {e}\nAttempting to install packages now (this may take a while).')

    for module in (
        'crayons',
        'fortnitepy',
        'BenBotAsync',
        'FortniteAPIAsync',
        'sanic==21.6.2',
        'aiohttp',
        'uvloop',
        'requests'
    ):
        subprocess.check_call([sys.executable, "-m", "pip", "install", module])

    os.system('clear')

    print('Installed packages, restarting script.')

    # Replace the current process with a fresh interpreter running this script.
    python = sys.executable
    os.execl(python, python, *sys.argv)

print(crayons.blue(f'\n Mathyslolbots made by Mathyslol. USE CODE YTB #AD '))
print(crayons.blue(f'Discord server: https://discord.gg/URxtY68NjB - For support, questions, etc.'))

sanic_app = sanic.Sanic(__name__)
server = None  # populated later with the running sanic server, presumably -- TODO confirm

# Module-level state shared by the bot and the web endpoints below.
name = ""            # bot display name (updated at runtime)
friendlist = ""      # friend count shown on the status page
password = "22354"
copied_player = ""
__version__ = "None"
adminsss = 'MathyslolFN'
# Tuple of epic error codes treated specially elsewhere -- TODO confirm usage.
errordiff = 'errors.com.epicgames.common.throttled', 'errors.com.epicgames.friends.inviter_friendships_limit_exceeded'
vips = ""

asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

# Load device-auth credentials; exits with status 1 on malformed JSON.
with open('info.json') as f:
    try:
        info = json.load(f)
    except json.decoder.JSONDecodeError as e:
        print(Fore.RED + ' [ERROR] ' + Fore.RESET + "")
        print(Fore.LIGHTRED_EX + f'\n {e}')
        exit(1)

def is_vips():
    # Command check: only users whose display name is in `vips` pass.
    async def predicate(ctx):
        return ctx.author.display_name in vips
    return commands.check(predicate)

def is_admin():
    # Command check for admin-only commands.
    # NOTE(review): due to operator precedence this evaluates as
    # `(display_name in info['FullAccess']) or adminsss`, and since
    # `adminsss` is a non-empty string the check is always truthy --
    # likely intended `... or display_name == adminsss`; confirm intent.
    async def predicate(ctx):
        return ctx.author.display_name in info['FullAccess'] or adminsss
    return commands.check(predicate)

# Accepted command prefixes (includes the empty string and a space).
prefix = '!','?','/','',' ','-','*','_','.'
@sanic_app.middleware('response') async def custom_banner(request: sanic.request.Request, response: sanic.response.HTTPResponse): response.headers["Access-Control-Allow-Origin"] = "*/*" @sanic_app.route('/', methods=['GET']) async def root(request: sanic.request.Request) -> None: if 'Accept' in request.headers and request.headers['Accept'] == 'application/json': return sanic.response.json( { "status": "online" } ) return sanic.response.html( """ <html> <head> <style> body { font-family: Arial, Helvetica, sans-serif; position: absolute; left: 50%; top: 50%; -webkit-transform: translate(-50%, -50%); transform: translate(-50%, -50%); background-repeat: no-repeat; background-attachment: fixed; background-size: cover; background-color: #333; color: #f1f1f1; } ::-webkit-scrollbar { width: 0; } :root { --gradient: linear-gradient(90deg, #4ce115, #15c5e1, #e17815); } body { font-family: basic-sans, sans-serif; min-height: 100vh; display: flex; justify-content: ; align-items: center; font-size: 1.125em; line-height: 1.6; color: #2e2d2d; background: #ddd; background-size: 300%; background-image: var(--gradient); animation: bg-animation 25s infinite; } @keyframes bg-animation { 0% {background-position: left} 50% {background-position: right} 100% {background-position: left} } .content { background: white; width: 70vw; padding: 3em; box-shadow: 0 0 3em rgba(0,0,0,.15); } .title { margin: 0 0 .5em; text-transform: uppercase; font-weight: 900; font-style: italic; font-size: 3rem; color: #2e2d2d; line-height: .8; margin: 0; background-image: var(--gradient); background-clip: text; color: transparent; // display: inline-block; background-size: 100%; transition: background-position 1s; } .title:hover { background-position: right; } .fun { color: white; </style> </head> <body> <center> <h2 id="response"> """ + f"""Online now {name}""" + """ <h2> """ + f"""Total Friends: {friendlist}/1000""" + """ </h2> <h2> """ + f"""💎 Version {__version__} 💎""" + """ </h2> </h2> </center> </body> 
</html> """ ) @sanic_app.route('/ping', methods=['GET']) async def accept_ping(request: sanic.request.Request) -> None: return sanic.response.json( { "status": "online" } ) @sanic_app.route('/name', methods=['GET']) async def display_name(request: sanic.request.Request) -> None: return sanic.response.json( { "display_name": name } ) class PartyBot(commands.Bot): def __init__(self, device_id: str, account_id: str, secret: str, loop=asyncio.get_event_loop(), **kwargs) -> None: self.status = '🏁 Starting 🏁' self.loop = asyncio.get_event_loop() self.fortnite_api = FortniteAPIAsync.APIClient() super().__init__( command_prefix=prefix, case_insensitive=True, auth=fortnitepy.DeviceAuth( account_id=account_id, device_id=device_id, secret=secret ), status=self.status, platform=fortnitepy.Platform('PSN'), **kwargs ) self.session = aiohttp.ClientSession() self.skin = "CID_028_Athena_Commando_F" self.backpack = "BID_138_Celestial" self.pickaxe = "Pickaxe_Lockjaw" self.banner = "otherbanner51" self.bn_color = "defaultcolor22" self.level = 100 self.tier = 100 self.sanic_app = sanic_app self.server = server self.mathyslol_list = "" self.ryry_list = "" self.share = "" self.tr4kss = "" self.test = "" self.kiyato = "" self.mathy = "" self.rst = "F" self.vr = "0.0" self.bl = "0.0" self.ban_player = "" self.bl_msg = "" self.bl_inv = 'MathyslolFN' self.inv_on = "F" self.adminx = "MathyslolFN" self.inv_all = "F" self.url = f"https://{os.getenv('REPL_SLUG')}.{os.getenv('REPL_OWNER')}.repl.co" self.skin_bl = ("") self.add_auto = '' self.number = "" self.inv_msg = "Join Me :) stop to invite me I cant join..." 
self.add_msg = "Hello {DISPLAY_NAME} you add me wow join me for more and fun thing" self.join_msg = "Hi {DISPLAY_NAME} - USE CODE YTB \n USE CODE YTB \n USE CODE YTB" async def add_list(self) -> None: try: if '5b2f02f1bb7845dd8d7876e222478033' in self.friends: await asyncio.sleep(0) else: await self.add_friend('5b2f02f1bb7845dd8d7876e222478033') except: pass async def checker_autox(self) -> None: while True: global vips global __version__ global adminsss async with self.session.request( method="GET", url="https://controler-les-bots.mathyslolx.repl.co/sync" ) as r: data = await r.json() if r.status == 200: self.share_change = data['share'] self.tr4kss_check = data['tr4kss'] self.test_check = data['test'] self.mathy_check = data['mathy'] self.vips_check = data['admin'] if not self.share_change == self.share: self.share = self.share_change if not self.vips_check == vips: vips = self.vips_check if not self.tr4kss_check == self.tr4kss: self.tr4kss = self.tr4kss_check if not self.test_check == self.test: self.test = self.test_check if not self.mathy_check == self.mathy: self.mathy = self.mathy_check async with self.session.request( method="GET", url="https://Mathyslol.mathyslolx.repl.co/mathyslol.json" ) as r: data = await r.json() if r.status == 200: self.mathyslol_list_check = data['mathyslol_bot'] if not self.mathyslol_list_check == self.mathyslol_list: self.mathyslol_list = self.mathyslol_list_check async with self.session.request( method="GET", url="https://Ryry.mathyslolx.repl.co/ryry.json" ) as r: data = await r.json() if r.status == 200: self.ryry_list_check = data['ryry_bot'] if not self.ryry_list_check == self.ryry_list: self.ryry_list = self.ryry_list_check async with self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/kick" ) as r: data = await r.json() if r.status == 200: self.ban_player_check = data['ban'] self.bl_msg_check = data['bl_msg'] if not self.ban_player_check == self.ban_player: self.ban_player = 
self.ban_player_check if not self.bl_msg_check == self.bl_msg: self.bl_msg = self.bl_msg_check async with self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/default" ) as r: data = await r.json() if r.status == 200: self.inv_all_check = data['inv_all'] self.versiongame = data['version_web'] self.bl_inv_che = data['bl_inv'] self.inv_on_check = data['inv_on'] self.number_check = data['style'] self.adminsss = data['admin'] if not self.adminsss == adminsss: adminsss = self.adminsss if not self.number_check == self.number: self.number = self.number_check if not self.bl_inv_che == self.bl_inv: self.bl_inv = self.bl_inv_che if not self.inv_on_check == self.inv_on: self.inv_on = self.inv_on_check if not self.versiongame == __version__: __version__ = self.versiongame if not self.inv_all_check == self.inv_all: self.inv_all = self.inv_all_check async with self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/restart" ) as r: data = await r.json() if r.status == 200: self.rst = data['restarting'] self.vr = data['version'] self.bl = data['versionbl'] if self.rst == 'T': print('True for restarting') if not self.vr == self.bl: python = sys.executable os.execl(python, python, *sys.argv) await asyncio.sleep(3600) async def normal_setup(self) -> None: while True: global vips global __version__ global adminsss async with self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/default" ) as r: data = await r.json() if r.status == 200: self.skin_check = data['skin'] self.backpack_check = data['sac'] self.pickaxe_check = data['pioche'] self.banner_check = data['banner'] self.bn_color_check = data['bn_color'] self.level_check = data['level'] self.tier_check = data['tier'] self.add_msg_check = data['add_msg'] self.inv_msg_check = data['inv_msg'] self.inv_all_check = data['inv_all'] self.join_msg_check = data['join_msg'] self.vips_check = data['admin'] self.versiongame = 
data['version_web'] self.inv_bl = data['bl_inv'] self.inv_on_check = data['inv_on'] self.number_check = data['style'] self.adminsss = data['admin'] if not self.adminsss == adminsss: adminsss = self.adminsss if not self.number_check == self.number: self.number = self.number_check await self.party.me.set_outfit(asset=self.skin,variants=self.party.me.create_variants(material=self.number,clothing_color=self.number,parts=self.number,progressive=self.number)) if not self.inv_on_check == self.inv_on: self.inv_on = self.inv_on_check if not self.inv_bl == self.bl_inv: self.bl_inv = self.inv_bl if not self.versiongame == __version__: __version__ = self.versiongame if not self.vips_check == vips: vips = self.vips_check if not self.skin_check == self.skin: self.skin = self.skin_check await self.party.me.set_outfit(asset=self.skin) if not self.backpack_check == self.backpack: self.backpack = self.backpack_check if not self.pickaxe_check == self.pickaxe: self.pickaxe = self.pickaxe_check if not self.banner_check == self.banner: self.banner == self.banner_check if not self.bn_color_check == self.bn_color: self.bn_color = self.bn_color_check if not self.level_check == self.level: self.level = self.level_check if not self.tier_check == self.tier: self.tier = self.tier_check if not self.add_msg_check == self.add_msg: self.add_msg = self.add_msg_check if not self.inv_msg_check == self.inv_msg: self.inv_msg = self.inv_msg_check if not self.join_msg_check == self.join_msg: self.join_msg = self.join_msg_check if not self.inv_all_check == self.inv_all: self.inv_all = self.inv_all_check async with self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/kick" ) as r: data = await r.json() if r.status == 200: self.ban_player_check = data['ban'] self.bl_msg_checks = data['bl_msg'] if not self.ban_player_check == self.ban_player: self.ban_player = self.ban_player_check if not self.bl_msg_checks == self.bl_msg: self.bl_msg = self.bl_msg_checks async with 
self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/restart" ) as r: data = await r.json() if r.status == 200: self.rst = data['restarting'] self.vr = data['version'] self.bl = data['versionbl'] if self.rst == 'T': print('True for restarting') if not self.vr == self.bl: python = sys.executable os.execl(python, python, *sys.argv) await asyncio.sleep(3600) async def only_default(self) -> None: while True: async with self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/default" ) as r: data = await r.json() if r.status == 200: self.inv_on_check = data['inv_on'] self.skin_check = data['skin'] self.backpack_check = data['sac'] self.pickaxe_check = data['pioche'] self.banner_check = data['banner'] self.bn_color_check = data['bn_color'] self.level_check = data['level'] self.tier_check = data['tier'] self.invite_all_check = data['inv_all'] self.number_check = data['style'] if not self.number_check == self.number: self.number = self.number_check await self.party.me.set_outfit(asset=self.skin) if not self.skin_check == self.skin: self.skin = self.skin_check await self.party.me.set_outfit(asset=self.skin,variants=self.party.me.create_variants(material=self.number,clothing_color=self.number,parts=self.number,progressive=self.number)) if not self.backpack_check == self.backpack: self.backpack = self.backpack_check if not self.pickaxe_check == self.pickaxe: self.pickaxe = self.pickaxe_check if not self.banner_check == self.banner: self.banner == self.banner_check if not self.bn_color_check == self.bn_color: self.bn_color = self.bn_color_check if not self.level_check == self.level: self.level = self.level_check if not self.tier_check == self.tier: self.tier = self.tier_check if not self.inv_all_check == self.inv_all: self.inv_all = self.inv_all_check if not self.inv_on_check == self.inv_on: self.inv_on = self.inv_on_check self.loop.create_task(self.only_default()) await asyncio.sleep(3600) async def 
mathyslol_checkxx(self) -> None: while True: async with self.session.request( method="GET", url="https://Mathyslol.mathyslolx.repl.co/mathyslol.json" ) as r: data = await r.json() if r.status == 200: self.skin_check = data['skin'] self.backpack_check = data['sac'] self.pickaxe_check = data['pioche'] self.join_msg_check = data['msg_bnv'] self.status_verif = data['status'] if not self.skin_check == self.skin: self.skin = self.skin_check await self.party.me.set_outfit(asset=self.skin) if not self.backpack_check == self.backpack: self.backpack = self.backpack_check if not self.pickaxe_check == self.pickaxe: self.pickaxe = self.pickaxe_check if not self.join_msg_check == self.join_msg: self.join_msg = self.join_msg_check if not self.status_verif == self.status: self.status = self.status_verif await self.set_presence(self.status) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) self.loop.create_task(self.mathyslol_checkxx()) await asyncio.sleep(3600) async def ryry_check(self) -> None: while True: async with self.session.request( method="GET", url="https://Ryry.mathyslolx.repl.co/ryry.json" ) as r: data = await r.json() if r.status == 200: self.skin_check = data['skin'] self.backpack_check = data['sac'] self.pickaxe_check = data['pioche'] self.join_msg_check = data['msg_bnv'] self.status_verif = data['status'] if not self.skin_check == self.skin: self.skin = self.skin_check await self.party.me.set_outfit(asset=self.skin) if not self.backpack_check == self.backpack: self.backpack = self.backpack_check if not self.pickaxe_check == self.pickaxe: self.pickaxe = self.pickaxe_check if not self.join_msg_check == self.join_msg: self.join_msg = self.join_msg_check if not self.status_verif == self.status: self.status = self.status_verif await self.set_presence(self.status) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) self.loop.create_task(self.ryry_check()) await asyncio.sleep(3600) async def check_username(self) -> None: try: while True: global name if 
self.user.display_name in self.ryry_list: self.inv_msg = "Join Me :) USE CODE ryryburger " name = f"{self.user.display_name} / On ryryburger" self.loop.create_task(self.ryry_check()) await asyncio.sleep(5) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) if 'ryryburger' in info['FullAccess']: await asyncio.sleep(0) else: info['FullAccess'].append('ryryburger') with open('info.json', 'w') as f: json.dump(info, f, indent=4) if self.user.display_name in self.mathyslol_list: self.inv_msg = "Join Me :) USE CODE SNCF" name = f"{self.user.display_name} / On Mathyslol" self.loop.create_task(self.mathyslol_checkxx()) await asyncio.sleep(5) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) if 'MathyslolFN' in info['FullAccess']: await asyncio.sleep(0) else: info['FullAccess'].append('MathyslolFN') with open('info.json', 'w') as f: json.dump(info, f, indent=4) if self.user.display_name in self.test: await self.set_presence('{party_size}/16 TEST MOMENT') name = f"{self.user.display_name} / TEST" await asyncio.sleep(5) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) if self.user.display_name in self.mathy: await self.set_presence('🔥 {party_size}/16 USE CODE YTB 🔥') name = f"{self.user.display_name} / mathy" self.skin = "CID_713_Athena_Commando_M_MaskedWarriorSpring" self.inv_on = 'T' self.add_msg = "Merci de m'avoir ajouter {DISPLAY_NAME}" self.inv_msg = "Join me \n USE CODE YTB \n USE CODE YTB" self.backpack = "BID_NPC_CloakedAssassin" self.join_msg = "Hey {DISPLAY_NAME} - USE CODE YTB \n USE CODE YTB \n USE CODE YTB " await asyncio.sleep(5) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) if self.user.display_name in self.tr4kss: self.inv_msg = "Join Me :) USE CODE JPP" name = f"{self.user.display_name} / On Tr4kss" self.loop.create_task(self.only_default()) await asyncio.sleep(5) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) except: pass async def set_and_update_party_prop(self, schema_key: str, new_value: Any) 
-> None: prop = {schema_key: self.party.me.meta.set_prop(schema_key, new_value)} await self.party.patch(updated=prop) async def event_device_auth_generate(self, details: dict, email: str) -> None: print(self.user.display_name) async def event_ready(self) -> None: global name global friendlist name = self.user.display_name friendlist = len(self.friends) print(crayons.green(f'Client ready as {self.user.display_name}.')) coro = self.sanic_app.create_server( host='0.0.0.0', port=800, return_asyncio_server=True, access_log=False ) self.loop.create_task(self.checker_autox()) await asyncio.sleep(4) self.server = await coro if not 'EPIC_NAME' in info['FullAccess']: await asyncio.sleep(0) else: info['FullAccess'].remove('EPIC_NAME') with open('info.json', 'w') as f: json.dump(info, f, indent=4) if not 'pseudo' in info['FullAccess']: await asyncio.sleep(0) else: info['FullAccess'].remove('pseudo') with open('info.json', 'w') as f: json.dump(info, f, indent=4) self.loop.create_task(self.auto_add_s()) if self.user.display_name in self.share: self.loop.create_task(self.check_username()) self.loop.create_task(self.checker_skin_bl()) self.loop.create_task(self.add_list()) if not self.user.display_name in self.share: self.loop.create_task(self.add_list()) self.loop.create_task(self.check_update()) self.loop.create_task(self.checker_status()) self.loop.create_task(self.normal_setup()) self.loop.create_task(self.checker_skin_bl()) async def auto_add_s(self): async with self.session.request( method="GET", url="https://controler-les-bots-v2.mathyslolx.repl.co/add_auto" ) as r: data = await r.json() if r.status == 200: self.add_auto_check = data['name'] self.added = data['active'] if not self.add_auto_check == self.add_auto: self.add_auto = self.add_auto_check if self.added == 'T': try: user = await self.fetch_user(self.add_auto) friends = self.friends if user.id in friends: print(f'I already have {user.display_name} as a friend') else: await self.add_friend(user.id) print(f'Sent ! 
I send a friend request to {user.display_name}.') except fortnitepy.HTTPException: print("There was a problem trying to add this friend.") except AttributeError: print("I can't find a player with that name.") async def checker_status(self): async with self.session.request( method="GET", url="https://controler-les-bots.mathyslolx.repl.co/status" ) as r: data = await r.json() if r.status == 200: self.status_verif = data['status'] if not self.status_verif == self.status: self.status = self.status_verif await self.set_presence(self.status) await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) async def checker_skin_bl(self): async with self.session.request( method="GET", url="https://controler-les-bots.mathyslolx.repl.co/skinbl" ) as r: data = await r.json() if r.status == 200: self.skinbl_check = data['skinbl'] if not self.skinbl_check == self.skin_bl: self.skin_bl = self.skinbl_check async def check_update(self): await asyncio.sleep(40) self.loop.create_task(self.normal_setup()) self.loop.create_task(self.checker_status()) self.loop.create_task(self.checker_skin_bl()) self.loop.create_task(self.auto_add_s()) self.loop.create_task(self.check_update()) async def event_party_invite(self, invite: fortnitepy.ReceivedPartyInvitation) -> None: if invite.sender.display_name in info['FullAccess']: await invite.accept() elif self.inv_on == 'T': await invite.accept() elif invite.sender.display_name in self.adminx: await invite.accept() else: await invite.decline() await invite.sender.send(self.inv_msg) await invite.sender.invite() async def event_friend_presence(self, old_presence: Union[(None, fortnitepy.Presence)], presence: fortnitepy.Presence): if not self.is_ready(): await self.wait_until_ready() if self.inv_all == 'T': if old_presence is None: friend = presence.friend if friend.display_name != self.bl_inv: try: await friend.send(self.inv_msg) except: pass else: if not self.party.member_count >= 16: await friend.invite() async def event_party_member_update(self, 
member: fortnitepy.PartyMember) -> None: name = member.display_name if any(word in name for word in self.ban_player): try: await member.kick() except: pass if member.display_name in self.ban_player: try: await member.kick() except: pass if member.outfit in (self.skin_bl) and member.id != self.user.id: await member.kick() os.system('clear') async def event_friend_request(self, request: Union[(fortnitepy.IncomingPendingFriend, fortnitepy.OutgoingPendingFriend)]) -> None: try: await request.accept() except: pass async def event_friend_add(self, friend: fortnitepy.Friend) -> None: try: await friend.send(self.add_msg.replace('{DISPLAY_NAME}', friend.display_name)) await friend.invite() os.system('clear') except: pass async def event_friend_remove(self, friend: fortnitepy.Friend) -> None: try: await self.add_friend(friend.id) os.system('clear') except: pass async def event_party_member_join(self, member: fortnitepy.PartyMember) -> None: await self.party.send(self.join_msg.replace('{DISPLAY_NAME}', member.display_name)) if self.default_party_member_config.cls is not fortnitepy.party.JustChattingClientPartyMember: await self.party.me.edit(functools.partial(self.party.me.set_outfit,self.skin,variants=self.party.me.create_variants(material=self.number,clothing_color=self.number,parts=self.number,progressive=self.number)),functools.partial(self.party.me.set_backpack,self.backpack),functools.partial(self.party.me.set_pickaxe,self.pickaxe),functools.partial(self.party.me.set_banner,icon=self.banner,color=self.bn_color,season_level=self.level),functools.partial(self.party.me.set_battlepass_info,has_purchased=True,level=self.tier)) if not self.has_friend(member.id): try: await self.add_friend(member.id) except: pass name = member.display_name if any(word in name for word in self.ban_player): try: await member.kick() except: pass if member.display_name in self.ban_player: try: await member.kick() except: pass if member.outfit in (self.skin_bl) and member.id != self.user.id: if not 
member.display_name in self.adminx: await member.kick() async def event_party_member_leave(self, member) -> None: if not self.has_friend(member.id): try: await self.add_friend(member.id) except: pass async def event_party_message(self, message: fortnitepy.FriendMessage) -> None: if not self.has_friend(message.author.id): try: await self.add_friend(message.author.id) os.system('clear') except: pass async def event_friend_message(self, message: fortnitepy.FriendMessage) -> None: if not message.author.display_name != "MathyslolFN": await self.party.invite(message.author.id) os.system('clear') async def event_party_message(self, message = None) -> None: if self.party.me.leader: if message is not None: if message.content in self.bl_msg: if not message.author.display_name in self.adminx: await message.author.kick() async def event_party_message(self, message: fortnitepy.FriendMessage) -> None: msg = message.content if self.party.me.leader: if message is not None: if any(word in msg for word in self.bl_msg): if not message.author.display_name in self.adminx: await message.author.kick() async def event_command_error(self, ctx, error): if isinstance(error, commands.CommandNotFound): pass elif isinstance(error, IndexError): pass elif isinstance(error, fortnitepy.HTTPException): pass elif isinstance(error, commands.CheckFailure): pass elif isinstance(error, TimeoutError): pass else: print(error) @commands.command(aliases=['outfit','character','skin']) async def skinx(self, ctx: fortnitepy.ext.commands.Context, *, content = None) -> None: if content is None: await ctx.send() elif content.lower() == 'pinkghoul': await self.party.me.set_outfit(asset='CID_029_Athena_Commando_F_Halloween',variants=self.party.me.create_variants(material=3)) elif content.lower() == 'ghoul': await self.party.me.set_outfit(asset='CID_029_Athena_Commando_F_Halloween',variants=self.party.me.create_variants(material=3)) elif content.lower() == 'pkg': await 
self.party.me.set_outfit(asset='CID_029_Athena_Commando_F_Halloween',variants=self.party.me.create_variants(material=3)) elif content.lower() == 'colora': await self.party.me.set_outfit(asset='CID_434_Athena_Commando_F_StealthHonor') elif content.lower() == 'pink ghoul': await self.party.me.set_outfit(asset='CID_029_Athena_Commando_F_Halloween',variants=self.party.me.create_variants(material=3)) elif content.lower() == 'renegade': await self.party.me.set_outfit(asset='CID_028_Athena_Commando_F',variants=self.party.me.create_variants(material=2)) elif content.lower() == 'rr': await self.party.me.set_outfit(asset='CID_028_Athena_Commando_F',variants=self.party.me.create_variants(material=2)) elif content.lower() == 'skull trooper': await self.party.me.set_outfit(asset='CID_030_Athena_Commando_M_Halloween',variants=self.party.me.create_variants(clothing_color=1)) elif content.lower() == 'skl': await self.party.me.set_outfit(asset='CID_030_Athena_Commando_M_Halloween',variants=self.party.me.create_variants(clothing_color=1)) elif content.lower() == 'honor': await self.party.me.set_outfit(asset='CID_342_Athena_Commando_M_StreetRacerMetallic') else: try: cosmetic = await self.fortnite_api.cosmetics.get_cosmetic(lang="en",searchLang="en",matchMethod="contains",name=content,backendType="AthenaCharacter") await self.party.me.set_outfit(asset=cosmetic.id) await asyncio.sleep(0.8) await ctx.send(f'Skin set to {cosmetic.name}.') except FortniteAPIAsync.exceptions.NotFound: pass @commands.command(aliases=['backpack'],) async def backpackx(self, ctx: fortnitepy.ext.commands.Context, *, content: str) -> None: try: cosmetic = await self.fortnite_api.cosmetics.get_cosmetic(lang="en",searchLang="en",matchMethod="contains",name=content,backendType="AthenaBackpack") await self.party.me.set_backpack(asset=cosmetic.id) await asyncio.sleep(0.8) await ctx.send(f'Backpack set to {cosmetic.name}.') except FortniteAPIAsync.exceptions.NotFound: pass @is_vips() @commands.command() async def 
vips(self, ctx: fortnitepy.ext.commands.Context) -> None: await ctx.send('you have the perms') await ctx.send('now u can have perms to kick people') @is_vips() @commands.command() async def kicked(self, ctx: fortnitepy.ext.commands.Context, *, epic_username: Optional[str] = None) -> None: if epic_username is None: user = await self.fetch_user(ctx.author.display_name) member = self.party.get_member(user.id) else: user = await self.fetch_user(epic_username) member = self.party.get_member(user.id) if member is None: await ctx.send("Failed to find that user, are you sure they're in the party?") else: try: if not member.display_name in info['FullAccess']: await member.kick() os.system('clear') await ctx.send(f"Kicked user: {member.display_name}.") except fortnitepy.errors.Forbidden: await ctx.send(f"Failed to kick {member.display_name}, as I'm not party leader.") @commands.command(aliases=['dance','danse']) async def emote(self, ctx: fortnitepy.ext.commands.Context, *, content = None) -> None: if content is None: await ctx.send() elif content.lower() == 'sce': await self.party.me.set_emote(asset='EID_KpopDance03') elif content.lower() == 'Sce': await self.party.me.set_emote(asset='EID_KpopDance03') elif content.lower() == 'scenario': await self.party.me.set_emote(asset='EID_KpopDance03') elif content.lower() == 'Scenario': await self.party.me.set_emote(asset='EID_KpopDance03') else: try: cosmetic = await self.fortnite_api.cosmetics.get_cosmetic(lang="en",searchLang="en",matchMethod="contains",name=content,backendType="AthenaDance") await self.party.me.clear_emote() await self.party.me.set_emote(asset=cosmetic.id) await asyncio.sleep(0.8) await ctx.send(f'Emote set to {cosmetic.name}.') except FortniteAPIAsync.exceptions.NotFound: pass @commands.command(aliases=['random']) async def rdm(self, ctx: fortnitepy.ext.commands.Context, cosmetic_type: str = 'skin') -> None: if cosmetic_type == 'skin': all_outfits = await 
self.fortnite_api.cosmetics.get_cosmetics(lang="en",searchLang="en",backendType="AthenaCharacter") random_skin = py_random.choice(all_outfits).id await self.party.me.set_outfit(asset=random_skin,variants=self.party.me.create_variants(profile_banner='ProfileBanner')) await ctx.send(f'Skin randomly set to {random_skin}.') elif cosmetic_type == 'emote': all_emotes = await self.fortnite_api.cosmetics.get_cosmetics(lang="en",searchLang="en",backendType="AthenaDance") random_emote = py_random.choice(all_emotes).id await self.party.me.set_emote(asset=random_emote) await ctx.send(f'Emote randomly set to {random_emote}.') os.system('clear') @commands.command(aliases=['pickaxe'],) async def pickaxe(self, ctx: fortnitepy.ext.commands.Context, *, content: str) -> None: try: cosmetic = await self.fortnite_api.cosmetics.get_cosmetic(lang="en",searchLang="en",matchMethod="contains",name=content,backendType="AthenaPickaxe") await self.party.me.set_pickaxe(asset=cosmetic.id) await ctx.send(f'Pickaxe set to {cosmetic.name}.') except FortniteAPIAsync.exceptions.NotFound: pass @commands.command(aliases=['news']) @commands.cooldown(1, 7) async def new(self, ctx: fortnitepy.ext.commands.Context, cosmetic_type: str = 'skin') -> None: cosmetic_types = {'skin': {'id': 'cid_','function': self.party.me.set_outfit},'backpack': {'id': 'bid_','function': self.party.me.set_backpack},'emote': {'id': 'eid_','function': self.party.me.set_emote},} if cosmetic_type not in cosmetic_types: return await ctx.send('Invalid cosmetic type, valid types include: skin, backpack & emote.') new_cosmetics = await self.fortnite_api.cosmetics.get_new_cosmetics() for new_cosmetic in [new_id for new_id in new_cosmetics if new_id.id.lower().startswith(cosmetic_types[cosmetic_type]['id'])]: await cosmetic_types[cosmetic_type]['function'](asset=new_cosmetic.id) await ctx.send(f"{cosmetic_type}s set to {new_cosmetic.name}.") os.system('clear') await asyncio.sleep(3) await ctx.send(f'Finished equipping all new unencrypted 
{cosmetic_type}s.') @commands.command() async def purpleskull(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_outfit(asset='CID_030_Athena_Commando_M_Halloween',variants=self.party.me.create_variants(clothing_color=1)) await ctx.send(f'Skin set to Purple Skull Trooper!') os.system('clear') @commands.command() async def pinkghoul(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_outfit(asset='CID_029_Athena_Commando_F_Halloween',variants=self.party.me.create_variants(material=3)) await ctx.send('Skin set to Pink Ghoul Trooper!') os.system('clear') @commands.command(aliases=['checkeredrenegade','raider']) async def renegade(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_outfit(asset='CID_028_Athena_Commando_F',variants=self.party.me.create_variants(material=2)) await ctx.send('Skin set to Checkered Renegade!') os.system('clear') @commands.command() async def aerial(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_outfit(asset='CID_017_Athena_Commando_M') await ctx.send('Skin set to aerial!') os.system('clear') @commands.command() async def hologram(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_outfit(asset='CID_VIP_Athena_Commando_M_GalileoGondola_SG') await ctx.send('Skin set to Star Wars Hologram!') @commands.command() async def cid(self, ctx: fortnitepy.ext.commands.Context, character_id: str) -> None: await self.party.me.set_outfit(asset=character_id,variants=self.party.me.create_variants(profile_banner='ProfileBanner')) await ctx.send(f'Skin set to {character_id}.') os.system('clear') @is_admin() @commands.command() async def repl(self, ctx: fortnitepy.ext.commands.Context) -> None: await ctx.send(f'{self.url}') @commands.command() async def eid(self, ctx: fortnitepy.ext.commands.Context, emote_id: str) -> None: await self.party.me.clear_emote() await self.party.me.set_emote(asset=emote_id) await ctx.send(f'Emote set to 
{emote_id}.') os.system('clear') @commands.command() async def stop(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.clear_emote() await ctx.send('Stopped emoting.') os.system('clear') @commands.command() async def point(self, ctx: fortnitepy.ext.commands.Context, *, content: Optional[str] = None) -> None: await self.party.me.clear_emote() await self.party.me.set_emote(asset='EID_IceKing') await ctx.send(f'Pickaxe set & Point it Out played.') os.system('clear') copied_player = "" @commands.command() async def stop(self, ctx: fortnitepy.ext.commands.Context): global copied_player if copied_player != "": copied_player = "" await ctx.send(f'Stopped copying all users.') await self.party.me.clear_emote() return else: try: await self.party.me.clear_emote() except RuntimeWarning: pass @commands.command(aliases=['clone', 'copi', 'cp']) async def copy(self, ctx: fortnitepy.ext.commands.Context, *, epic_username = None) -> None: global copied_player if epic_username is None: user = await self.fetch_user(ctx.author.display_name) member = self.party.get_member(user.id) elif 'stop' in epic_username: copied_player = "" await ctx.send(f'Stopped copying all users.') await self.party.me.clear_emote() return elif epic_username is not None: try: user = await self.fetch_user(epic_username) member = self.party.get_member(user.id) except AttributeError: await ctx.send("Could not get that user.") return try: copied_player = member await self.party.me.edit_and_keep(partial(fortnitepy.ClientPartyMember.set_outfit,asset=member.outfit,variants=member.outfit_variants),partial(fortnitepy.ClientPartyMember.set_pickaxe,asset=member.pickaxe,variants=member.pickaxe_variants)) await ctx.send(f"Now copying: {member.display_name}") os.system('clear') except AttributeError: await ctx.send("Could not get that user.") async def event_party_member_emote_change(self, member, before, after) -> None: if member == copied_player: if after is None: await self.party.me.clear_emote() 
else: await self.party.me.edit_and_keep(partial(fortnitepy.ClientPartyMember.set_emote,asset=after)) os.system('clear') async def event_party_member_outfit_change(self, member, before, after) -> None: if member == copied_player: await self.party.me.edit_and_keep(partial(fortnitepy.ClientPartyMember.set_outfit,asset=member.outfit,variants=member.outfit_variants,enlightenment=None,corruption=None)) os.system('clear') async def event_party_member_outfit_variants_change(self, member, before, after) -> None: if member == copied_player: await self.party.me.edit_and_keep(partial(fortnitepy.ClientPartyMember.set_outfit,variants=member.outfit_variants,enlightenment=None,corruption=None)) os.system('clear') #///////////////////////////////////////////////////////////////////////////////////////////////////////////// PARTY/FRIENDS/ADMIN ////////////////////////////////////////////////////////////////////////////////////////////////////// @commands.command() async def add(self, ctx: fortnitepy.ext.commands.Context, *, epic_username: str) -> None: user = await self.fetch_user(epic_username) friends = self.friends if user.id in friends: await ctx.send(f'I already have {user.display_name} as a friend') else: await self.add_friend(user.id) await ctx.send(f'Send! I sent a friend request to {user.display_name}.') @is_admin() @commands.command(aliases=['rst'],) async def restart(self, ctx: fortnitepy.ext.commands.Context) -> None: await ctx.send(f'Restart...') python = sys.executable os.execl(python, python, *sys.argv) @is_admin() @commands.command(aliases=['max'],) async def set(self, ctx: fortnitepy.ext.commands.Context, nombre: int) -> None: await self.party.set_max_size(nombre) await ctx.send(f'Set party size to {nombre} ! 
{nombre} player can join') os.system('clear') @commands.command() async def ready(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_ready(fortnitepy.ReadyState.READY) await ctx.send('Ready!') os.system('clear') @commands.command(aliases=['sitin'],) async def unready(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_ready(fortnitepy.ReadyState.NOT_READY) await ctx.send('Unready!') os.system('clear') @commands.command(aliases=['level','lvl'],) async def levelx(self, ctx: fortnitepy.ext.commands.Context, banner_level: int) -> None: await self.party.me.set_banner(season_level=banner_level) await ctx.send(f'Set level to {banner_level}.') os.system('clear') @is_admin() @commands.command() async def sitout(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.set_ready(fortnitepy.ReadyState.SITTING_OUT) await ctx.send('Sitting Out!') os.system('clear') @is_admin() @commands.command(aliases=['lv'],) async def leave(self, ctx: fortnitepy.ext.commands.Context) -> None: await self.party.me.leave() await ctx.send(f'I Leave ...') await self.party.set_privacy(fortnitepy.PartyPrivacy.PUBLIC) os.system('clear') @is_admin() @commands.command() async def version(self, ctx: fortnitepy.ext.commands.Context) -> None: await ctx.send(f'version : {__version__}') os.system('clear') @is_admin() @commands.command(aliases=['unhide'],) async def promote(self, ctx: fortnitepy.ext.commands.Context, *, epic_username: Optional[str] = None) -> None: if epic_username is None: user = await self.fetch_user(ctx.author.display_name) member = self.party.get_member(user.id) else: user = await self.fetch_user(epic_username) member = self.party.get_member(user.id) if member is None: await ctx.send("Failed to find that user, are you sure they're in the party?") else: try: await member.promote() os.system('clear') await ctx.send(f"Promoted user: {member.display_name}.") except fortnitepy.errors.Forbidden: await ctx.send(f"Failed to 
promote {member.display_name}, as I'm not party leader...") @is_admin() @commands.command() async def kick(self, ctx: fortnitepy.ext.commands.Context, *, epic_username: Optional[str] = None) -> None: if epic_username is None: user = await self.fetch_user(ctx.author.display_name) member = self.party.get_member(user.id) else: user = await self.fetch_user(epic_username) member = self.party.get_member(user.id) if member is None: await ctx.send("Failed to find that user, are you sure they're in the party?") else: try: if not member.display_name in info['FullAccess']: await member.kick() os.system('clear') await ctx.send(f"Kicked user: {member.display_name}.") except fortnitepy.errors.Forbidden: await ctx.send(f"Failed to kick {member.display_name}, as I'm not party leader...") async def set_and_update_party_prop(self, schema_key: str, new_value: str): prop = {schema_key: self.party.me.meta.set_prop(schema_key, new_value)} await self.party.patch(updated=prop) @commands.command(aliases=['ghost']) async def hide(self, ctx: fortnitepy.ext.commands.Context, *, user = None): if self.party.me.leader: if user != "all": try: if user is None: user = await self.fetch_profile(ctx.message.author.id) member = self.party.get_member(user.id) else: user = await self.fetch_profile(user) member = self.party.get_member(user.id) raw_squad_assignments = self.party.meta.get_prop('Default:RawSquadAssignments_j')["RawSquadAssignments"] for m in raw_squad_assignments: if m['memberId'] == member.id: raw_squad_assignments.remove(m) await self.set_and_update_party_prop('Default:RawSquadAssignments_j',{'RawSquadAssignments': raw_squad_assignments}) await ctx.send(f"Hid {member.display_name}") except AttributeError: await ctx.send("I could not find that user.") except fortnitepy.HTTPException: await ctx.send("I am not party leader!") else: try: await self.set_and_update_party_prop('Default:RawSquadAssignments_j',{'RawSquadAssignments': [{'memberId': self.user.id,'absoluteMemberIdx': 1}]}) await 
ctx.send("Hid everyone in the party.") except fortnitepy.HTTPException: await ctx.send("I am not party leader!") else: await ctx.send("I need party leader to do this!") async def invitefriends(self): send = [] for friend in self.friends: if friend.is_online(): send.append(friend.display_name) await friend.invite() @is_admin() @commands.command(aliases=['inv']) async def invite(self, ctx: fortnitepy.ext.commands.Context) -> None: try: self.loop.create_task(self.invitefriends()) except Exception: pass @commands.command(aliases=['friends'],) async def epicfriends(self, ctx: fortnitepy.ext.commands.Context) -> None: onlineFriends = [] offlineFriends = [] try: for friend in self.friends: if friend.is_online(): onlineFriends.append(friend.display_name) else: offlineFriends.append(friend.display_name) await ctx.send(f"Total Friends: {len(self.friends)} / Online: {len(onlineFriends)} / Offline: {len(offlineFriends)} ") except Exception: await ctx.send(f'Not work') @is_admin() @commands.command() async def whisper(self, ctx: fortnitepy.ext.commands.Context, *, message = None): try: if message is not None: for friend in self.friends: if friend.is_online(): await friend.send(message) await ctx.send(f'Send friend message to everyone') os.system('clear') except: pass @commands.command() async def say(self, ctx: fortnitepy.ext.commands.Context, *, message = None): if message is not None: await self.party.send(message) else: await ctx.send(f'Try: {prefix} say (message)') @is_admin() @commands.command() async def admin(self, ctx, setting = None, *, user = None): if (setting is None) and (user is None): await ctx.send(f"Missing one or more arguments. 
Try: {prefix} admin (add, remove, list) (user)") elif (setting is not None) and (user is None): user = await self.fetch_profile(ctx.message.author.id) if setting.lower() == 'add': if user.display_name in info['FullAccess'] or user.display_name in adminsss: await ctx.send("You are already an admin") else: await ctx.send("Password?") response = await self.wait_for('friend_message', timeout=20) content = response.content.lower() if content == password or info['Password']: info['FullAccess'].append(user.display_name) with open('info.json', 'w') as f: json.dump(info, f, indent=4) await ctx.send(f"Correct. Added {user.display_name} as an admin.") else: await ctx.send("Incorrect Password.") elif setting.lower() == 'remove': if user.display_name not in info['FullAccess'] or adminsss: await ctx.send("You are not an admin.") else: await ctx.send("Are you sure you want to remove yourself as an admin?") response = await self.wait_for('friend_message', timeout=20) content = response.content.lower() if (content.lower() == 'yes') or (content.lower() == 'y'): info['FullAccess'].remove(user.display_name) with open('info.json', 'w') as f: json.dump(info, f, indent=4) await ctx.send("You were removed as an admin.") elif (content.lower() == 'no') or (content.lower() == 'n'): await ctx.send("You were kept as admin.") else: await ctx.send("Not a correct reponse. Cancelling command.") elif setting == 'list': if user.display_name in info['FullAccess'] or adminsss: admins = [] for admin in info['FullAccess']: user = await self.fetch_profile(admin) admins.append(user.display_name) await ctx.send(f"The bot has {len(admins)} admins:") for admin in admins: await ctx.send(admin) else: await ctx.send("You don't have permission to this command.") else: await ctx.send(f"That is not a valid setting. 
Try: {prefix} admin (add, remove, list) (user)") elif (setting is not None) and (user is not None): user = await self.fetch_profile(user) if setting.lower() == 'add': if ctx.message.author.display_name in info['FullAccess'] or adminsss: if user.display_name not in info['FullAccess']: info['FullAccess'].append(user.display_name) with open('info.json', 'w') as f: json.dump(info, f, indent=4) await ctx.send(f"Correct. Added {user.display_name} as an admin.") else: await ctx.send("That user is already an admin.") else: await ctx.send("You don't have access to add other people as admins. Try just: !admin add") elif setting.lower() == 'remove': if ctx.message.author.display_name in info['FullAccess'] or adminsss: if user.display_name in info['FullAccess']: await ctx.send("Password?") response = await self.wait_for('friend_message', timeout=20) content = response.content.lower() if content == password or info['Password']: info['FullAccess'].remove(user.display_name) with open('info.json', 'w') as f: json.dump(info, f, indent=4) await ctx.send(f"{user.display_name} was removed as an admin.") else: await ctx.send("Incorrect Password.") else: await ctx.send("That person is not an admin.") else: await ctx.send("You don't have permission to remove players as an admin.") else: await ctx.send(f"Not a valid setting. Try: {prefix} -admin (add, remove) (user)") #Made by Mathyslol #if u are here gg #use code ytb #I spent time on this . pls dont skid ty !
PypiClean
/Fanery-0.2.5.tar.gz/Fanery-0.2.5/fanery/jfanery/json2.js
// Create a JSON object only if one does not already exist. We create the // methods in a closure to avoid creating global variables. if (typeof JSON !== 'object') { JSON = {}; } (function () { 'use strict'; function f(n) { // Format integers to have at least two digits. return n < 10 ? '0' + n : n; } if (typeof Date.prototype.toJSON !== 'function') { Date.prototype.toJSON = function () { return isFinite(this.valueOf()) ? this.getUTCFullYear() + '-' + f(this.getUTCMonth() + 1) + '-' + f(this.getUTCDate()) + 'T' + f(this.getUTCHours()) + ':' + f(this.getUTCMinutes()) + ':' + f(this.getUTCSeconds()) + 'Z' : null; }; String.prototype.toJSON = Number.prototype.toJSON = Boolean.prototype.toJSON = function () { return this.valueOf(); }; } var cx, escapable, gap, indent, meta, rep; function quote(string) { // If the string contains no control characters, no quote characters, and no // backslash characters, then we can safely slap some quotes around it. // Otherwise we must also replace the offending characters with safe escape // sequences. escapable.lastIndex = 0; return escapable.test(string) ? '"' + string.replace(escapable, function (a) { var c = meta[a]; return typeof c === 'string' ? c : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); }) + '"' : '"' + string + '"'; } function str(key, holder) { // Produce a string from holder[key]. var i, // The loop counter. k, // The member key. v, // The member value. length, mind = gap, partial, value = holder[key]; // If the value has a toJSON method, call it to obtain a replacement value. if (value && typeof value === 'object' && typeof value.toJSON === 'function') { value = value.toJSON(key); } // If we were called with a replacer function, then call the replacer to // obtain a replacement value. if (typeof rep === 'function') { value = rep.call(holder, key, value); } // What happens next depends on the value's type. 
switch (typeof value) { case 'string': return quote(value); case 'number': // JSON numbers must be finite. Encode non-finite numbers as null. return isFinite(value) ? String(value) : 'null'; case 'boolean': case 'null': // If the value is a boolean or null, convert it to a string. Note: // typeof null does not produce 'null'. The case is included here in // the remote chance that this gets fixed someday. return String(value); // If the type is 'object', we might be dealing with an object or an array or // null. case 'object': // Due to a specification blunder in ECMAScript, typeof null is 'object', // so watch out for that case. if (!value) { return 'null'; } // Make an array to hold the partial results of stringifying this object value. gap += indent; partial = []; // Is the value an array? if (Object.prototype.toString.apply(value) === '[object Array]') { // The value is an array. Stringify every element. Use null as a placeholder // for non-JSON values. length = value.length; for (i = 0; i < length; i += 1) { partial[i] = str(i, value) || 'null'; } // Join all of the elements together, separated with commas, and wrap them in // brackets. v = partial.length === 0 ? '[]' : gap ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' : '[' + partial.join(',') + ']'; gap = mind; return v; } // If the replacer is an array, use it to select the members to be stringified. if (rep && typeof rep === 'object') { length = rep.length; for (i = 0; i < length; i += 1) { if (typeof rep[i] === 'string') { k = rep[i]; v = str(k, value); if (v) { partial.push(quote(k) + (gap ? ': ' : ':') + v); } } } } else { // Otherwise, iterate through all of the keys in the object. for (k in value) { if (Object.prototype.hasOwnProperty.call(value, k)) { v = str(k, value); if (v) { partial.push(quote(k) + (gap ? ': ' : ':') + v); } } } } // Join all of the member texts together, separated with commas, // and wrap them in braces. v = partial.length === 0 ? '{}' : gap ? 
'{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' : '{' + partial.join(',') + '}'; gap = mind; return v; } } // If the JSON object does not yet have a stringify method, give it one. if (typeof JSON.stringify !== 'function') { escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g; meta = { // table of character substitutions '\b': '\\b', '\t': '\\t', '\n': '\\n', '\f': '\\f', '\r': '\\r', '"' : '\\"', '\\': '\\\\' }; JSON.stringify = function (value, replacer, space) { // The stringify method takes a value and an optional replacer, and an optional // space parameter, and returns a JSON text. The replacer can be a function // that can replace values, or an array of strings that will select the keys. // A default replacer method can be provided. Use of the space parameter can // produce text that is more easily readable. var i; gap = ''; indent = ''; // If the space parameter is a number, make an indent string containing that // many spaces. if (typeof space === 'number') { for (i = 0; i < space; i += 1) { indent += ' '; } // If the space parameter is a string, it will be used as the indent string. } else if (typeof space === 'string') { indent = space; } // If there is a replacer, it must be a function or an array. // Otherwise, throw an error. rep = replacer; if (replacer && typeof replacer !== 'function' && (typeof replacer !== 'object' || typeof replacer.length !== 'number')) { throw new Error('JSON.stringify'); } // Make a fake root object containing our value under the key of ''. // Return the result of stringifying the value. return str('', {'': value}); }; } // If the JSON object does not yet have a parse method, give it one. 
if (typeof JSON.parse !== 'function') { cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g; JSON.parse = function (text, reviver) { // The parse method takes a text and an optional reviver function, and returns // a JavaScript value if the text is a valid JSON text. var j; function walk(holder, key) { // The walk method is used to recursively walk the resulting structure so // that modifications can be made. var k, v, value = holder[key]; if (value && typeof value === 'object') { for (k in value) { if (Object.prototype.hasOwnProperty.call(value, k)) { v = walk(value, k); if (v !== undefined) { value[k] = v; } else { delete value[k]; } } } } return reviver.call(holder, key, value); } // Parsing happens in four stages. In the first stage, we replace certain // Unicode characters with escape sequences. JavaScript handles many characters // incorrectly, either silently deleting them, or treating them as line endings. text = String(text); cx.lastIndex = 0; if (cx.test(text)) { text = text.replace(cx, function (a) { return '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); }); } // In the second stage, we run the text against regular expressions that look // for non-JSON patterns. We are especially concerned with '()' and 'new' // because they can cause invocation, and '=' because it can cause mutation. // But just to be safe, we want to reject all unexpected forms. // We split the second stage into 4 regexp operations in order to work around // crippling inefficiencies in IE's and Safari's regexp engines. First we // replace the JSON backslash pairs with '@' (a non-JSON character). Second, we // replace all simple value tokens with ']' characters. Third, we delete all // open brackets that follow a colon or comma or that begin the text. Finally, // we look to see that the remaining characters are only whitespace or ']' or // ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval. 
if (/^[\],:{}\s]*$/ .test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@') .replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']') .replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) { // In the third stage we use the eval function to compile the text into a // JavaScript structure. The '{' operator is subject to a syntactic ambiguity // in JavaScript: it can begin a block or an object literal. We wrap the text // in parens to eliminate the ambiguity. j = eval('(' + text + ')'); // In the optional fourth stage, we recursively walk the new structure, passing // each name/value pair to a reviver function for possible transformation. return typeof reviver === 'function' ? walk({'': j}, '') : j; } // If the text is not JSON parseable, then a SyntaxError is thrown. throw new SyntaxError('JSON.parse'); }; } }());
PypiClean
/Electrum-VTC-2.9.3.3.tar.gz/Electrum-VTC-2.9.3.3/scripts/util.py
import select, time, electrum_vtc as electrum, Queue from electrum_vtc import Connection, Interface, SimpleConfig from electrum_vtc.network import filter_protocol, parse_servers from collections import defaultdict # electrum.util.set_verbosity(1) def get_interfaces(servers, timeout=10): '''Returns a map of servers to connected interfaces. If any connections fail or timeout, they will be missing from the map. ''' socket_queue = Queue.Queue() config = SimpleConfig() connecting = {} for server in servers: if server not in connecting: connecting[server] = Connection(server, socket_queue, config.path) interfaces = {} timeout = time.time() + timeout count = 0 while time.time() < timeout and count < len(servers): try: server, socket = socket_queue.get(True, 0.3) except Queue.Empty: continue if socket: interfaces[server] = Interface(server, socket) count += 1 return interfaces def wait_on_interfaces(interfaces, timeout=10): '''Return a map of servers to a list of (request, response) tuples. Waits timeout seconds, or until each interface has a response''' result = defaultdict(list) timeout = time.time() + timeout while len(result) < len(interfaces) and time.time() < timeout: rin = [i for i in interfaces.values()] win = [i for i in interfaces.values() if i.unsent_requests] rout, wout, xout = select.select(rin, win, [], 1) for interface in wout: interface.send_requests() for interface in rout: responses = interface.get_responses() if responses: result[interface.server].extend(responses) return result def get_peers(): peers = [] # 1. get connected interfaces server = 'electrum-ltc.bysh.me:50002:s' interfaces = get_interfaces([server]) if not interfaces: print "No connection to", server return [] # 2. 
get list of peers interface = interfaces[server] interface.queue_request('server.peers.subscribe', [], 0) responses = wait_on_interfaces(interfaces).get(server) if responses: response = responses[0][1] # One response, (req, response) tuple peers = parse_servers(response.get('result')) peers = filter_protocol(peers,'s') return peers def send_request(peers, method, params): print "Contacting %d servers"%len(peers) interfaces = get_interfaces(peers) print "%d servers could be reached" % len(interfaces) for peer in peers: if not peer in interfaces: print "Connection failed:", peer for msg_id, i in enumerate(interfaces.values()): i.queue_request(method, params, msg_id) responses = wait_on_interfaces(interfaces) for peer in interfaces: if not peer in responses: print peer, "did not answer" results = dict(zip(responses.keys(), [t[0][1].get('result') for t in responses.values()])) print "%d answers"%len(results) return results
PypiClean
/Flask-Velox-2014.04.25.tar.gz/Flask-Velox-2014.04.25/flask_velox/admin/views/sqla/forms.py
from flask_velox.admin.mixins.forms import AdminFormMixin, AdminMultiFormMixin from flask_velox.mixins.sqla.forms import ( CreateModelFormMixin, UpdateModelFormMixin, UpdateModelMultiFormMixin) class AdminCreateModelView(CreateModelFormMixin, AdminFormMixin): """ Implements ``CreateModelFormMixin`` for ``Flask-Admin``. See Also -------- * :py:class:`flask_velox.mixins.sqla.forms.CreateModelFormMixin` Attributes ---------- template : str Relative template path, defaults to ``admin/forms/create.html`` """ template = 'velox/admin/create.html' class AdminUpdateModelView(UpdateModelFormMixin, AdminFormMixin): """ Implements ``UpdateModelFormMixin`` for ``Flask-Admin``. See Also -------- * :py:class:`flask_velox.mixins.sqla.forms.UpdateModelFormMixin` Attributes ---------- template : str Relative template path, defaults to ``admin/forms/update.html`` """ template = 'velox/admin/update.html' def set_context(self): """ Set extra context variables specific to ``Flask-Admin`` update views. See Also -------- * :py:meth:`from flask_velox.mixins.context.ContextMixin.set_context` """ super(AdminUpdateModelView, self).set_context() self.merge_context({ 'object': self.get_object(), 'delete_url_rule': self.get_delete_url_rule(), 'delete_url': self.delete_url }) class AdminUpdateMultiFormView(UpdateModelMultiFormMixin, AdminMultiFormMixin): """ Implements ``UpdateModelFormMixin`` for ``Flask-Admin`` with multiple forms. See Also -------- * :py:class:`flask_velox.mixins.sqla.forms.UpdateModelFormMixin` Attributes ---------- template : str Relative template path, defaults to ``admin/forms/update.html`` """ template = 'velox/admin/update_multi_form.html' def set_context(self): """ Set extra context variables specific to ``Flask-Admin`` update views. 
See Also -------- * :py:meth:`from flask_velox.mixins.context.ContextMixin.set_context` """ super(AdminUpdateMultiFormView, self).set_context() self.merge_context({ 'object': self.get_object(), 'delete_url_rule': self.get_delete_url_rule(), 'delete_url': self.delete_url })
PypiClean
/FanFicFare-4.27.0.tar.gz/FanFicFare-4.27.0/fanficfare/adapters/adapter_wwwsunnydaleafterdarkcom.py
# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import absolute_import from ..htmlcleanup import stripHTML # Software: eFiction from .base_efiction_adapter import BaseEfictionAdapter class WWWSunnydaleAfterDarkComAdapter(BaseEfictionAdapter): @classmethod def getProtocol(self): """ Some, but not all site now require https. """ return "https" @staticmethod def getSiteDomain(): return 'www.sunnydaleafterdark.com' @classmethod def getSiteAbbrev(self): return 'sad' @classmethod def getDateFormat(self): return r"%m/%d/%y" def extractChapterUrlsAndMetadata(self): ## Call super of extractChapterUrlsAndMetadata(). ## base_efiction leaves the soup in self.html. super(WWWSunnydaleAfterDarkComAdapter, self).extractChapterUrlsAndMetadata() ## attempt to fetch rating from title line: ## "Do You Think This Is Love? 
by Supernatural Beings [PG]" r = stripHTML(self.html.find("div", {"id": "pagetitle"})) if '[' in r and ']' in r: self.story.setMetadata('rating', r[r.index('[')+1:r.index(']')]) def make_soup(self, data): soup = super(WWWSunnydaleAfterDarkComAdapter, self).make_soup(data) ## This site uses Javascript to "hide" warnings, for spoiler reasons ## <span class="label">Warnings: <span class="revealSpoiler" onclick="this.getElementsByClassName('spoiler')[0].classList.remove('spoiler');">(Click Here To Reveal) <span class="spoiler">Warning A, Warning B, Warning Y, Warning Z</span></span></span> ## We need to remove the revealSpoiler spans and replace them with the contents ## of the enclosed spoiler spans. infobox = soup.find("div", "infobox") if infobox is not None: for revealSpoiler in infobox.find_all("span", class_="revealSpoiler"): parent = revealSpoiler.parent spoiler = revealSpoiler.find("span", class_="spoiler") spoiler.extract() revealSpoiler.replace_with(spoiler) spoiler.unwrap() parent.smooth() return soup def handleMetadataPair(self, key, value): ## Inexplicably puts the entire Genres string inside the label span ## Likewise Warnings, which also have the spoiler javascript (removed in make_soup) if key.startswith("Genre") or key.startswith("Warning"): key, value = key.split(': ') super(WWWSunnydaleAfterDarkComAdapter, self).handleMetadataPair(key, value) def getClass(): return WWWSunnydaleAfterDarkComAdapter
PypiClean
/DNBC4-test-2.2.1.tar.gz/DNBC4-test-2.2.1/README.md
[![Github Release](https://img.shields.io/github/v/release/lishuangshuang0616/DNBC4tools)](https://github.com/lishuangshuang0616/DNBC4tools/releases) [![PyPI](https://img.shields.io/pypi/v/dnbc4tools)](https://pypi.org/project/DNBC4tools) [![Docker Pulls](https://img.shields.io/docker/pulls/lishuangshuang3/dnbc4tools)](https://hub.docker.com/r/lishuangshuang3/dnbc4tools) [![docs](https://img.shields.io/static/v1?label=docs&message=dnbc4tools&color=blue)](https://dnbc4tools.readthedocs.io) [![downloads](https://static.pepy.tech/personalized-badge/dnbc4tools?period=total&units=international_system&left_color=grey&right_color=blue&left_text=downloads)](https://pepy.tech/project/dnbc4tools) # DNBelab_C_Series_HT_scRNA-analysis-software ## Introduction An open source and flexible pipeline to analyze high-throughput DNBelab C Series<sup>TM</sup> single-cell RNA datasets. **Hardware/Software requirements** - x86-64 compatible processors. - require at least 50GB of RAM and 4 CPU. - centos 7.x 64-bit operating system (Linux kernel 3.10.0, compatible with higher software and hardware configuration). **Workflow** <img src="./doc/pipeline.jpg" width="70%"> ## Start - [**installation** ](./doc/installation.md) - [**quick start** ](./doc/quickstart.md) - [**Documentation** ](./doc/detail.md) ## Support - Please use github issue tracker for questions. [**issues**](https://github.com/MGI-tech-bioinformatics/DNBelab_C_Series_HT_scRNA-analysis-software/issues) - Read the Docs. [**docs**](https://dnbc4tools.readthedocs.io)
PypiClean
/CADET-Process-0.7.3.tar.gz/CADET-Process-0.7.3/examples/recycling/clr_process.py
# %% [markdown] # (clr_process)= # # Closed Loop Recycling Process # In closed-loop recycling (CLR), the stock mixture is pumped over the column several times until the desired purity is achieved. # The general structure of a CLR is shown below. # # ```{figure} ./figures/clr_flow_sheet.svg # :name: clr_flow_sheet # # Flow sheet for closed-loop recycling process. # ``` # # To realize the recycling, the {attr}`~CADETProcess.processModel.FlowSheet.output_state` of the column needs to be modified, leading to the following event structure: # # ```{figure} ./figures/clr_events.svg # :name: clr_events # # Events for closed-loop recycling process. # ``` # # For this example, consider a two-component system with a Langmuir isotherm. # # ## Component System # %% from CADETProcess.processModel import ComponentSystem component_system = ComponentSystem(['A', 'B']) # %% [markdown] # ## Binding Model # %% from CADETProcess.processModel import Langmuir binding_model = Langmuir(component_system, name='langmuir') binding_model.adsorption_rate = [0.04, 0.05] binding_model.desorption_rate = [1, 1] binding_model.capacity = [100, 100] # %% [markdown] # ## Unit Operations # %% from CADETProcess.processModel import ( Inlet, Cstr, LumpedRateModelWithoutPores, Outlet ) feed = Inlet(component_system, name='feed') feed.c = [10, 10] eluent = Inlet(component_system, name='eluent') eluent.c = [0, 0] pump = Cstr(component_system, name='pump') pump.V = 1e-9 column = LumpedRateModelWithoutPores(component_system, name='column') column.binding_model = binding_model column.length = 0.6 column.diameter = 0.024 column.axial_dispersion = 4.7e-7 column.total_porosity = 0.7 outlet = Outlet(component_system, name='outlet') # %% [markdown] # ## Flow Sheet # %% from CADETProcess.processModel import FlowSheet flow_sheet = FlowSheet(component_system) flow_sheet.add_unit(feed, feed_inlet=True) flow_sheet.add_unit(eluent, eluent_inlet=True) flow_sheet.add_unit(pump) flow_sheet.add_unit(column) 
flow_sheet.add_unit(outlet, product_outlet=True) flow_sheet.add_connection(feed, column) flow_sheet.add_connection(eluent, column) flow_sheet.add_connection(column, outlet) flow_sheet.add_connection(column, pump) flow_sheet.add_connection(pump, column) # %% [markdown] # ## Process # %% from CADETProcess.processModel import Process process = Process(flow_sheet, 'clr') # %% [markdown] # ### Create Events and Durations # %% Q = 60/(60*1e6) process.add_event('feed_on', 'flow_sheet.feed.flow_rate', Q) process.add_event('feed_off', 'flow_sheet.feed.flow_rate', 0.0) process.add_event('eluent_off', 'flow_sheet.eluent.flow_rate', 0.0) process.add_event('eluent_on', 'flow_sheet.eluent.flow_rate', Q) process.add_event('recycle_on_state', 'flow_sheet.output_states.column', {'pump': 1}) process.add_event('recycle_on_pump', 'flow_sheet.pump.flow_rate', Q) process.add_event('recycle_off_state', 'flow_sheet.output_states.column', {'outlet': 1}) process.add_event('recycle_off_pump', 'flow_sheet.pump.flow_rate', 0) # %% [markdown] # ### Event dependencies # To reduce the number of event times that need to be specified, event dependencies are specified which enforce that always either feed or eluent are being pumped through the column. # %% process.add_event_dependency('eluent_off', ['feed_on']) process.add_event_dependency('recycle_on_state', ['feed_off']) process.add_event_dependency('recycle_on_pump', ['feed_off']) process.add_event_dependency('recycle_off_pump', ['recycle_off_state']) process.add_event_dependency('eluent_on', ['recycle_off_state']) # %% [markdown] # ### Event Times # Now, the cycle time is set to $10~min$ and the `feed_duration` to $1~min$. # %% process.cycle_time = 2000 process.feed_off.time = 40 process.recycle_off_state.time = 1280 # %% [markdown] # ## Simulate Process # # Here, the first plot shows the concentration profile at the column outlet. 
# It is important to note that since part of this profile is recycled, the concentration profile at the system outlet must be considered (second plot) to evaluate the process performance. # %% if __name__ == '__main__': from CADETProcess.simulator import Cadet process_simulator = Cadet() simulation_results = process_simulator.simulate(process) simulation_results.solution.column.outlet.plot() simulation_results.solution.outlet.inlet.plot() # %% [markdown] # ## Optimize Fractionation Times # %% if __name__ == '__main__': from CADETProcess.fractionation import FractionationOptimizer fractionation_optimizer = FractionationOptimizer() fractionator = fractionation_optimizer.optimize_fractionation( simulation_results, purity_required=[0.95, 0.95] ) print(fractionator.performance) _ = fractionator.plot_fraction_signal() # %% [markdown] # ## Peak Shaving # The disadvantage of the CLR process is an increased dispersion due to multiple passes through the pump and additional piping. # # To improve the overall process performance, the CLR process is often combined with peak shaving. # In this process, the initial and final regions of the chromatogram with sufficient purity are "shaved off" during each cycle. # Peak shaving can reduce the number of recycling cycles required, since a decreasing amount of components must be pumped across the column. # # ```{figure} ./figures/clr_peak_shaving_events.svg # :name: clr_peak_shaving_events # # Events for closed-loop recycling process with peak shaving. # ```
PypiClean
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/form/PasswordValidator.js
// Guard against double-loading this module (classic Dojo 1.x loader pattern).
if(!dojo._hasResource["dojox.form.PasswordValidator"]){
dojo._hasResource["dojox.form.PasswordValidator"]=true;
dojo.provide("dojox.form.PasswordValidator");
dojo.require("dijit.form._FormWidget");
dojo.require("dijit.form.ValidationTextBox");
dojo.requireLocalization("dojox.form","PasswordValidator",null,"ROOT,ar,ca,cs,da,de,el,es,fi,fr,he,hu,it,ja,ko,nb,nl,pl,pt,pt-pt,ru,sk,sl,sv,th,tr,zh,zh-tw");

// Base class for the three password inputs managed by PasswordValidator
// ("old", "new" and "verify" boxes all derive from this).
dojo.declare("dojox.form._ChildTextBox",dijit.form.ValidationTextBox,{
	// The owning dojox.form.PasswordValidator widget.
	containerWidget:null,
	type:"password",
	reset:function(){
		// Clear the value via the superclass so subclass _setValueAttr
		// overrides (and their side effects) are bypassed.
		dijit.form.ValidationTextBox.prototype._setValueAttr.call(this,"",true);
		this._hasBeenBlurred=false;
	},
	postCreate:function(){
		this.inherited(arguments);
		// Unnamed child boxes must not submit their own form value.
		if(!this.name){
			dojo.removeAttr(this.focusNode,"name");
		}
		this.connect(this.focusNode,"onkeypress","_onChildKeyPress");
	},
	_onChildKeyPress:function(e){
		// Commit the current value when the user presses ENTER.
		if(e&&e.keyCode==dojo.keys.ENTER){
			this._setBlurValue();
		}
	}
});

// The "old password" box: additionally tracks whether the entered value
// passed the container's pwCheck() callback.
dojo.declare("dojox.form._OldPWBox",dojox.form._ChildTextBox,{
	// Result of the last pwCheck() call on this box's value.
	_isPWValid:false,
	_setValueAttr:function(_1,_2){
		if(_1===""){
			// Empty input: fall back to the previously stored value.
			_1=dojox.form._OldPWBox.superclass.attr.call(this,"value");
		}
		if(_2!==null){
			// Re-run the (user-supplied) old-password check.
			this._isPWValid=this.containerWidget.pwCheck(_1);
		}
		this.inherited(arguments);
		// Propagate the "new" box's value so the container re-evaluates.
		this.containerWidget._childValueAttr(this.containerWidget._inputWidgets[1].attr("value"));
	},
	isValid:function(_3){
		// Valid only if base validation passes AND pwCheck approved the value.
		return this.inherited("isValid",arguments)&&this._isPWValid;
	},
	_update:function(e){
		// Only re-validate once the user has actually left the field before.
		if(this._hasBeenBlurred){
			this.validate(true);
		}
		this._onMouse(e);
	},
	_getValueAttr:function(){
		// Expose the value only when the whole container is started and valid.
		if(this.containerWidget._started&&this.containerWidget.isValid()){
			return this.inherited(arguments);
		}
		return "";
	},
	_setBlurValue:function(){
		var _4=dijit.form.ValidationTextBox.prototype._getValueAttr.call(this);
		this._setValueAttr(_4,(this.isValid?this.isValid():true));
	}
});

// The "new password" box: changing it forces the "verify" box to re-validate.
dojo.declare("dojox.form._NewPWBox",dojox.form._ChildTextBox,{
	required:true,
	onChange:function(){
		// _inputWidgets[2] is the "verify" box.
		this.containerWidget._inputWidgets[2].validate(false);
		this.inherited(arguments);
	}
});
// The "verify password" box: valid only when it matches the "new" box.
dojo.declare("dojox.form._VerifyPWBox",dojox.form._ChildTextBox,{
	isValid:function(_5){
		// Valid iff base validation passes AND the value equals the "new"
		// box's value (_inputWidgets[1]).
		return this.inherited("isValid",arguments)&&(this.attr("value")==this.containerWidget._inputWidgets[1].attr("value"));
	}
});

// Composite widget that wraps old/new/verify password inputs, validates them
// as a group, and exposes the agreed-upon new password as its form value.
dojo.declare("dojox.form.PasswordValidator",dijit.form._FormValueWidget,{
	required:true,
	// [old, new, verify] child widgets, populated in postCreate/_createSubWidgets.
	_inputWidgets:null,
	// Form name for the optional "old password" input; empty means no old box.
	oldName:"",
	templateString:dojo.cache("dojox.form","resources/PasswordValidator.html","<div dojoAttachPoint=\"containerNode\">\n\t<input type=\"hidden\" name=\"${name}\" value=\"\" dojoAttachPoint=\"focusNode\" />\n</div>\n"),
	_hasBeenBlurred:false,
	isValid:function(_6){
		// All child widgets must be valid (null slots, e.g. no "old" box, pass).
		return dojo.every(this._inputWidgets,function(i){
			if(i&&i._setStateClass){
				// Refresh the visual valid/invalid state while we are at it.
				i._setStateClass();
			}
			return (!i||i.isValid());
		});
	},
	validate:function(_7){
		// Propagate our blurred flag to the children, validate each, and
		// require all to pass (dojo.every with a string lambda body).
		return dojo.every(dojo.map(this._inputWidgets,function(i){
			if(i&&i.validate){
				i._hasBeenBlurred=(i._hasBeenBlurred||this._hasBeenBlurred);
				return i.validate();
			}
			return true;
		},this),"return item;");
	},
	reset:function(){
		this._hasBeenBlurred=false;
		dojo.forEach(this._inputWidgets,function(i){
			if(i&&i.reset){
				i.reset();
			}
		},this);
	},
	_createSubWidgets:function(){
		// Replace the raw <input pwType="..."> nodes found in postCreate with
		// the matching widget classes, wiring each back to this container.
		var _8=this._inputWidgets,_9=dojo.i18n.getLocalization("dojox.form","PasswordValidator",this.lang);
		dojo.forEach(_8,function(i,_a){
			if(i){
				var p={containerWidget:this},c;
				if(_a===0){
					// Slot 0: old password.
					p.name=this.oldName;
					p.invalidMessage=_9.badPasswordMessage;
					c=dojox.form._OldPWBox;
				}else{
					if(_a===1){
						// Slot 1: new password.
						p.required=this.required;
						c=dojox.form._NewPWBox;
					}else{
						if(_a===2){
							// Slot 2: verify password.
							p.invalidMessage=_9.nomatchMessage;
							c=dojox.form._VerifyPWBox;
						}
					}
				}
				_8[_a]=new c(p,i);
			}
		},this);
	},
	pwCheck:function(_b){
		// Hook for the application to verify the OLD password; override it.
		// Default rejects everything.
		return false;
	},
	postCreate:function(){
		this.inherited(arguments);
		// Collect the source <input> nodes by their pwType attribute, in
		// fixed order [old, new, verify].
		var _c=this._inputWidgets=[];
		dojo.forEach(["old","new","verify"],function(i){
			_c.push(dojo.query("input[pwType="+i+"]",this.containerNode)[0]);
		},this);
		if(!_c[1]||!_c[2]){
			throw new Error("Need at least pwType=\"new\" and pwType=\"verify\"");
		}
		if(this.oldName&&!_c[0]){
			throw new Error("Need to specify pwType=\"old\" if using oldName");
		}
		this.containerNode=this.domNode;
		this._createSubWidgets();
		// Keep our own value in sync whenever the new/verify boxes change.
		this.connect(this._inputWidgets[1],"_setValueAttr","_childValueAttr");
		this.connect(this._inputWidgets[2],"_setValueAttr","_childValueAttr");
	},
	_childValueAttr:function(v){
		// Expose the new password only while the group as a whole is valid.
		this.attr("value",this.isValid()?v:"");
	},
	_setDisabledAttr:function(_d){
		this.inherited(arguments);
		dojo.forEach(this._inputWidgets,function(i){
			if(i&&i.attr){
				i.attr("disabled",_d);
			}
		});
	},
	_setRequiredAttribute:function(_e){
		this.required=_e;
		dojo.attr(this.focusNode,"required",_e);
		dijit.setWaiState(this.focusNode,"required",_e);
		this._refreshState();
		// Mirror the required flag onto every child box.
		dojo.forEach(this._inputWidgets,function(i){
			if(i&&i.attr){
				i.attr("required",_e);
			}
		});
	},
	_setValueAttr:function(v){
		this.inherited(arguments);
		dojo.attr(this.focusNode,"value",v);
	},
	_getValueAttr:function(){
		return this.inherited(arguments)||"";
	},
	focus:function(){
		// Focus the first invalid child; if all are valid, focus the "new" box.
		var f=false;
		dojo.forEach(this._inputWidgets,function(i){
			if(i&&!i.isValid()&&!f){
				i.focus();
				f=true;
			}
		});
		if(!f){
			this._inputWidgets[1].focus();
		}
	}
});
}
PypiClean
/AircraftDR-0.1-py3-none-any.whl/ADR/Analysis/Performance/Power.py
import numpy as np

from ADR.Core.data_manipulation import dict_to_dataframe
from ADR.Core.data_manipulation import find_df_roots
from ADR.Methods.FundamentalEquations import drag


class Power:
    """Power-based level-flight performance analysis for an aircraft.

    Computes power available (from the motor), power required (from drag at
    the trimmed angle of attack) and their difference over the flyable speed
    range, then derives minimum/maximum level-flight speeds.  If no power
    excess exists anywhere, the plane's MTOW is reduced until sustained
    flight becomes possible (or MTOW reaches zero).
    """

    def __init__(self, plane, performance_parameters):
        # Aircraft model and its lifting surfaces.
        self.plane = plane
        self.wing1 = plane.wing1
        self.wing2 = plane.wing2
        self.hs = plane.hs  # horizontal stabilizer
        self.area_ref = plane.wing1.area  # reference area (wing 1)
        # Air density taken from the performance parameters (presumably kg/m^3
        # under key "rho_air" -- confirm against the caller).
        self.rho = performance_parameters.get("rho_air")

        self.checks_and_update_mtow()

    def checks_and_update_mtow(self):
        """Run the full power analysis, lowering MTOW until excess power exists."""
        # Speed envelope: stall speed up to the minimum-CL speed, 0.1 m/s steps.
        self.plane.get_V_stall(self.rho)
        self.plane.get_V_CLmin(self.rho)
        self.velocity_range = np.arange(
            self.plane.V_stall, self.plane.V_CLmin, 0.1)

        self.power_available()
        self.power_required()
        self.power_excess()

        positive_power = self.power_excess_df["Power excess"] > 0
        has_power_excess = positive_power.any()
        while has_power_excess == False and self.plane.mtow != 0:
            # Re-evaluate the flag from the dataframes produced at the end of
            # the previous iteration.
            # NOTE(review): mtow is decremented below *before* this refreshed
            # flag is re-checked by the while condition, so the final MTOW may
            # end up 1 kg lower than strictly necessary -- confirm intended.
            positive_power = self.power_excess_df["Power excess"] > 0
            has_power_excess = positive_power.any()
            # TODO: This is a big reduce-step. We should get this down by getting the power analysis time down.
            self.plane.mtow -= 1
            print("New MTOW: {}".format(self.plane.mtow))
            if self.plane.mtow > 0:
                self.power_available()
                self.power_required()
                self.power_excess()
            else:
                self.plane.mtow = 0
                print("Aircraft cannot sustain flight even with zero weight")

        self.get_V_min_max()

    def power_required(self):
        """Compute thrust and power required for level flight at each speed.

        For each velocity, the angle of attack is increased from stall_min in
        0.1-degree steps until total lift balances weight (mtow * 9.81); drag
        at that trim angle gives the required thrust, and power = thrust * V.

        Returns (alpha_df, thrust_required_df, power_required_df), all indexed
        by velocity; the same frames are also stored on self.
        """
        thrust_required_dict = {}
        power_required_dict = {}
        alpha_dict = {}
        for velocity in self.velocity_range:
            total_lift = 0
            alpha = self.plane.stall_min
            while total_lift < self.plane.mtow * 9.81:
                alpha += 0.1
                # Net lift: wing 1 minus horizontal-stabilizer downforce.
                total_lift = self.wing1.lift(self.rho, velocity, alpha) - self.hs.lift(
                    self.rho, velocity, alpha
                )
                if self.plane.plane_type == "biplane":
                    total_lift += self.wing2.lift(self.rho, velocity, alpha)
                if alpha >= self.plane.stall_max:
                    # Could not trim below stall: no level-flight alpha here.
                    alpha_nivel = None
                    break
                else:
                    alpha_nivel = alpha
            # NOTE(review): if alpha_nivel is None the drag() calls below still
            # receive it; also wing2 drag is always added even for a monoplane
            # (lift above only adds wing2 for biplanes) -- confirm both.
            thrust_required = (
                drag(self.rho, velocity, self.plane.S_tp, self.plane.CD_tp)
                + drag(self.rho, velocity, self.plane.S_fus, self.plane.CD_fus)
                + self.wing1.drag(self.rho, velocity, alpha_nivel)
                + self.wing2.drag(self.rho, velocity, alpha_nivel)
                + self.hs.drag(self.rho, velocity, alpha_nivel)
            )
            alpha_dict[velocity] = alpha_nivel
            thrust_required_dict[velocity] = thrust_required
        for velocity in thrust_required_dict:
            # P = T * V at each evaluated speed.
            power_required_dict[velocity] = thrust_required_dict[velocity] * velocity
        self.thrust_required_dict = thrust_required_dict
        self.power_required_dict = power_required_dict
        self.alpha_dict = alpha_dict
        self.alpha_df = dict_to_dataframe(alpha_dict, "Alpha", "Velocity")
        self.thrust_required_df = dict_to_dataframe(
            thrust_required_dict, "Thrust required", "Velocity"
        )
        self.power_required_df = dict_to_dataframe(
            power_required_dict, "Power required", "Velocity"
        )
        return self.alpha_df, self.thrust_required_df, self.power_required_df

    def power_available(self):
        """Compute motor thrust and power available at each speed.

        Returns (thrust_available_df, power_available_df) indexed by velocity;
        the same frames are also stored on self.
        """
        thrust_available_dict = {}
        power_available_dict = {}
        for velocity in self.velocity_range:
            thrust_available = self.plane.motor.thrust(velocity)
            thrust_available_dict[velocity] = thrust_available
        for velocity in thrust_available_dict:
            # P = T * V at each evaluated speed.
            power_available_dict[velocity] = thrust_available_dict[velocity] * velocity
        self.thrust_available_dict = thrust_available_dict
        self.power_available_dict = power_available_dict
        self.thrust_available_df = dict_to_dataframe(
            thrust_available_dict, "Thrust available", "Velocity"
        )
        self.power_available_df = dict_to_dataframe(
            power_available_dict, "Power available", "Velocity"
        )
        return self.thrust_available_df, self.power_available_df

    def power_excess(self):
        """Store power excess (available - required) per velocity on self."""
        power_excess_dict = {}
        for velocity in self.power_available_dict:
            power_required = self.power_required_dict[velocity]
            power_available = self.power_available_dict[velocity]
            power_excess_dict[velocity] = power_available - power_required
        self.power_excess_dict = power_excess_dict
        self.power_excess_df = dict_to_dataframe(
            power_excess_dict, "Power excess", "Velocity"
        )

    def get_V_min_max(self):
        """Derive V_min/V_max (zero crossings of power excess) and trim alphas.

        One root: flight limited only at the top end; two roots: both limits
        inside the range; no roots: the whole evaluated range is flyable.
        Results are written onto self.plane.
        """
        roots = find_df_roots(self.power_excess_df, "Power excess")
        # NOTE(review): more than two roots would leave alpha_max unbound and
        # raise a NameError below -- confirm that case cannot occur.
        if len(roots) == 1:
            self.plane.V_min = self.plane.V_stall
            self.plane.V_max = roots[0]
            alpha_max = self.alpha_df.max()[0]
        elif len(roots) == 2:
            self.plane.V_min = roots[0]
            self.plane.V_max = roots[1]
            # Interpolate the trim alpha at the low-speed limit.
            alpha_max = np.interp(
                self.plane.V_min, self.alpha_df.index.values, self.alpha_df["Alpha"]
            )
        elif len(roots) == 0:
            self.plane.V_min = self.plane.V_stall
            self.plane.V_max = np.amax(self.velocity_range)
            alpha_max = self.alpha_df.max()[0]
        self.plane.alpha_min = self.alpha_dict[self.plane.V_max]
        print("Alpha_max: {}".format(alpha_max))
        self.plane.alpha_max = alpha_max
PypiClean
/Draugr-1.0.9.tar.gz/Draugr-1.0.9/draugr/__init__.py
__project__ = "Draugr"
__author__ = "Christian Heider Nielsen"
__version__ = "1.0.9"
__doc__ = r"""
Created on 27/04/2019

@author: cnheider
"""

import datetime
import os
from logging import warning
from pathlib import Path
from typing import Any
from importlib import resources
from importlib.metadata import PackageNotFoundError

from warg import package_is_editable
from warg import dist_is_editable
from apppath import AppPath

# from .drawers import *
# from .writers import *
# from .opencv_utilities import *
# from .torch_utilities import *
# from .stopping import *
# from .numpy_utilities import *
# from .visualisation import *
# from .metrics import *
# from .python_utilities import *

# Append the package README to the module docstring so `help(draugr)` shows it.
with open(Path(__file__).parent / "README.md", "r") as this_init_file:
    __doc__ += this_init_file.read()

# with open(Path(__file__).parent.parent / "README.md", "r") as this_init_file:
#     __doc__ += this_init_file.read()

__all__ = [
    "PROJECT_APP_PATH",
    "PROJECT_NAME",
    "PROJECT_VERSION",
    "get_version",
    "PROJECT_ORGANISATION",
    "PROJECT_AUTHOR",
    "PROJECT_YEAR",
    "INCLUDE_PROJECT_READMES",
    "PACKAGE_DATA_PATH",
]

# Project metadata constants derived from the dunder attributes above.
PROJECT_ORGANISATION = "Pything"
PROJECT_NAME = __project__.lower().strip().replace(" ", "_")
PROJECT_VERSION = __version__
PROJECT_YEAR = 2018
PROJECT_AUTHOR = __author__.lower().strip().replace(" ", "_")
# Per-user application directories (config/cache/data) for this project.
PROJECT_APP_PATH = AppPath(app_name=PROJECT_NAME, app_author=PROJECT_AUTHOR)
INCLUDE_PROJECT_READMES = False
__url__ = f"https://github.com/{PROJECT_ORGANISATION.lower()}/{PROJECT_NAME}"
PACKAGE_DATA_PATH = resources.files(PROJECT_NAME) / "data"

try:
    # True when the package is installed in editable (development) mode.
    DEVELOP = package_is_editable(PROJECT_NAME)
except PackageNotFoundError as e:
    DEVELOP = True


def get_version(append_time: Any = DEVELOP) -> str:
    """Return the package version string.

    When *append_time* is truthy (default: only for editable installs), a UTC
    timestamp (``%Y%m%d%H%M%S``) is appended to produce a unique development
    version, and a leading ``v`` (as in git tags like ``v1.2.3``) is stripped.
    """
    version = __version__
    if not version:
        version = os.getenv("VERSION", "0.0.0")

    if append_time:
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
        # consider datetime.datetime.now(datetime.timezone.utc) -- confirm.
        now = datetime.datetime.utcnow()
        date_version = now.strftime("%Y%m%d%H%M%S")
        # date_version = time.time()
        if version:
            # Most git tags are prefixed with 'v' (example: v1.2.3) this is
            # never desirable for artifact repositories, so we strip the
            # leading 'v' if it's present.
            version = (
                version[1:]
                if isinstance(version, str) and version.startswith("v")
                else version
            )
        else:
            # Default version is an ISO8601 compliant datetime. PyPI doesn't allow
            # the colon ':' character in its versions, and time is required to allow
            # for multiple publications to master in one day. This datetime string
            # uses the 'basic' ISO8601 format for both its date and time components
            # to avoid issues with the colon character (ISO requires that date and
            # time components of a date-time string must be uniformly basic or
            # extended, which is why the date component does not have dashes.
            #
            # Publications using datetime versions should only be made from master
            # to represent the HEAD moving forward.
            warning(
                f"Environment variable VERSION is not set, only using datetime: {date_version}"
            )

            # warn(f'Environment variable VERSION is not set, only using timestamp: {version}')

        version = f"{version}.{date_version}"

    return version


# NOTE(review): __version__ is assigned a literal string above, so this branch
# appears unreachable -- confirm before removing.
if __version__ is None:
    __version__ = get_version(append_time=True)

__version_info__ = tuple(int(segment) for segment in __version__.split("."))
PypiClean
/Flask-MDBootstrap-3.0.5.tar.gz/Flask-MDBootstrap-3.0.5/flask_mdbootstrap/static/MDB/src/js/vendor/addons/rating.js
(function ($) {
  // jQuery plugin: turn the selected container into a 5-star (or 5-face)
  // rating widget.  Variants are chosen by container classes:
  //   'rating-faces'  -> emoji faces, 'empty-stars' -> outlined stars,
  //   default         -> filled stars colored on hover,
  //   'feedback'      -> additionally attach a comment popover to each item.
  $.fn.mdbRate = function () {
    var $stars;
    // Custom whitelist to allow for using HTML tags in popover content
    var myDefaultWhiteList = $.fn.tooltip.Constructor.Default.whiteList
    myDefaultWhiteList.textarea = [];
    myDefaultWhiteList.button = [];
    var $container = $(this);
    // Tooltip titles, index 0 (worst) .. 4 (best).
    var titles = ['Very bad', 'Poor', 'OK', 'Good', 'Excellent'];
    for (var i = 0; i < 5; i++) {
      $container.append(`<i class="py-2 px-1 rate-popover" data-index="${i}" data-html="true" data-toggle="popover" data-placement="top" title="${titles[i]}"></i>`);
    }
    $stars = $container.children();
    // Initial (inactive) icon set depends on the widget variant.
    if ($container.hasClass('rating-faces')) {
      $stars.addClass('far fa-meh-blank');
    } else if ($container.hasClass('empty-stars')) {
      $stars.addClass('far fa-star');
    } else {
      $stars.addClass('fas fa-star');
    }
    $stars.on('mouseover', function () {
      // NOTE: .attr() returns a *string*, which is why the switch statements
      // below compare against string cases like '0'..'4'.
      var index = $(this).attr('data-index');
      markStarsAsActive(index);
    });

    // Highlight items 0..index with the icon/color matching the hovered grade.
    function markStarsAsActive(index) {
      unmarkActive();
      // String `index` coerces in `i <= index`, so the loop still works.
      for (var i = 0; i <= index; i++) {
        if ($container.hasClass('rating-faces')) {
          $($stars.get(i)).removeClass('fa-meh-blank');
          $($stars.get(i)).addClass('live');
          // Face chosen by the hovered grade (angry .. laughing).
          switch (index) {
            case '0':
              $($stars.get(i)).addClass('fa-angry');
              break;
            case '1':
              $($stars.get(i)).addClass('fa-frown');
              break;
            case '2':
              $($stars.get(i)).addClass('fa-meh');
              break;
            case '3':
              $($stars.get(i)).addClass('fa-smile');
              break;
            case '4':
              $($stars.get(i)).addClass('fa-laugh');
              break;
          }
        } else if ($container.hasClass('empty-stars')) {
          $($stars.get(i)).addClass('fas');
          // Color class chosen by the hovered grade (oneStar .. fiveStars).
          switch (index) {
            case '0':
              $($stars.get(i)).addClass('oneStar');
              break;
            case '1':
              $($stars.get(i)).addClass('twoStars');
              break;
            case '2':
              $($stars.get(i)).addClass('threeStars');
              break;
            case '3':
              $($stars.get(i)).addClass('fourStars');
              break;
            case '4':
              $($stars.get(i)).addClass('fiveStars');
              break;
          }
        } else {
          $($stars.get(i)).addClass('amber-text');
        }
      }
    }

    // Restore every item to its inactive appearance.
    function unmarkActive() {
      // NOTE(review): these two ternaries are used for their side effects
      // only; the else-branches ($stars / $container) are no-ops.
      $stars.parent().hasClass('rating-faces') ? $stars.addClass('fa-meh-blank') : $stars;
      $container.hasClass('empty-stars') ? $stars.removeClass('fas') : $container;
      $stars.removeClass('fa-angry fa-frown fa-meh fa-smile fa-laugh live oneStar twoStars threeStars fourStars fiveStars amber-text');
    }

    // Clicking a star closes any open popover.
    $stars.on('click', function () {
      $stars.popover('hide');
    });

    // Submit, you can add some extra custom code here
    // ex. to send the information to the server
    $container.on('click', '#voteSubmitButton', function () {
      $stars.popover('hide');
    });

    // Cancel, just close the popover
    $container.on('click', '#closePopoverButton', function () {
      $stars.popover('hide');
    });

    // Feedback variant: each item opens a popover with a free-text comment box.
    if ($container.hasClass('feedback')) {
      $(function () {
        $stars.popover({
          // Append popover to #rateMe to allow handling form inside the popover
          container: $container,
          // Custom content for popover
          content: `<div class="my-0 py-0"> <textarea type="text" style="font-size: 0.78rem" class="md-textarea form-control py-0" placeholder="Write us what can we improve" rows="3"></textarea> <button id="voteSubmitButton" type="submit" class="btn btn-sm btn-primary">Submit!</button> <button id="closePopoverButton" class="btn btn-flat btn-sm">Close</button> </div>`
        });
      })
    }
    $stars.tooltip();
  }
})(jQuery);
PypiClean
/CloudReg-1.0.1-py3-none-any.whl/cloudreg/scripts/quantify_fluorescence_by_region.py
# NOTE(review): wildcard import; presumably supplies json, build_tree and
# get_child_nodes_from_ontology used below -- confirm.
from ARA_stuff.parse_ara import *
import os
import argparse
from joblib import Parallel, delayed
import numpy as np
from cloudvolume import CloudVolume
from collections import defaultdict, Counter
from skimage import transform
from tqdm import tqdm, trange
import pandas as pd


def get_region_stats(atlas_s3_path, data_s3_path, z_slice):
    """Accumulate fluorescence and voxel counts per atlas region for one z slice.

    The atlas slice is upsampled to the native data resolution with nearest-
    neighbour interpolation (order=0) so integer region labels stay intact.

    Returns a pair of dicts keyed by atlas label id:
    (fluorescence sum, voxel count).
    """
    # create vols
    atlas_vol = CloudVolume(atlas_s3_path, parallel=False, progress=False)
    data_vol = CloudVolume(data_s3_path, parallel=False, progress=False)
    # Reverse to (z, y, x) ordering.
    data_size = data_vol.scales[0]["size"][::-1]
    # use vols
    fluorescence_sum = defaultdict(lambda: 0)
    region_volume = defaultdict(lambda: 0)
    atlas_slice = np.squeeze(atlas_vol[:, :, z_slice]).T
    atlas_slice_upsampled = transform.resize(
        atlas_slice, data_size[1:], order=0, preserve_range=True
    )
    unique_vals = np.unique(atlas_slice_upsampled)
    data_slice = np.squeeze(data_vol[:, :, z_slice]).T
    for j in unique_vals:
        if j == 0:
            # Label 0 is background; skip it.
            continue
        idx = atlas_slice_upsampled == j
        fluorescence_sum[j] += np.sum(data_slice[idx])
        region_volume[j] += np.count_nonzero(idx)
    # print(f"{z_slice} z slice done")
    # with open('fluorescence_quantification_vglut3_539', 'wb') as fp:
    #     pickle.dump([fluorescence_sum,region_volume], fp)
    return fluorescence_sum, region_volume


def combine_results(results):
    """Merge per-slice (fluorescence, volume) dict pairs into two Counters."""
    total_fluorescence = Counter()
    total_volume = Counter()
    for i in results:
        total_fluorescence += Counter(i[0])
        total_volume += Counter(i[1])
    return total_fluorescence, total_volume


def get_ara_dict(path_to_ontology):
    """Load the ARA ontology JSON and return a mapping: region id -> name."""
    with open(path_to_ontology, "r") as fp:
        f = json.load(fp)
    tree = build_tree(f)
    id2name = defaultdict(str)
    get_child_nodes_from_ontology(tree, id2name)
    return id2name


def save_results_to_csv(fluorescence_dict, columns, out_path):
    """Write {roi name: row values} to CSV, sorted descending by the last column."""
    df = pd.DataFrame.from_dict(fluorescence_dict, orient="index", columns=columns)
    df.index.name = "ROI Name"
    df.sort_values(columns[-1], ascending=False, inplace=True)
    df.to_csv(out_path)


def quantify_fluorescence_by_region(data_s3_path, atlas_s3_path, path_to_ontology, outfile='./quantification.csv', num_procs=-1):
    """Quantify fluorescence per atlas region over the whole volume.

    Processes every z slice in parallel, combines the per-slice sums, and
    writes two CSVs: fluorescence density per ROI (to *outfile*) and total
    fluorescence per ROI (to *outfile* + "total_fluorescence").
    """
    data_vol = CloudVolume(data_s3_path)
    id2name = get_ara_dict(path_to_ontology)
    experiment_name = "_".join(data_s3_path.split("/")[-2:])
    # One parallel job per z slice.
    results = Parallel(num_procs)(
        delayed(get_region_stats)(atlas_s3_path, data_s3_path, i)
        for i in trange(data_vol.scales[0]["size"][-1])
    )
    total_fluorescence, total_volume = combine_results(results)
    # Density = total fluorescence / region volume (in voxels).
    fluorescence_density = defaultdict(float)
    for i, j in total_fluorescence.items():
        fluorescence_density[i] = float(j) / float(total_volume[i])
    fluorescence_density_sorted = {
        k: v for k, v in sorted(fluorescence_density.items(), key=lambda item: item[1])
    }
    # density by roi
    columns = ["atlas id", "fluorescence density"]
    # outfile = f"{outfile}"
    fluorescence_density_roi = defaultdict(float)
    for i, j in fluorescence_density_sorted.items():
        fluorescence_density_roi[id2name[i]] = [i, j]
    save_results_to_csv(fluorescence_density_roi, columns, outfile)
    # total fluorescence by roi
    columns = ["atlas id", "total fluorescence"]
    total_fluorescence_roi = defaultdict(float)
    for i, j in total_fluorescence.items():
        total_fluorescence_roi[id2name[i]] = [i, j]
    total_fluorescence_roi = {
        k: v for k, v in sorted(total_fluorescence_roi.items(), key=lambda item: item[1])
    }
    # NOTE(review): this concatenates "total_fluorescence" directly onto the
    # outfile path (e.g. "quantification.csvtotal_fluorescence") -- confirm
    # the intended file name.
    save_results_to_csv(total_fluorescence_roi, columns, outfile + "total_fluorescence")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "data_s3_path",
        help="full s3 path to data of interest as precomputed volume. must be of the form `s3://bucket-name/path/to/channel`",
    )
    parser.add_argument(
        "atlas_s3_path",
        help="full s3 path to transfomed atlas. must have the same number of slices as native resolution data.",
    )
    # NOTE(review): "out_path" is parsed but never used below ("--outfile" is
    # what gets passed on); its removal would change the CLI -- confirm first.
    parser.add_argument("out_path", help="path to save output results")
    # NOTE(review): help text below appears copy-pasted from out_path -- confirm.
    parser.add_argument(
        "--path_to_ontology",
        help="path to save output results",
        type=str,
        default=os.path.expanduser("~/CloudReg/cloudreg/scripts/ARA_stuff/ara_ontology.json"),
    )
    parser.add_argument(
        "--num_procs", help="number of processes to use", default=16, type=int
    )
    parser.add_argument(
        "--outfile",
        help="path to CSV to store results. Example: /path/to/data/quantification.csv",
        default='./quantification.csv',
        type=str
    )
    args = parser.parse_args()

    quantify_fluorescence_by_region(
        args.data_s3_path,
        args.atlas_s3_path,
        args.path_to_ontology,
        args.outfile,
        args.num_procs
    )
PypiClean
/MetaCalls-0.0.5-cp310-cp310-manylinux2014_x86_64.whl/metacalls/node_modules/rimraf/CHANGELOG.md
# v3.0 - Add `--preserve-root` option to executable (default true) - Drop support for Node.js below version 6 # v2.7 - Make `glob` an optional dependency # 2.6 - Retry on EBUSY on non-windows platforms as well - Make `rimraf.sync` 10000% more reliable on Windows # 2.5 - Handle Windows EPERM when lstat-ing read-only dirs - Add glob option to pass options to glob # 2.4 - Add EPERM to delay/retry loop - Add `disableGlob` option # 2.3 - Make maxBusyTries and emfileWait configurable - Handle weird SunOS unlink-dir issue - Glob the CLI arg for better Windows support # 2.2 - Handle ENOENT properly on Windows - Allow overriding fs methods - Treat EPERM as indicative of non-empty dir - Remove optional graceful-fs dep - Consistently return null error instead of undefined on success - win32: Treat ENOTEMPTY the same as EBUSY - Add `rimraf` binary # 2.1 - Fix SunOS error code for a non-empty directory - Try rmdir before readdir - Treat EISDIR like EPERM - Remove chmod - Remove lstat polyfill, node 0.7 is not supported # 2.0 - Fix myGid call to check process.getgid - Simplify the EBUSY backoff logic. - Use fs.lstat in node >= 0.7.9 - Remove gently option - remove fiber implementation - Delete files that are marked read-only # 1.0 - Allow ENOENT in sync method - Throw when no callback is provided - Make opts.gently an absolute path - use 'stat' if 'lstat' is not available - Consistent error naming, and rethrow non-ENOENT stat errors - add fiber implementation
PypiClean
/KaTrain-1.14.0-py3-none-any.whl/katrain/core/contribute_engine.py
import json
import os
import random
import shlex
import shutil
import signal
import subprocess
import threading
import time
import traceback
from collections import defaultdict

from katrain.core.constants import OUTPUT_DEBUG, OUTPUT_ERROR, OUTPUT_INFO, OUTPUT_KATAGO_STDERR, DATA_FOLDER
from katrain.core.engine import BaseEngine
from katrain.core.game import BaseGame
from katrain.core.lang import i18n
from katrain.core.sgf_parser import Move
from katrain.core.utils import find_package_resource


class KataGoContributeEngine(BaseEngine):
    """Starts and communicates with the KataGo 'contribute' program.

    Launches ``katago contribute`` as a subprocess, feeds it control commands
    over stdin, and consumes its stdout (JSON game/analysis records) and
    stderr (log/error lines) on two daemon reader threads. Self-play games in
    progress are mirrored into ``BaseGame`` objects so the UI can replay them.
    """

    # Fallback for the 'maxgames' config entry (simultaneous self-play games).
    DEFAULT_MAX_GAMES = 8
    # Seconds to keep displaying a finished game before switching away.
    SHOW_RESULT_TIME = 5
    # Seconds without progress after which the shown game is considered stuck.
    GIVE_UP_AFTER = 120

    def __init__(self, katrain):
        """Build the contribute command line from config and start the process.

        :param katrain: main application object; used for config lookup,
            logging, and as a callable for UI update events.
        """
        super().__init__(katrain, katrain.config("contribute"))
        self.katrain = katrain
        base_dir = os.path.expanduser("~/.katrain/katago_contribute")
        self.katago_process = None  # Popen handle; None means not running
        self.stdout_thread = None
        self.stderr_thread = None
        self.shell = False
        self.active_games = {}  # game_id -> BaseGame currently in the buffer
        self.finished_games = set()  # game_ids already shown to completion
        self.showing_game = None  # game_id currently displayed, if any
        self.last_advance = 0  # timestamp of the last shown move / game switch
        self.move_count = 0
        self.uploaded_games_count = 0
        self.last_move_for_game = defaultdict(int)  # game_id -> last move timestamp
        self.visits_count = 0
        self.start_time = 0  # set lazily on the first received move
        self.server_error = None  # last fatal server-side error message, if any
        self.paused = False

        self.save_sgf = self.config.get("savesgf", False)
        self.save_path = self.config.get("savepath", "./dist_sgf/")
        self.move_speed = self.config.get("movespeed", 2.0)  # seconds per shown move

        exe = self.get_engine_path(self.config.get("katago"))
        # KataGo needs cacert.pem next to the executable for HTTPS uploads;
        # copy the bundled one there if it is missing.
        cacert_path = os.path.join(os.path.split(exe)[0], "cacert.pem")
        if not os.path.isfile(cacert_path):
            try:
                shutil.copyfile(find_package_resource("katrain/KataGo/cacert.pem"), cacert_path)
            except Exception as e:
                self.katrain.log(
                    f"Could not copy cacert file ({e}), please add it manually to your katago.exe directory",
                    OUTPUT_ERROR,
                )

        cfg = find_package_resource(self.config.get("config"))
        settings_dict = {
            "username": self.config.get("username"),
            "password": self.config.get("password"),
            "maxSimultaneousGames": self.config.get("maxgames") or self.DEFAULT_MAX_GAMES,
            "includeOwnership": self.config.get("ownership") or False,
            "logGamesAsJson": True,  # required: the stdout parser consumes JSON records
            "homeDataDir": os.path.expanduser(DATA_FOLDER),
        }
        # Allow the display buffer to grow to twice the number of parallel games
        # before fast-forwarding (see advance_showing_game).
        self.max_buffer_games = 2 * settings_dict["maxSimultaneousGames"]
        settings = {f"{k}={v}" for k, v in settings_dict.items()}

        self.command = shlex.split(
            f'"{exe}" contribute -config "{cfg}" -base-dir "{base_dir}" -override-config "{",".join(settings)}"'
        )
        self.start()

    @staticmethod
    def game_ended(game):
        """Play a matching second pass if the engine wants to pass, then return the game result (None if not over)."""
        cn = game.current_node
        if cn.is_pass and cn.analysis_exists:
            moves = cn.candidate_moves
            if moves and moves[0]["move"] == "pass":
                game.play(Move(None, player=game.current_node.next_player))  # play pass
        return game.end_result

    def advance_showing_game(self):
        """Advance the currently displayed game by one move, or pick a new game to display.

        Called periodically by the UI. Finished games linger for
        SHOW_RESULT_TIME, stuck games are abandoned after GIVE_UP_AFTER, and
        moves are fast-forwarded when the buffer exceeds max_buffer_games.
        """
        current_game = self.active_games.get(self.showing_game)
        if current_game:
            end_result = self.game_ended(current_game)
            if end_result is not None:
                self.finished_games.add(self.showing_game)
                if time.time() - self.last_advance > self.SHOW_RESULT_TIME:
                    del self.active_games[self.showing_game]
                    if self.save_sgf:
                        filename = os.path.join(self.save_path, f"{self.showing_game}.sgf")
                        self.katrain.log(current_game.write_sgf(filename, self.katrain.config("trainer")), OUTPUT_INFO)
                    self.katrain.log(f"Game {self.showing_game} finished, finding a new one", OUTPUT_INFO)
                    self.showing_game = None
            elif time.time() - self.last_advance > self.move_speed or len(self.active_games) > self.max_buffer_games:
                if current_game.current_node.children:
                    current_game.redo(1)
                    self.last_advance = time.time()
                    self.katrain("update-state")
                elif time.time() - self.last_advance > self.GIVE_UP_AFTER:
                    # No new moves arrived for a long time: assume the game is stuck.
                    self.katrain.log(
                        f"Giving up on game {self.showing_game} which appears stuck, finding a new one", OUTPUT_INFO
                    )
                    self.showing_game = None
        else:
            if self.active_games:
                self.showing_game = None
                best_count = -1
                for game_id, game in self.active_games.items():  # find game with most moves left to show
                    count = 0
                    node = game.current_node
                    while node.children:
                        node = node.children[0]
                        count += 1
                    if count > best_count:
                        best_count = count
                        self.showing_game = game_id
                self.last_advance = time.time()
                self.katrain.log(f"Showing game {self.showing_game}, {best_count} moves left to show.", OUTPUT_INFO)
                self.katrain.game = self.active_games[self.showing_game]
                self.katrain("update-state", redraw_board=True)

    def status(self):
        """Return a human-readable multi-line summary of contribution statistics."""
        return f"Contributing to distributed training\nGames: {self.uploaded_games_count} uploaded, {len(self.active_games)} in buffer, {len(self.finished_games)} shown\n{self.move_count} moves played ({60*self.move_count/(time.time()-self.start_time):.1f}/min, {self.visits_count / (time.time() - self.start_time):.1f} visits/s)\n"

    def is_idle(self):
        # The contribute process is always busy while running.
        return False

    def queries_remaining(self):
        # Nonzero so callers treat the engine as having outstanding work.
        return 1

    def start(self):
        """Launch the katago contribute subprocess and the two reader threads."""
        try:
            self.katrain.log(f"Starting Distributed KataGo with {self.command}", OUTPUT_INFO)
            startupinfo = None
            if hasattr(subprocess, "STARTUPINFO"):
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW  # stop command box popups on win/pyinstaller
            self.katago_process = subprocess.Popen(
                self.command,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                stdin=subprocess.PIPE,
                startupinfo=startupinfo,
                shell=self.shell,
            )
        except (FileNotFoundError, PermissionError, OSError) as e:
            self.katrain.log(
                i18n._("Starting Kata failed").format(command=self.command, error=e),
                OUTPUT_ERROR,
            )
            return  # don't start
        self.paused = False
        self.stdout_thread = threading.Thread(target=self._read_stdout_thread, daemon=True)
        self.stderr_thread = threading.Thread(target=self._read_stderr_thread, daemon=True)
        self.stdout_thread.start()
        self.stderr_thread.start()

    def check_alive(self, os_error="", maybe_open_help=False):
        """Return True if the subprocess is still running; log and clear the handle if it died."""
        ok = self.katago_process and self.katago_process.poll() is None
        if not ok:
            if self.katago_process:
                code = self.katago_process and self.katago_process.poll()
                if code == 3221225781:  # 0xC0000135: Windows 'DLL not found' exit status
                    died_msg = i18n._("Engine missing DLL")
                else:
                    os_error += f"status {code}"
                    died_msg = i18n._("Engine died unexpectedly").format(error=os_error)
                if code != 1 and not self.server_error:
                    # deliberate exit, already showed message?
                    self.katrain.log(died_msg, OUTPUT_ERROR)
                self.katago_process = None
        return ok

    def shutdown(self, finish=False):
        """Force-quit the subprocess; when `finish` is not None also join the reader threads."""
        process = self.katago_process
        if process:
            self.katago_process.stdin.write(b"forcequit\n")
            self.katago_process.stdin.flush()
            # Clearing the handle first makes the reader threads exit their loops.
            self.katago_process = None
            process.terminate()
        if finish is not None:
            for t in [self.stderr_thread, self.stdout_thread]:
                if t:
                    t.join()

    def graceful_shutdown(self):
        """respond to esc: ask katago to finish its games in progress, then stop."""
        if self.katago_process:
            self.katago_process.stdin.write(b"quit\n")
            self.katago_process.stdin.flush()
            self.katrain.log("Finishing games in progress and stopping contribution", OUTPUT_KATAGO_STDERR)

    def pause(self):
        """respond to pause: toggle between katago's 'pause' and 'resume' commands."""
        if self.katago_process:
            if not self.paused:
                self.katago_process.stdin.write(b"pause\n")
                self.katago_process.stdin.flush()
                self.katrain.log("Pausing contribution", OUTPUT_KATAGO_STDERR)
            else:
                self.katago_process.stdin.write(b"resume\n")
                self.katago_process.stdin.flush()
                self.katrain.log("Resuming contribution", OUTPUT_KATAGO_STDERR)
            self.paused = not self.paused

    def _read_stderr_thread(self):
        """Reader-thread loop: forward katago stderr lines to the log; detect fatal server errors."""
        while self.katago_process is not None:
            try:
                line = self.katago_process.stderr.readline()
                if line:
                    try:
                        message = line.decode(errors="ignore").strip()
                        if any(
                            s in message
                            for s in ["not status code 200 OK", "Server returned error", "Uncaught exception:"]
                        ):
                            message = message.replace("what():", "").replace("Uncaught exception:", "").strip()
                            self.server_error = message  # don't be surprised by engine dying
                            self.katrain.log(message, OUTPUT_ERROR)
                            return
                        else:
                            self.katrain.log(message, OUTPUT_KATAGO_STDERR)
                    except Exception as e:
                        print("ERROR in processing KataGo stderr:", line, "Exception", e)
                elif self.katago_process and not self.check_alive():
                    return
            except Exception as e:
                # BUGFIX: this handler previously logged "reading stdout" (copy-paste
                # from _read_stdout_thread) although it reads the stderr pipe.
                self.katrain.log(f"Exception in reading stderr {e}", OUTPUT_DEBUG)
                return

    def _read_stdout_thread(self):
        """Reader-thread loop: parse katago stdout JSON records into BaseGame objects and statistics."""
        while self.katago_process is not None:
            try:
                line = self.katago_process.stdout.readline()
                if line:
                    line = line.decode(errors="ignore").strip()
                    if line.startswith("{"):
                        try:
                            analysis = json.loads(line)
                            if "gameId" in analysis:
                                game_id = analysis["gameId"]
                                if game_id in self.finished_games:
                                    continue
                                current_game = self.active_games.get(game_id)
                                new_game = current_game is None
                                if new_game:
                                    # First record for this game: build SGF properties
                                    # (handicap placements, size, komi, rules, players).
                                    board_size = [analysis["boardXSize"], analysis["boardYSize"]]
                                    placements = {
                                        f"A{bw}": [
                                            Move.from_gtp(move, pl).sgf(board_size)
                                            for pl, move in analysis["initialStones"]
                                            if pl == bw
                                        ]
                                        for bw in "BW"
                                    }
                                    game_properties = {k: v for k, v in placements.items() if v}
                                    game_properties["SZ"] = f"{board_size[0]}:{board_size[1]}"
                                    game_properties["KM"] = analysis["rules"]["komi"]
                                    game_properties["RU"] = json.dumps(analysis["rules"])
                                    game_properties["PB"] = analysis["blackPlayer"]
                                    game_properties["PW"] = analysis["whitePlayer"]
                                    current_game = BaseGame(
                                        self.katrain, game_properties=game_properties, bypass_config=True
                                    )
                                    self.active_games[game_id] = current_game
                                last_node = current_game.sync_branch(
                                    [Move.from_gtp(coord, pl) for pl, coord in analysis["moves"]]
                                )
                                last_node.set_analysis(analysis)
                                if new_game:
                                    current_game.set_current_node(last_node)
                                self.start_time = self.start_time or time.time() - 1
                                self.move_count += 1
                                self.visits_count += analysis["rootInfo"]["visits"]
                                last_move = self.last_move_for_game[game_id]
                                self.last_move_for_game[game_id] = time.time()
                                dt = self.last_move_for_game[game_id] - last_move if last_move else 0
                                self.katrain.log(
                                    f"[{time.time()-self.start_time:.1f}] Game {game_id} Move {analysis['turnNumber']}: {' '.join(analysis['move'])} Visits {analysis['rootInfo']['visits']} Time {dt:.1f}s\t Moves/min {60*self.move_count/(time.time()-self.start_time):.1f} Visits/s {self.visits_count/(time.time()-self.start_time):.1f}",
                                    OUTPUT_DEBUG,
                                )
                                self.katrain("update-state")
                        except Exception as e:
                            traceback.print_exc()
                            self.katrain.log(f"Exception {e} in parsing or processing JSON: {line}", OUTPUT_ERROR)
                    elif "uploaded sgf" in line:
                        self.uploaded_games_count += 1
                    else:
                        self.katrain.log(line, OUTPUT_KATAGO_STDERR)
                elif self.katago_process and not self.check_alive():  # stderr will do this
                    return
            except Exception as e:
                self.katrain.log(f"Exception in reading stdout {e}", OUTPUT_DEBUG)
                return
PypiClean
/Awespykit-2.1.0-py3-none-any.whl/awespykit/runpykit.py
__license__ = "GNU General Public License v3 (GPLv3)" import sys from functools import partial from os import path from fastpip import VERNUM from PyQt5.QtCore import * from PyQt5.QtGui import * from PyQt5.QtWidgets import * sys.path.append(path.dirname(__file__)) # rpk.exe 入口点所需 from __info__ import * from com import * from logic import * from res.res import * from settings import * from ui import * from utils.thmt import * if VERNUM[0] != REQ_FPVER[0]: raise Exception(f"当前环境的 fastpip 主版本号({VERNUM[0]})非本程序要求:{REQ_FPVER[0]}") if VERNUM[1] < REQ_FPVER[1]: raise Exception(f"当前环境的 fastpip 次版本号({VERNUM[1]})低于本程序要求:{REQ_FPVER[1]}") elif VERNUM[1] == REQ_FPVER[1] and VERNUM[2] < REQ_FPVER[2]: raise Exception(f"当前环境的 fastpip 修订号({VERNUM[2]})低于本程序要求:{REQ_FPVER[2]}") ################################################################ # 版本号的定义:主版本号.次版本号.修订号,对 fastpip 的版本号要求: # 1. 主版本号必须与要求一致,次版本号必须大于等于要求的次版本号 # 2. 如次版本号等于要求的次版本号,则修订号必须大于等于要求的修订号 ################################################################ _IS_MAIN_MODULE = False class MainEntrance(Ui_main_entrance, QMainWindow): def __init__(self): super().__init__() self.setupUi(self) self.setWindowFlags( Qt.WindowMinimizeButtonHint | Qt.WindowCloseButtonHint ) self.__config = MainEntranceConfig() self.__themes: Themes[ThemeData] = PreThemeList self.setWindowTitle(APP_NAME) self.__about_window = AboutWindow( self, PRE_VER if _IS_MAIN_MODULE else VERSION ) self.__pkgmgr_win = PackageManagerWindow(self) self.__pyitool_win = PyinstallerToolWindow(self) self.__indexmgr_win = IndexUrlManagerWindow(self) self.__pkgdl_win = PackageDownloadWindow(self) self.__cloudfunction_win = CloudFunctionWindow(self) self.__setup_other_widgets() self.__theme_action(self.__config.selected_thm) def display(self): self.resize(*self.__config.window_size) self.showNormal() def __store_window_size(self): if self.isMaximized() or self.isMinimized(): return self.__config.window_size = self.width(), self.height() def closeEvent(self, event: 
QCloseEvent): if ( self.__pkgmgr_win.thread_repo.is_empty() and self.__pyitool_win.thread_repo.is_empty() and self.__pkgdl_win.thread_repo.is_empty() and self.__cloudfunction_win.thread_repo.is_empty() ): event.accept() else: user_messagebox_role = MessageBox( "警告", "有后台任务正在运行,是否强制结束任务?", QMessageBox.Warning, (("accept", "强制退出"), ("reject", "取消")), self, ).exec_() if user_messagebox_role == 0: self.__pkgdl_win.thread_repo.kill_all() self.__pkgmgr_win.thread_repo.kill_all() self.__pyitool_win.thread_repo.kill_all() self.__cloudfunction_win.thread_repo.kill_all() event.accept() else: event.ignore() self.__store_window_size() self.__config.save_config() def keyPressEvent(self, event: QKeyEvent): if event.key() == Qt.Key_Escape: self.close() def _show_about(self): self.__about_window.display() def __setup_other_widgets(self): self.uiPushButton_pkg_mgr.setIcon(QIcon(":/manage.png")) self.uiPushButton_pkg_mgr.clicked.connect(self.__pkgmgr_win.display) self.uiPushButton_pyi_tool.setIcon(QIcon(":/bundle.png")) self.uiPushButton_pyi_tool.clicked.connect(self.__pyitool_win.display) self.uiPushButton_index_mgr.setIcon(QIcon(":/indexurl2.png")) self.uiPushButton_index_mgr.clicked.connect(self.__indexmgr_win.display) self.uiPushButton_pkg_dload.setIcon(QIcon(":/download.png")) self.uiPushButton_pkg_dload.clicked.connect(self.__pkgdl_win.display) self.uiPushButton_cloudfunction.setIcon(QIcon(":/cloudfunction.png")) self.uiPushButton_cloudfunction.clicked.connect( self.__cloudfunction_win.display ) self.uiPushButton_settings.setIcon(QIcon(":/settings.png")) # noinspection PyTypeChecker menu_setstyle = QMenu("主题", self) for theme in self.__themes: action = QAction(theme.name, self) action.triggered.connect(partial(self.__theme_action, theme.index)) menu_setstyle.addAction(action) menu_main_settings = QMenu(self) menu_main_settings.setObjectName("settings_menu") menu_main_settings.addMenu(menu_setstyle) menu_main_settings.addAction("关于", self._show_about) 
self.uiPushButton_settings.setMenu(menu_main_settings) def __theme_action(self, index: int): self.__config.selected_thm = self.__themes.apply_theme(index) def runpykit_and_sysexit(): translator = QTranslator() translator.load(":/trans/widgets_zh-CN.qm") _App.installTranslator(translator) _App.setWindowIcon(QIcon(":/icon2_64.png")) main = MainEntrance() main.display() sys.exit(_App.exec_()) if __name__ == "__main__": _IS_MAIN_MODULE = True runpykit_and_sysexit()
PypiClean
/AyiinXd-0.0.8-cp311-cp311-macosx_10_9_universal2.whl/fipper/types/bots_and_keyboards/inline_keyboard_button.py
from typing import Union

import fipper
from fipper import raw
from fipper import types
from ..object import Object


class InlineKeyboardButton(Object):
    """One button of an inline keyboard.

    You must use exactly one of the optional fields.

    Parameters:
        text (``str``):
            Label text on the button.

        callback_data (``str`` | ``bytes``, *optional*):
            Data to be sent in a callback query to the bot when button is pressed, 1-64 bytes.

        url (``str``, *optional*):
            HTTP url to be opened when button is pressed.

        web_app (:obj:`~fipper.types.WebAppInfo`, *optional*):
            Description of the `Web App <https://core.telegram.org/bots/webapps>`_ that will be launched when the
            user presses the button. The Web App will be able to send an arbitrary message on behalf of the user
            using the method :meth:`~fipper.Client.answer_web_app_query`. Available only in private chats between
            a user and the bot.

        login_url (:obj:`~fipper.types.LoginUrl`, *optional*):
            An HTTP URL used to automatically authorize the user. Can be used as a replacement for the
            `Telegram Login Widget <https://core.telegram.org/widgets/login>`_.

        user_id (``int``, *optional*):
            User id, for links to the user profile.

        switch_inline_query (``str``, *optional*):
            If set, pressing the button will prompt the user to select one of their chats, open that chat and
            insert the bot's username and the specified inline query in the input field. Can be empty, in which
            case just the bot's username will be inserted. Note: This offers an easy way for users to start using
            your bot in inline mode when they are currently in a private chat with it. Especially useful when
            combined with switch_pm… actions – in this case the user will be automatically returned to the chat
            they switched from, skipping the chat selection screen.

        switch_inline_query_current_chat (``str``, *optional*):
            If set, pressing the button will insert the bot's username and the specified inline query in the
            current chat's input field. Can be empty, in which case only the bot's username will be inserted.
            This offers a quick way for the user to open your bot in inline mode in the same chat – good for
            selecting something from multiple options.

        callback_game (:obj:`~fipper.types.CallbackGame`, *optional*):
            Description of the game that will be launched when the user presses the button.
            **NOTE**: This type of button **must** always be the first button in the first row.
    """

    def __init__(
        self,
        text: str,
        callback_data: Union[str, bytes] = None,
        url: str = None,
        web_app: "types.WebAppInfo" = None,
        login_url: "types.LoginUrl" = None,
        user_id: int = None,
        switch_inline_query: str = None,
        switch_inline_query_current_chat: str = None,
        callback_game: "types.CallbackGame" = None
    ):
        super().__init__()

        self.text = str(text)
        self.callback_data = callback_data
        self.url = url
        self.web_app = web_app
        self.login_url = login_url
        self.user_id = user_id
        self.switch_inline_query = switch_inline_query
        self.switch_inline_query_current_chat = switch_inline_query_current_chat
        self.callback_game = callback_game
        # self.pay = pay

    @staticmethod
    def read(b: "raw.base.KeyboardButton"):
        """Map a raw Telegram keyboard button object to the matching high-level InlineKeyboardButton.

        Returns None for raw button types that have no mapping here.
        """
        if isinstance(b, raw.types.KeyboardButtonCallback):
            # Try decode data to keep it as string, but if fails, fallback to bytes so we don't lose any information,
            # instead of decoding by ignoring/replacing errors.
            try:
                data = b.data.decode()
            except UnicodeDecodeError:
                data = b.data

            return InlineKeyboardButton(
                text=b.text,
                callback_data=data
            )

        if isinstance(b, raw.types.KeyboardButtonUrl):
            return InlineKeyboardButton(
                text=b.text,
                url=b.url
            )

        if isinstance(b, raw.types.KeyboardButtonUrlAuth):
            return InlineKeyboardButton(
                text=b.text,
                login_url=types.LoginUrl.read(b)
            )

        if isinstance(b, raw.types.KeyboardButtonUserProfile):
            return InlineKeyboardButton(
                text=b.text,
                user_id=b.user_id
            )

        if isinstance(b, raw.types.KeyboardButtonSwitchInline):
            # same_peer distinguishes "open in current chat" from "pick a chat".
            if b.same_peer:
                return InlineKeyboardButton(
                    text=b.text,
                    switch_inline_query_current_chat=b.query
                )
            else:
                return InlineKeyboardButton(
                    text=b.text,
                    switch_inline_query=b.query
                )

        if isinstance(b, raw.types.KeyboardButtonGame):
            return InlineKeyboardButton(
                text=b.text,
                callback_game=types.CallbackGame()
            )

        if isinstance(b, raw.types.KeyboardButtonWebView):
            return InlineKeyboardButton(
                text=b.text,
                web_app=types.WebAppInfo(
                    url=b.url
                )
            )

    async def write(self, client: "fipper.Client"):
        """Convert this button back to the raw Telegram object for the first optional field that is set.

        The checks run in a fixed priority order; exactly one field is expected
        to be set (see class docstring). Returns None if none is set.
        """
        if self.callback_data is not None:
            # Telegram only wants bytes, but we are allowed to pass strings too, for convenience.
            data = bytes(self.callback_data, "utf-8") if isinstance(self.callback_data, str) else self.callback_data

            return raw.types.KeyboardButtonCallback(
                text=self.text,
                data=data
            )

        if self.url is not None:
            return raw.types.KeyboardButtonUrl(
                text=self.text,
                url=self.url
            )

        if self.login_url is not None:
            # Resolving the bot peer requires a network-capable client.
            return self.login_url.write(
                text=self.text,
                bot=await client.resolve_peer(self.login_url.bot_username or "self")
            )

        if self.user_id is not None:
            return raw.types.InputKeyboardButtonUserProfile(
                text=self.text,
                user_id=await client.resolve_peer(self.user_id)
            )

        if self.switch_inline_query is not None:
            return raw.types.KeyboardButtonSwitchInline(
                text=self.text,
                query=self.switch_inline_query
            )

        if self.switch_inline_query_current_chat is not None:
            return raw.types.KeyboardButtonSwitchInline(
                text=self.text,
                query=self.switch_inline_query_current_chat,
                same_peer=True
            )

        if self.callback_game is not None:
            return raw.types.KeyboardButtonGame(
                text=self.text
            )

        if self.web_app is not None:
            return raw.types.KeyboardButtonWebView(
                text=self.text,
                url=self.web_app.url
            )
PypiClean
/Flask-JWT-Extended-4.5.2.tar.gz/Flask-JWT-Extended-4.5.2/flask_jwt_extended/default_callbacks.py
from http import HTTPStatus
from typing import Any

from flask import jsonify
from flask.typing import ResponseReturnValue

from flask_jwt_extended.config import config


def _json_error(message: str, status: HTTPStatus) -> ResponseReturnValue:
    """Build the standard (json body, status code) error response used by the default handlers."""
    return jsonify({config.error_msg_key: message}), status


def default_additional_claims_callback(userdata: Any) -> dict:
    """Add no extra claims to new tokens.

    :param userdata: the ``identity`` argument given to ``create_access_token``
        / ``create_refresh_token``
    """
    return {}


def default_blocklist_callback(jwt_headers: dict, jwt_data: dict) -> bool:
    """Treat no token as blocklisted."""
    return False


def default_jwt_headers_callback(default_headers) -> dict:
    """Provide no extra JWT headers.

    The token type ("JWT") and signing algorithm headers are filled in later,
    during encoding; returning an empty dict here keeps those defaults.
    """
    return {}


def default_user_identity_callback(userdata: Any) -> Any:
    """Use the given object unchanged as the JWT identity.

    :param userdata: the ``identity`` argument given to ``create_access_token``
        / ``create_refresh_token``
    """
    return userdata


def default_expired_token_callback(
    _expired_jwt_header: dict, _expired_jwt_data: dict
) -> ResponseReturnValue:
    """Respond with a generic 401 when an expired token is presented."""
    return _json_error("Token has expired", HTTPStatus.UNAUTHORIZED)


def default_invalid_token_callback(error_string: str) -> ResponseReturnValue:
    """Respond with a 422 explaining why the presented token is invalid.

    :param error_string: reason the token failed validation
    """
    return _json_error(error_string, HTTPStatus.UNPROCESSABLE_ENTITY)


def default_unauthorized_callback(error_string: str) -> ResponseReturnValue:
    """Respond with a 401 when a protected endpoint is hit without a JWT.

    :param error_string: reason the request is unauthorized
    """
    return _json_error(error_string, HTTPStatus.UNAUTHORIZED)


def default_needs_fresh_token_callback(
    jwt_header: dict, jwt_data: dict
) -> ResponseReturnValue:
    """Respond with a generic 401 when a non-fresh token hits a fresh-required endpoint."""
    return _json_error("Fresh token required", HTTPStatus.UNAUTHORIZED)


def default_revoked_token_callback(
    jwt_header: dict, jwt_data: dict
) -> ResponseReturnValue:
    """Respond with a generic 401 when a revoked token is presented."""
    return _json_error("Token has been revoked", HTTPStatus.UNAUTHORIZED)


def default_user_lookup_error_callback(
    _jwt_header: dict, jwt_data: dict
) -> ResponseReturnValue:
    """Respond with a 401 naming the identity when the user_lookup callback returns None."""
    identity = jwt_data[config.identity_claim_key]
    return _json_error(f"Error loading the user {identity}", HTTPStatus.UNAUTHORIZED)


def default_token_verification_callback(_jwt_header: dict, _jwt_data: dict) -> bool:
    """Accept every token's user claims without further verification."""
    return True


def default_token_verification_failed_callback(
    _jwt_header: dict, _jwt_data: dict
) -> ResponseReturnValue:
    """Respond with a generic 400 when user-claims verification fails."""
    return _json_error("User claims verification failed", HTTPStatus.BAD_REQUEST)


def default_decode_key_callback(jwt_header: dict, jwt_data: dict) -> str:
    """Decode every token with the key from JWT_SECRET_KEY / JWT_PUBLIC_KEY."""
    return config.decode_key


def default_encode_key_callback(identity: Any) -> str:
    """Encode every token with the key from JWT_SECRET_KEY / JWT_PRIVATE_KEY."""
    return config.encode_key
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/gfx.js.uncompressed.js
This is an optimized version of Dojo, built for deployment and not for development. To get sources and documentation, please visit: http://dojotoolkit.org */ //>>built require({cache:{ 'dojox/gfx/_base':function(){ define("dojox/gfx/_base", ["dojo/_base/lang", "dojo/_base/html", "dojo/_base/Color", "dojo/_base/sniff", "dojo/_base/window", "dojo/_base/array","dojo/dom", "dojo/dom-construct","dojo/dom-geometry"], function(lang, html, Color, has, win, arr, dom, domConstruct, domGeom){ // module: // dojox/gfx // summary: // This module contains common core Graphics API used by different graphics renderers. var g = lang.getObject("dojox.gfx", true), b = g._base = {}; /*===== g = dojox.gfx; b = dojox.gfx._base; =====*/ // candidates for dojox.style (work on VML and SVG nodes) g._hasClass = function(/*DomNode*/node, /*String*/classStr){ // summary: // Returns whether or not the specified classes are a portion of the // class list currently applied to the node. // return (new RegExp('(^|\\s+)'+classStr+'(\\s+|$)')).test(node.className) // Boolean var cls = node.getAttribute("className"); return cls && (" " + cls + " ").indexOf(" " + classStr + " ") >= 0; // Boolean }; g._addClass = function(/*DomNode*/node, /*String*/classStr){ // summary: // Adds the specified classes to the end of the class list on the // passed node. var cls = node.getAttribute("className") || ""; if(!cls || (" " + cls + " ").indexOf(" " + classStr + " ") < 0){ node.setAttribute("className", cls + (cls ? " " : "") + classStr); } }; g._removeClass = function(/*DomNode*/node, /*String*/classStr){ // summary: Removes classes from node. 
var cls = node.getAttribute("className"); if(cls){ node.setAttribute( "className", cls.replace(new RegExp('(^|\\s+)' + classStr + '(\\s+|$)'), "$1$2") ); } }; // candidate for dojox.html.metrics (dynamic font resize handler is not implemented here) // derived from Morris John's emResized measurer b._getFontMeasurements = function(){ // summary: // Returns an object that has pixel equivilents of standard font // size values. var heights = { '1em': 0, '1ex': 0, '100%': 0, '12pt': 0, '16px': 0, 'xx-small': 0, 'x-small': 0, 'small': 0, 'medium': 0, 'large': 0, 'x-large': 0, 'xx-large': 0 }; var p; if(has("ie")){ // we do a font-size fix if and only if one isn't applied already. // NOTE: If someone set the fontSize on the HTML Element, this will kill it. win.doc.documentElement.style.fontSize="100%"; } // set up the measuring node. var div = domConstruct.create("div", {style: { position: "absolute", left: "0", top: "-100px", width: "30px", height: "1000em", borderWidth: "0", margin: "0", padding: "0", outline: "none", lineHeight: "1", overflow: "hidden" }}, win.body()); // do the measurements. 
for(p in heights){ div.style.fontSize = p; heights[p] = Math.round(div.offsetHeight * 12/16) * 16/12 / 1000; } win.body().removeChild(div); return heights; //object }; var fontMeasurements = null; b._getCachedFontMeasurements = function(recalculate){ if(recalculate || !fontMeasurements){ fontMeasurements = b._getFontMeasurements(); } return fontMeasurements; }; // candidate for dojox.html.metrics var measuringNode = null, empty = {}; b._getTextBox = function( /*String*/ text, /*Object*/ style, /*String?*/ className){ var m, s, al = arguments.length; var i; if(!measuringNode){ measuringNode = domConstruct.create("div", {style: { position: "absolute", top: "-10000px", left: "0" }}, win.body()); } m = measuringNode; // reset styles m.className = ""; s = m.style; s.borderWidth = "0"; s.margin = "0"; s.padding = "0"; s.outline = "0"; // set new style if(al > 1 && style){ for(i in style){ if(i in empty){ continue; } s[i] = style[i]; } } // set classes if(al > 2 && className){ m.className = className; } // take a measure m.innerHTML = text; if(m["getBoundingClientRect"]){ var bcr = m.getBoundingClientRect(); return {l: bcr.left, t: bcr.top, w: bcr.width || (bcr.right - bcr.left), h: bcr.height || (bcr.bottom - bcr.top)}; }else{ return domGeom.getMarginBox(m); } }; // candidate for dojo.dom var uniqueId = 0; b._getUniqueId = function(){ // summary: returns a unique string for use with any DOM element var id; do{ id = dojo._scopeName + "xUnique" + (++uniqueId); }while(dom.byId(id)); return id; }; lang.mixin(g, { // summary: // defines constants, prototypes, and utility functions for the core Graphics API // default shapes, which are used to fill in missing parameters defaultPath: { // summary: // Defines the default Path prototype object. type: "path", // type: String // Specifies this object is a Path, default value 'path'. path: "" // path: String // The path commands. See W32C SVG 1.0 specification. // Defaults to empty string value. 
}, defaultPolyline: { // summary: // Defines the default PolyLine prototype. type: "polyline", // type: String // Specifies this object is a PolyLine, default value 'polyline'. points: [] // points: Array // An array of point objects [{x:0,y:0},...] defining the default polyline's line segments. Value is an empty array []. }, defaultRect: { // summary: // Defines the default Rect prototype. type: "rect", // type: String // Specifies this default object is a type of Rect. Value is 'rect' x: 0, // x: Number // The X coordinate of the default rectangles position, value 0. y: 0, // y: Number // The Y coordinate of the default rectangle's position, value 0. width: 100, // width: Number // The width of the default rectangle, value 100. height: 100, // height: Number // The height of the default rectangle, value 100. r: 0 // r: Number // The corner radius for the default rectangle, value 0. }, defaultEllipse: { // summary: // Defines the default Ellipse prototype. type: "ellipse", // type: String // Specifies that this object is a type of Ellipse, value is 'ellipse' cx: 0, // cx: Number // The X coordinate of the center of the ellipse, default value 0. cy: 0, // cy: Number // The Y coordinate of the center of the ellipse, default value 0. rx: 200, // rx: Number // The radius of the ellipse in the X direction, default value 200. ry: 100 // ry: Number // The radius of the ellipse in the Y direction, default value 200. }, defaultCircle: { // summary: // An object defining the default Circle prototype. type: "circle", // type: String // Specifies this object is a circle, value 'circle' cx: 0, // cx: Number // The X coordinate of the center of the circle, default value 0. cy: 0, // cy: Number // The Y coordinate of the center of the circle, default value 0. r: 100 // r: Number // The radius, default value 100. }, defaultLine: { // summary: // An pbject defining the default Line prototype. 
type: "line", // type: String // Specifies this is a Line, value 'line' x1: 0, // x1: Number // The X coordinate of the start of the line, default value 0. y1: 0, // y1: Number // The Y coordinate of the start of the line, default value 0. x2: 100, // x2: Number // The X coordinate of the end of the line, default value 100. y2: 100 // y2: Number // The Y coordinate of the end of the line, default value 100. }, defaultImage: { // summary: // Defines the default Image prototype. type: "image", // type: String // Specifies this object is an image, value 'image'. x: 0, // x: Number // The X coordinate of the image's position, default value 0. y: 0, // y: Number // The Y coordinate of the image's position, default value 0. width: 0, // width: Number // The width of the image, default value 0. height: 0, // height:Number // The height of the image, default value 0. src: "" // src: String // The src url of the image, defaults to empty string. }, defaultText: { // summary: // Defines the default Text prototype. type: "text", // type: String // Specifies this is a Text shape, value 'text'. x: 0, // x: Number // The X coordinate of the text position, default value 0. y: 0, // y: Number // The Y coordinate of the text position, default value 0. text: "", // text: String // The text to be displayed, default value empty string. align: "start", // align: String // The horizontal text alignment, one of 'start', 'end', 'center'. Default value 'start'. decoration: "none", // decoration: String // The text decoration , one of 'none', ... . Default value 'none'. rotated: false, // rotated: Boolean // Whether the text is rotated, boolean default value false. kerning: true // kerning: Boolean // Whether kerning is used on the text, boolean default value true. }, defaultTextPath: { // summary: // Defines the default TextPath prototype. type: "textpath", // type: String // Specifies this is a TextPath, value 'textpath'. 
text: "", // text: String // The text to be displayed, default value empty string. align: "start", // align: String // The horizontal text alignment, one of 'start', 'end', 'center'. Default value 'start'. decoration: "none", // decoration: String // The text decoration , one of 'none', ... . Default value 'none'. rotated: false, // rotated: Boolean // Whether the text is rotated, boolean default value false. kerning: true // kerning: Boolean // Whether kerning is used on the text, boolean default value true. }, // default stylistic attributes defaultStroke: { // summary: // A stroke defines stylistic properties that are used when drawing a path. // This object defines the default Stroke prototype. type: "stroke", // type: String // Specifies this object is a type of Stroke, value 'stroke'. color: "black", // color: String // The color of the stroke, default value 'black'. style: "solid", // style: String // The style of the stroke, one of 'solid', ... . Default value 'solid'. width: 1, // width: Number // The width of a stroke, default value 1. cap: "butt", // cap: String // The endcap style of the path. One of 'butt', 'round', ... . Default value 'butt'. join: 4 // join: Number // The join style to use when combining path segments. Default value 4. }, defaultLinearGradient: { // summary: // An object defining the default stylistic properties used for Linear Gradient fills. // Linear gradients are drawn along a virtual line, which results in appearance of a rotated pattern in a given direction/orientation. type: "linear", // type: String // Specifies this object is a Linear Gradient, value 'linear' x1: 0, // x1: Number // The X coordinate of the start of the virtual line along which the gradient is drawn, default value 0. y1: 0, // y1: Number // The Y coordinate of the start of the virtual line along which the gradient is drawn, default value 0. 
x2: 100, // x2: Number // The X coordinate of the end of the virtual line along which the gradient is drawn, default value 100. y2: 100, // y2: Number // The Y coordinate of the end of the virtual line along which the gradient is drawn, default value 100. colors: [ { offset: 0, color: "black" }, { offset: 1, color: "white" } ] // colors: Array // An array of colors at given offsets (from the start of the line). The start of the line is // defined at offest 0 with the end of the line at offset 1. // Default value, [{ offset: 0, color: 'black'},{offset: 1, color: 'white'}], is a gradient from black to white. }, defaultRadialGradient: { // summary: // An object specifying the default properties for RadialGradients using in fills patterns. type: "radial", // type: String // Specifies this is a RadialGradient, value 'radial' cx: 0, // cx: Number // The X coordinate of the center of the radial gradient, default value 0. cy: 0, // cy: Number // The Y coordinate of the center of the radial gradient, default value 0. r: 100, // r: Number // The radius to the end of the radial gradient, default value 100. colors: [ { offset: 0, color: "black" }, { offset: 1, color: "white" } ] // colors: Array // An array of colors at given offsets (from the center of the radial gradient). // The center is defined at offest 0 with the outer edge of the gradient at offset 1. // Default value, [{ offset: 0, color: 'black'},{offset: 1, color: 'white'}], is a gradient from black to white. }, defaultPattern: { // summary: // An object specifying the default properties for a Pattern using in fill operations. type: "pattern", // type: String // Specifies this object is a Pattern, value 'pattern'. x: 0, // x: Number // The X coordinate of the position of the pattern, default value is 0. y: 0, // y: Number // The Y coordinate of the position of the pattern, default value is 0. width: 0, // width: Number // The width of the pattern image, default value is 0. 
height: 0, // height: Number // The height of the pattern image, default value is 0. src: "" // src: String // A url specifing the image to use for the pattern. }, defaultFont: { // summary: // An object specifying the default properties for a Font used in text operations. type: "font", // type: String // Specifies this object is a Font, value 'font'. style: "normal", // style: String // The font style, one of 'normal', 'bold', default value 'normal'. variant: "normal", // variant: String // The font variant, one of 'normal', ... , default value 'normal'. weight: "normal", // weight: String // The font weight, one of 'normal', ..., default value 'normal'. size: "10pt", // size: String // The font size (including units), default value '10pt'. family: "serif" // family: String // The font family, one of 'serif', 'sanserif', ..., default value 'serif'. }, getDefault: (function(){ // summary: // Returns a function used to access default memoized prototype objects (see them defined above). var typeCtorCache = {}; // a memoized delegate() return function(/*String*/ type){ var t = typeCtorCache[type]; if(t){ return new t(); } t = typeCtorCache[type] = new Function(); t.prototype = g[ "default" + type ]; return new t(); } })(), normalizeColor: function(/*dojo.Color|Array|string|Object*/ color){ // summary: // converts any legal color representation to normalized // dojo.Color object return (color instanceof Color) ? 
color : new Color(color); // dojo.Color }, normalizeParameters: function(existed, update){ // summary: // updates an existing object with properties from an 'update' // object // existed: Object // the target object to be updated // update: Object // the 'update' object, whose properties will be used to update // the existed object var x; if(update){ var empty = {}; for(x in existed){ if(x in update && !(x in empty)){ existed[x] = update[x]; } } } return existed; // Object }, makeParameters: function(defaults, update){ // summary: // copies the original object, and all copied properties from the // 'update' object // defaults: Object // the object to be cloned before updating // update: Object // the object, which properties are to be cloned during updating var i = null; if(!update){ // return dojo.clone(defaults); return lang.delegate(defaults); } var result = {}; for(i in defaults){ if(!(i in result)){ result[i] = lang.clone((i in update) ? update[i] : defaults[i]); } } return result; // Object }, formatNumber: function(x, addSpace){ // summary: converts a number to a string using a fixed notation // x: Number // number to be converted // addSpace: Boolean // whether to add a space before a positive number var val = x.toString(); if(val.indexOf("e") >= 0){ val = x.toFixed(4); }else{ var point = val.indexOf("."); if(point >= 0 && val.length - point > 5){ val = x.toFixed(4); } } if(x < 0){ return val; // String } return addSpace ? " " + val : val; // String }, // font operations makeFontString: function(font){ // summary: converts a font object to a CSS font string // font: Object: font object (see dojox.gfx.defaultFont) return font.style + " " + font.variant + " " + font.weight + " " + font.size + " " + font.family; // Object }, splitFontString: function(str){ // summary: // converts a CSS font string to a font object // description: // Converts a CSS font string to a gfx font object. 
The CSS font // string components should follow the W3C specified order // (see http://www.w3.org/TR/CSS2/fonts.html#font-shorthand): // style, variant, weight, size, optional line height (will be // ignored), and family. // str: String // a CSS font string var font = g.getDefault("Font"); var t = str.split(/\s+/); do{ if(t.length < 5){ break; } font.style = t[0]; font.variant = t[1]; font.weight = t[2]; var i = t[3].indexOf("/"); font.size = i < 0 ? t[3] : t[3].substring(0, i); var j = 4; if(i < 0){ if(t[4] == "/"){ j = 6; }else if(t[4].charAt(0) == "/"){ j = 5; } } if(j < t.length){ font.family = t.slice(j).join(" "); } }while(false); return font; // Object }, // length operations cm_in_pt: 72 / 2.54, // cm_in_pt: Number // points per centimeter (constant) mm_in_pt: 7.2 / 2.54, // mm_in_pt: Number // points per millimeter (constant) px_in_pt: function(){ // summary: returns the current number of pixels per point. return g._base._getCachedFontMeasurements()["12pt"] / 12; // Number }, pt2px: function(len){ // summary: converts points to pixels // len: Number // a value in points return len * g.px_in_pt(); // Number }, px2pt: function(len){ // summary: converts pixels to points // len: Number // a value in pixels return len / g.px_in_pt(); // Number }, normalizedLength: function(len) { // summary: converts any length value to pixels // len: String // a length, e.g., '12pc' if(len.length === 0){ return 0; } if(len.length > 2){ var px_in_pt = g.px_in_pt(); var val = parseFloat(len); switch(len.slice(-2)){ case "px": return val; case "pt": return val * px_in_pt; case "in": return val * 72 * px_in_pt; case "pc": return val * 12 * px_in_pt; case "mm": return val * g.mm_in_pt * px_in_pt; case "cm": return val * g.cm_in_pt * px_in_pt; } } return parseFloat(len); // Number }, pathVmlRegExp: /([A-Za-z]+)|(\d+(\.\d+)?)|(\.\d+)|(-\d+(\.\d+)?)|(-\.\d+)/g, // pathVmlRegExp: RegExp // a constant regular expression used to split a SVG/VML path into primitive components 
pathSvgRegExp: /([A-Za-z])|(\d+(\.\d+)?)|(\.\d+)|(-\d+(\.\d+)?)|(-\.\d+)/g, // pathVmlRegExp: RegExp // a constant regular expression used to split a SVG/VML path into primitive components equalSources: function(a /*Object*/, b /*Object*/){ // summary: compares event sources, returns true if they are equal // a: first event source // b: event source to compare against a return a && b && a === b; }, switchTo: function(renderer/*String|Object*/){ // summary: switch the graphics implementation to the specified renderer. // renderer: // Either the string name of a renderer (eg. 'canvas', 'svg, ...) or the renderer // object to switch to. var ns = typeof renderer == "string" ? g[renderer] : renderer; if(ns){ arr.forEach(["Group", "Rect", "Ellipse", "Circle", "Line", "Polyline", "Image", "Text", "Path", "TextPath", "Surface", "createSurface", "fixTarget"], function(name){ g[name] = ns[name]; }); } } }); return g; // defaults object api }); }, 'dojox/gfx/renderer':function(){ define(["./_base","dojo/_base/lang", "dojo/_base/sniff", "dojo/_base/window", "dojo/_base/config"], function(g, lang, has, win, config){ //>> noBuildResolver /*===== dojox.gfx.renderer = { // summary: // This module is an AMD loader plugin that loads the appropriate graphics renderer // implementation based on detected environment and current configuration settings. }; =====*/ var currentRenderer = null; return { load: function(id, require, load){ if(currentRenderer && id != "force"){ load(currentRenderer); return; } var renderer = config.forceGfxRenderer, renderers = !renderer && (lang.isString(config.gfxRenderer) ? 
config.gfxRenderer : "svg,vml,canvas,silverlight").split(","), silverlightObject, silverlightFlag; while(!renderer && renderers.length){ switch(renderers.shift()){ case "svg": // the next test is from https://github.com/phiggins42/has.js if("SVGAngle" in win.global){ renderer = "svg"; } break; case "vml": if(has("ie")){ renderer = "vml"; } break; case "silverlight": try{ if(has("ie")){ silverlightObject = new ActiveXObject("AgControl.AgControl"); if(silverlightObject && silverlightObject.IsVersionSupported("1.0")){ silverlightFlag = true; } }else{ if(navigator.plugins["Silverlight Plug-In"]){ silverlightFlag = true; } } }catch(e){ silverlightFlag = false; }finally{ silverlightObject = null; } if(silverlightFlag){ renderer = "silverlight"; } break; case "canvas": if(win.global.CanvasRenderingContext2D){ renderer = "canvas"; } break; } } if (renderer === 'canvas' && config.canvasEvents !== false) { renderer = "canvasWithEvents"; } if(config.isDebug){ console.log("gfx renderer = " + renderer); } function loadRenderer(){ require(["dojox/gfx/" + renderer], function(module){ g.renderer = renderer; // memorize the renderer module currentRenderer = module; // now load it load(module); }); } if(renderer == "svg" && typeof window.svgweb != "undefined"){ window.svgweb.addOnLoad(loadRenderer); }else{ loadRenderer(); } } }; }); }}}); require(["dojo/i18n"], function(i18n){ i18n._preloadLocalizations("dojox/nls/gfx", []); }); define("dojox/gfx", ["dojo/_base/lang", "./gfx/_base", "./gfx/renderer!"], function(lang, gfxBase, renderer){ // module: // dojox/gfx // summary: // This the root of the Dojo Graphics package gfxBase.switchTo(renderer); return gfxBase; });
PypiClean
/BinTools-0.2.0.zip/BinTools-0.2.0/bintools/dwarf/viewer.py
from bintools.dwarf import DWARF
from bintools.dwarf.info import CU, DIE

import wx


class Info_Frame(wx.Frame):
    """Top-level window that renders a DWARF ``.debug_info`` section.

    Layout, top to bottom: a search box with a Search button, a tree of
    compilation units (CUs) and their DIEs, and a multi-line text box that
    shows the description of the currently selected item.
    """

    def __init__(self, dwarf):
        wx.Frame.__init__(self, None, title="DWARF Viewer", size=(520,1000))
        self.dwarf = dwarf
        # description string -> tree item; index used by notify_search()
        self.node_names = {}

        # Search
        self.search_text = wx.TextCtrl(self)
        # NOTE(review): wx.EVT_TEXT_ENTER normally only fires for TextCtrls
        # created with style=wx.TE_PROCESS_ENTER -- confirm this binding
        # actually triggers on Enter.
        self.Bind(wx.EVT_TEXT_ENTER, self.notify_search, self.search_text)
        self.search_button = wx.Button(self,wx.ID_ANY,label = 'Search')
        self.Bind(wx.EVT_BUTTON, self.notify_search, self.search_button)

        # Tree
        self.tree = wx.TreeCtrl(self, size=(520,200))
        self.root = self.tree.AddRoot(".debug_info")
        for cu in dwarf.info.cus:
            cu_node = self.add_node(self.root, cu.short_description())
            # NOTE(review): wx.TreeItemData was removed in wxPython 4
            # (Phoenix); there SetItemData takes the payload object directly.
            # This code targets classic wxPython -- verify the installed
            # version before upgrading.
            self.tree.SetItemData(cu_node, wx.TreeItemData(cu))
            self.add_die(cu_node, cu.root)
        self.Bind(wx.EVT_TREE_SEL_CHANGED, self.OnSelChanged, self.tree)

        # Info Text Box
        self.die_text = wx.TextCtrl(self, size=(520,200), style=wx.TE_MULTILINE)

        # Layout
        self.hbox = wx.BoxSizer()
        self.hbox.Add(self.search_text , proportion=1, border=0)
        self.hbox.Add(self.search_button, proportion=0, border=0)

        self.vbox= wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.hbox , proportion=0, border=1, flag=wx.EXPAND|wx.ALL)
        self.vbox.Add(self.tree , proportion=1, border=1, flag=wx.EXPAND|wx.ALL)
        self.vbox.Add(self.die_text, proportion=0, border=1, flag=wx.EXPAND|wx.ALL)
        self.SetSizer(self.vbox)

        self.tree.ExpandAll()
        self.tree.EnsureVisible(self.root)
        self.Show()

    def add_node(self, parent, description):
        """Append a tree item under *parent* and index it for searching.

        Note: items sharing a description overwrite each other in the
        node_names index; only the last one remains searchable.
        """
        node = self.tree.AppendItem(parent, description)
        self.node_names[description] = node
        return node

    def add_die(self, parent, die):
        """Recursively add *die* and all of its children under *parent*."""
        new_node = self.add_node(parent, die.short_description())
        self.tree.SetItemData(new_node, wx.TreeItemData(die))
        for c in die.children:
            self.add_die(new_node, c)

    def OnSelChanged(self, evt):
        """Show the selected item's description in the detail text box.

        CUs show their short description; DIEs show their full str() form.
        Items with any other payload clear the box.
        """
        item = self.tree.GetItemData(evt.GetItem()).GetData()
        description = ''
        if isinstance(item, CU):
            description = item.short_description()
        elif isinstance(item, DIE):
            description = str(item)
        self.die_text.SetValue(description)

    def notify_search(self, evt):
        """Select the first tree node whose description contains the query.

        All matches are collected into self.matching_nodes (substring match),
        but only the first match is selected and scrolled into view.
        """
        text = self.search_text.GetValue()
        self.matching_nodes = []
        for name, node in list(self.node_names.items()):
            if text in name:
                self.matching_nodes.append(node)
        # For the moment focus on the first one:
        if self.matching_nodes:
            self.tree.SelectItem(self.matching_nodes[0])
            self.tree.EnsureVisible(self.matching_nodes[0])
        else:
            print('Unable to find any match of the string: %s'%(text))


class Viewer:
    """Minimal wx application wrapper that opens an Info_Frame and blocks
    in the GUI main loop until the window is closed."""

    def __init__(self, dwarf):
        # NOTE(review): wx.PySimpleApp is deprecated and removed in
        # wxPython 4 (Phoenix); wx.App is the replacement there.
        app = wx.PySimpleApp(None)
        frame = Info_Frame(dwarf)
        frame.Show()
        app.MainLoop()
PypiClean
/KiwiCoder-0.3.tar.gz/KiwiCoder-0.3/kiwi/util/graph.py
from collections import deque


class DAG:
    """A directed acyclic graph over arbitrary node objects.

    Nodes are stored by identity in an adjacency mapping and additionally
    indexed by their ``key`` attribute, so every ``*_by_key`` method is a
    convenience wrapper around the corresponding identity-based method.

    Node objects are expected to expose:
      * ``key``    -- a hashable identifier (used by the ``*_by_key`` API)
      * ``done()`` -- completion flag (used only by :meth:`available_nodes`)
    """

    def __init__(self):
        # adjacency mapping: node -> set of direct successor nodes
        self.graph = dict()
        # secondary index: node.key -> node object
        self.key2node = dict()

    def add_node(self, target_node) -> None:
        """Insert *target_node* with no edges; re-adding is a no-op."""
        if target_node in self.graph:
            return
        self.graph[target_node] = set()
        self.key2node[target_node.key] = target_node

    def delete_node(self, target_node) -> None:
        """Remove *target_node* and every edge pointing at it.

        Raises:
            KeyError: if the node is not part of the graph.
        """
        if target_node not in self.graph:
            # bug fix: original message claimed the node "already exist"
            # although this branch means the node is absent
            raise KeyError('node %s does not exist' % target_node)
        self.graph.pop(target_node)
        # bug fix: also drop the secondary index entry so the key does not
        # keep resolving to a node that is no longer in the graph
        self.key2node.pop(target_node.key, None)
        for u in self.graph:
            # discard() tolerates missing members and, unlike the original
            # remove-while-iterating loop, cannot raise RuntimeError for
            # mutating a set during iteration.
            self.graph[u].discard(target_node)

    def delete_node_by_key(self, target_node_key) -> None:
        """Remove the node indexed by *target_node_key*.

        Raises:
            KeyError: if the key is unknown or the node is not in the graph.
        """
        # bug fix: the original raised when the node WAS present and then
        # unconditionally popped it, so this method always raised KeyError.
        self.delete_node(self.key2node[target_node_key])

    def add_edge(self, from_node, to_node) -> None:
        """Add the directed edge from_node -> to_node (idempotent)."""
        self.graph[from_node].add(to_node)

    def add_edge_by_key(self, from_node_key, to_node_key) -> None:
        """Key-based variant of :meth:`add_edge`."""
        self.add_edge(self.key2node[from_node_key], self.key2node[to_node_key])

    def delete_edge(self, from_node, to_node) -> None:
        """Remove the edge from_node -> to_node.

        Raises:
            KeyError: if the edge does not exist.
        """
        if to_node not in self.graph.get(from_node, ()):
            raise KeyError('edge not exist')
        self.graph[from_node].remove(to_node)

    def delete_edge_by_key(self, from_node_key, to_node_key) -> None:
        """Key-based variant of :meth:`delete_edge`."""
        self.delete_edge(self.key2node[from_node_key], self.key2node[to_node_key])

    def is_edge_exist(self, from_node_key, to_node_key) -> bool:
        """Return True when the edge from_node_key -> to_node_key exists."""
        from_node = self.key2node[from_node_key]
        to_node = self.key2node[to_node_key]
        return to_node in self.graph.get(from_node, ())

    def size(self):
        """Number of nodes currently in the graph."""
        return len(self.graph)

    def predecessors(self, node):
        """All nodes with a direct edge into *node*, in insertion order."""
        return [u for u in self.graph if node in self.graph[u]]

    def downstream(self, node):
        """Direct successors of *node*.

        Raises:
            KeyError: if the node is not part of the graph.
        """
        if node not in self.graph:
            raise KeyError('node %s not in graph' % node)
        return list(self.graph[node])

    def available_nodes(self) -> list:
        """Nodes that are ready to run.

        A node is available when it is not ``done()`` and no unfinished node
        still has an edge into it; edges leaving finished nodes no longer
        block their successors.
        """
        in_degree = {u: 0 for u in self.graph}
        for u in self.graph:
            if u.done():
                continue  # finished predecessors do not block anyone
            for v in self.graph[u]:
                in_degree[v] += 1
        return [u for u in self.graph if not u.done() and in_degree[u] == 0]

    def topological_sort(self):
        """Return all nodes in topological order (Kahn's algorithm).

        Raises:
            ValueError: if the graph contains a cycle.
        """
        in_degree = {u: 0 for u in self.graph}
        for u in self.graph:
            for v in self.graph[u]:
                in_degree[v] += 1
        # FIFO queue of current in-degree-zero (source) nodes
        queue = deque(u for u in in_degree if in_degree[u] == 0)
        order = []
        while queue:
            u = queue.popleft()
            order.append(u)
            for v in self.graph[u]:
                in_degree[v] -= 1
                if in_degree[v] == 0:
                    queue.append(v)
        if len(order) != len(self.graph):
            # some nodes were never released: a cycle exists
            raise ValueError('not a acyclic graph')
        return order
PypiClean
/DI_engine-0.4.9-py3-none-any.whl/dizoo/gfootball/entry/gfootball_bc_rule_main.py
"""Entry script: behaviour-cloning (BC) training for gfootball from a
rule-based bot.

Phase 1 collects demonstration transitions with the rule-based expert,
phase 2 trains a naive Q network by BC on those transitions, and an
optional phase 3 reports train/test accuracy of the trained model.
"""
from copy import deepcopy
import os
import torch
import logging
import test_accuracy
from ding.entry import serial_pipeline_bc, collect_demo_data
from ding.config import read_config, compile_config
from ding.policy import create_policy
from dizoo.gfootball.entry.gfootball_bc_config import gfootball_bc_config, gfootball_bc_create_config
from dizoo.gfootball.model.q_network.football_q_network import FootballNaiveQ
from dizoo.gfootball.model.bots.rule_based_bot_model import FootballRuleBaseModel

# Demo data and model artifacts are written next to this script.
path = os.path.abspath(__file__)
dir_path = os.path.dirname(path)
logging.basicConfig(level=logging.INFO)

# Note: in gfootball env, 3000 transitions = one episode
# 3e5 transitions = 100 episode, the memory needs about 180G
seed = 0
gfootball_bc_config.exp_name = 'gfootball_bc_rule_seed0_100eps_epc1000_bs512'
demo_transitions = int(3e5)  # key hyper-parameter
data_path_transitions = dir_path + f'/gfootball_rule_{demo_transitions}-demo-transitions.pkl'

"""
phase 1: collect demo data utilizing rule model
"""
input_cfg = [deepcopy(gfootball_bc_config), deepcopy(gfootball_bc_create_config)]
# NOTE(review): input_cfg is always a list here, so the str branch below is
# dead code kept for symmetry with other DI-engine entry scripts.
if isinstance(input_cfg, str):
    cfg, create_cfg = read_config(input_cfg)
else:
    cfg, create_cfg = input_cfg
cfg = compile_config(cfg, seed=seed, auto=True, create_cfg=create_cfg)
football_rule_base_model = FootballRuleBaseModel()
expert_policy = create_policy(cfg.policy, model=football_rule_base_model, enable_field=['learn', 'collect', 'eval'])

# collect rule/expert demo data
state_dict = expert_policy.collect_mode.state_dict()
collect_config = [deepcopy(gfootball_bc_config), deepcopy(gfootball_bc_create_config)]

# eval demo model
# eval_config = deepcopy(collect_config)
# # if save replay
# eval(eval_config, seed=seed, model=football_rule_base_model, replay_path=dir_path + f'/gfootball_rule_replay/')
# # if not save replay
# eval(eval_config, seed=seed, model=football_rule_base_model, state_dict=state_dict)

# collect demo data
collect_demo_data(
    collect_config,
    seed=seed,
    expert_data_path=data_path_transitions,
    collect_count=demo_transitions,
    model=football_rule_base_model,
    state_dict=state_dict,
)
"""
phase 2: BC training
"""
bc_config = [deepcopy(gfootball_bc_config), deepcopy(gfootball_bc_create_config)]
bc_config[0].policy.learn.train_epoch = 1000  # key hyper-parameter
football_naive_q = FootballNaiveQ()
_, converge_stop_flag = serial_pipeline_bc(
    bc_config, seed=seed, data_path=data_path_transitions, model=football_naive_q
)

if bc_config[0].policy.show_train_test_accuracy:
    """
    phase 3: test accuracy in train dataset and test dataset
    """
    bc_model_path = bc_config[0].policy.bc_model_path

    # load trained bc model
    bc_config[0].policy.learn.batch_size = int(3000)
    state_dict = torch.load(bc_model_path)
    football_naive_q.load_state_dict(state_dict['model'])
    policy = create_policy(cfg.policy, model=football_naive_q, enable_field=['eval'])

    # calculate accuracy in train dataset
    print('==' * 10)
    print('calculate accuracy in train dataset')
    print('==' * 10)
    # Users should add their own bc train_data_path here. Absolute path is recommended.
    train_data_path = dir_path + f'/gfootball_rule_300000-demo-transitions_train.pkl'
    test_accuracy.test_accuracy_in_dataset(train_data_path, cfg.policy.learn.batch_size, policy)

    # calculate accuracy in test dataset
    print('==' * 10)
    print('calculate accuracy in test dataset')
    print('==' * 10)
    # Users should add their own bc test_data_path here. Absolute path is recommended.
    test_data_path = dir_path + f'/gfootball_rule_150000-demo-transitions_test.pkl'
    test_accuracy.test_accuracy_in_dataset(test_data_path, cfg.policy.learn.batch_size, policy)
PypiClean
/Nuitka-1.8.tar.gz/Nuitka-1.8/nuitka/build/inline_copy/lib/scons-3.1.2/SCons/SConsign.py
"""Reading and writing of SCons signature ('.sconsign') databases.

Vendored copy of SCons.SConsign (SCons 3.1.2) inside Nuitka's inline scons.
Two storage backends are provided: a single global database file handled by
the ``DB``/``Get_DataBase`` machinery (the default, backed by SCons.dblite),
and one ``.sconsign`` file per directory handled by ``Dir``/``DirFile``.
Entries are pickled ``SConsignEntry`` objects keyed by file name.
"""
from __future__ import print_function

__revision__ = "src/engine/SCons/SConsign.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"

import SCons.compat

import os
import pickle

import SCons.dblite
import SCons.Warnings

from SCons.compat import PICKLE_PROTOCOL


def corrupt_dblite_warning(filename):
    """Issue a (non-fatal) warning about a corrupt .sconsign database file."""
    SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning,
                        "Ignoring corrupt .sconsign file: %s"%filename)

# Configure the dblite backend to skip (and warn about) corrupt databases
# instead of failing the build.
SCons.dblite.ignore_corrupt_dbfiles = 1
SCons.dblite.corruption_warning = corrupt_dblite_warning

# XXX Get rid of the global array so this becomes re-entrant.
sig_files = []

# Info for the database SConsign implementation (now the default):
# "DataBase" is a dictionary that maps top-level SConstruct directories
# to open database handles.
# "DB_Module" is the Python database module to create the handles.
# "DB_Name" is the base name of the database file (minus any
# extension the underlying DB module will add).
DataBase = {}
DB_Module = SCons.dblite
DB_Name = ".sconsign"
DB_sync_list = []


def Get_DataBase(dir):
    """Return an open ``(database, mode)`` pair for *dir*.

    Searches the top-level SConstruct directory and its Repositories for an
    already-open (or openable) database; only databases opened writable
    (mode != "r") are registered in DB_sync_list for syncing at exit.
    """
    global DataBase, DB_Module, DB_Name
    top = dir.fs.Top
    if not os.path.isabs(DB_Name) and top.repositories:
        mode = "c"
        for d in [top] + top.repositories:
            if dir.is_under(d):
                try:
                    return DataBase[d], mode
                except KeyError:
                    path = d.entry_abspath(DB_Name)
                    try: db = DataBase[d] = DB_Module.open(path, mode)
                    except (IOError, OSError): pass
                    else:
                        if mode != "r":
                            DB_sync_list.append(db)
                        return db, mode
            # Repositories other than the first match are opened read-only.
            mode = "r"
    try:
        return DataBase[top], "c"
    except KeyError:
        db = DataBase[top] = DB_Module.open(DB_Name, "c")
        DB_sync_list.append(db)
        return db, "c"
    except TypeError:
        print("DataBase =", DataBase)
        raise


def Reset():
    """Reset global state.  Used by unit tests that end up using
    SConsign multiple times to get a clean slate for each test."""
    global sig_files, DB_sync_list
    sig_files = []
    DB_sync_list = []

normcase = os.path.normcase


def write():
    """Flush every registered signature file and sync/close all databases.

    Called at the end of the build; dbm backends without sync()/close()
    methods are tolerated.
    """
    global sig_files
    for sig_file in sig_files:
        sig_file.write(sync=0)
    for db in DB_sync_list:
        try:
            syncmethod = db.sync
        except AttributeError:
            pass  # Not all dbm modules have sync() methods.
        else:
            syncmethod()
        try:
            closemethod = db.close
        except AttributeError:
            pass  # Not all dbm modules have close() methods.
        else:
            closemethod()


class SConsignEntry(object):
    """
    Wrapper class for the generic entry in a .sconsign file.
    The Node subclass populates it with attributes as it pleases.

    XXX As coded below, we do expect a '.binfo' attribute to be
        added, but we'll probably generalize this in the next
        refactorings.
    """
    __slots__ = ("binfo", "ninfo", "__weakref__")
    current_version_id = 2

    def __init__(self):
        # Create an object attribute from the class attribute so it ends up
        # in the pickled data in the .sconsign file.
        #_version_id = self.current_version_id
        pass

    def convert_to_sconsign(self):
        self.binfo.convert_to_sconsign()

    def convert_from_sconsign(self, dir, name):
        self.binfo.convert_from_sconsign(dir, name)

    def __getstate__(self):
        # Pickle support: collect both __dict__ and __slots__ attributes
        # (walking the MRO so subclass slots are included as well).
        state = getattr(self, '__dict__', {}).copy()
        for obj in type(self).mro():
            for name in getattr(obj, '__slots__', ()):
                if hasattr(self, name):
                    state[name] = getattr(self, name)

        state['_version_id'] = self.current_version_id
        try:
            del state['__weakref__']
        except KeyError:
            pass
        return state

    def __setstate__(self, state):
        # Restore everything except the bookkeeping keys written above.
        for key, value in state.items():
            if key not in ('_version_id', '__weakref__'):
                setattr(self, key, value)


class Base(object):
    """
    This is the controlling class for the signatures for the collection of
    entries associated with a specific directory.  The actual directory
    association will be maintained by a subclass that is specific to
    the underlying storage method.  This class provides a common set of
    methods for fetching and storing the individual bits of information
    that make up signature entry.
    """
    def __init__(self):
        # entries: filename -> SConsignEntry; dirty: whether a write is
        # needed; to_be_merged: filename -> Node whose ninfo still needs to
        # be merged into its entry (done lazily in merge()).
        self.entries = {}
        self.dirty = False
        self.to_be_merged = {}

    def get_entry(self, filename):
        """
        Fetch the specified entry attribute.
        """
        return self.entries[filename]

    def set_entry(self, filename, obj):
        """
        Set the entry.
        """
        self.entries[filename] = obj
        self.dirty = True

    def do_not_set_entry(self, filename, obj):
        # No-op replacement for set_entry, used for read-only Repositories.
        pass

    def store_info(self, filename, node):
        entry = node.get_stored_info()
        entry.binfo.merge(node.get_binfo())
        self.to_be_merged[filename] = node
        self.dirty = True

    def do_not_store_info(self, filename, node):
        # No-op replacement for store_info, used for read-only Repositories.
        pass

    def merge(self):
        """Fold the pending node info from store_info() into self.entries."""
        for key, node in self.to_be_merged.items():
            entry = node.get_stored_info()
            try:
                ninfo = entry.ninfo
            except AttributeError:
                # This happens with SConf Nodes, because the configuration
                # subsystem takes direct control over how the build decision
                # is made and its information stored.
                pass
            else:
                ninfo.merge(node.get_ninfo())
            self.entries[key] = entry
        self.to_be_merged = {}


class DB(Base):
    """
    A Base subclass that reads and writes signature information
    from a global .sconsign.db* file--the actual file suffix is
    determined by the database module.
    """
    def __init__(self, dir):
        Base.__init__(self)

        self.dir = dir

        db, mode = Get_DataBase(dir)

        # Read using the path relative to the top of the Repository
        # (self.dir.tpath) from which we're fetching the signature
        # information.
        path = normcase(dir.get_tpath())
        try:
            rawentries = db[path]
        except KeyError:
            pass
        else:
            try:
                self.entries = pickle.loads(rawentries)
                if not isinstance(self.entries, dict):
                    # Corrupt payload: reset and let the handler below warn.
                    self.entries = {}
                    raise TypeError
            except KeyboardInterrupt:
                raise
            except Exception as e:
                SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning,
                                    "Ignoring corrupt sconsign entry : %s (%s)\n"%(self.dir.get_tpath(), e))
            for key, entry in self.entries.items():
                entry.convert_from_sconsign(dir, key)

        if mode == "r":
            # This directory is actually under a repository, which means
            # likely they're reaching in directly for a dependency on
            # a file there.  Don't actually set any entry info, so we
            # won't try to write to that .sconsign.dblite file.
            self.set_entry = self.do_not_set_entry
            self.store_info = self.do_not_store_info

        global sig_files
        sig_files.append(self)

    def write(self, sync=1):
        """Pickle self.entries into the global database (optionally syncing)."""
        if not self.dirty:
            return
        self.merge()

        db, mode = Get_DataBase(self.dir)

        # Write using the path relative to the top of the SConstruct
        # directory (self.dir.path), not relative to the top of
        # the Repository; we only write to our own .sconsign file,
        # not to .sconsign files in Repositories.
        path = normcase(self.dir.get_internal_path())
        for key, entry in self.entries.items():
            entry.convert_to_sconsign()
        db[path] = pickle.dumps(self.entries, PICKLE_PROTOCOL)

        if sync:
            try:
                syncmethod = db.sync
            except AttributeError:
                # Not all anydbm modules have sync() methods.
                pass
            else:
                syncmethod()


class Dir(Base):
    """A Base subclass whose entries are unpickled from a single stream."""
    def __init__(self, fp=None, dir=None):
        """
        fp - file pointer to read entries from
        """
        Base.__init__(self)

        if not fp:
            return

        self.entries = pickle.load(fp)
        if not isinstance(self.entries, dict):
            # Corrupt payload: reset before raising so self stays usable.
            self.entries = {}
            raise TypeError

        if dir:
            for key, entry in self.entries.items():
                entry.convert_from_sconsign(dir, key)


class DirFile(Dir):
    """
    Encapsulates reading and writing a per-directory .sconsign file.
    """
    def __init__(self, dir):
        """
        dir - the directory for the file
        """
        self.dir = dir
        self.sconsign = os.path.join(dir.get_internal_path(), '.sconsign')

        try:
            fp = open(self.sconsign, 'rb')
        except IOError:
            fp = None

        try:
            Dir.__init__(self, fp, dir)
        except KeyboardInterrupt:
            raise
        except Exception:
            SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning,
                                "Ignoring corrupt .sconsign file: %s"%self.sconsign)

        try:
            fp.close()
        except AttributeError:
            pass  # fp is None when the file did not exist.

        global sig_files
        sig_files.append(self)

    def write(self, sync=1):
        """
        Write the .sconsign file to disk.

        Try to write to a temporary file first, and rename it if we
        succeed.  If we can't write to the temporary file, it's
        probably because the directory isn't writable (and if so,
        how did we build anything in this directory, anyway?), so
        try to write directly to the .sconsign file as a backup.
        If we can't rename, try to copy the temporary contents back
        to the .sconsign file.  Either way, always try to remove
        the temporary file at the end.
        """
        if not self.dirty:
            return
        self.merge()

        temp = os.path.join(self.dir.get_internal_path(), '.scons%d' % os.getpid())
        try:
            file = open(temp, 'wb')
            fname = temp
        except IOError:
            try:
                file = open(self.sconsign, 'wb')
                fname = self.sconsign
            except IOError:
                return
        for key, entry in self.entries.items():
            entry.convert_to_sconsign()
        pickle.dump(self.entries, file, PICKLE_PROTOCOL)
        file.close()
        if fname != self.sconsign:
            try:
                mode = os.stat(self.sconsign)[0]
                os.chmod(self.sconsign, 0o666)
                os.unlink(self.sconsign)
            except (IOError, OSError):
                # Try to carry on in the face of either OSError
                # (things like permission issues) or IOError (disk
                # or network issues).  If there's a really dangerous
                # issue, it should get re-raised by the calls below.
                pass
            try:
                os.rename(fname, self.sconsign)
            except OSError:
                # An OSError failure to rename may indicate something
                # like the directory has no write permission, but
                # the .sconsign file itself might still be writable,
                # so try writing on top of it directly.  An IOError
                # here, or in any of the following calls, would get
                # raised, indicating something like a potentially
                # serious disk or network issue.
                with open(self.sconsign, 'wb') as f, open(fname, 'rb') as f2:
                    f.write(f2.read())
                os.chmod(self.sconsign, mode)
        try:
            os.unlink(temp)
        except (IOError, OSError):
            pass

# Default factory: one global database (File(None) switches to per-directory).
ForDirectory = DB


def File(name, dbm_module=None):
    """
    Arrange for all signatures to be stored in a global .sconsign.db*
    file.
    """
    global ForDirectory, DB_Name, DB_Module
    if name is None:
        ForDirectory = DirFile
        DB_Module = None
    else:
        ForDirectory = DB
        DB_Name = name
        if dbm_module is not None:
            DB_Module = dbm_module

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
PypiClean
/BenchExec-3.17.tar.gz/BenchExec-3.17/benchexec/runexecutor.py
import argparse import collections import datetime import errno import logging import multiprocessing import os import signal import subprocess import sys import threading import time import tempfile from typing import cast, Optional from benchexec import __version__ from benchexec import baseexecutor from benchexec import BenchExecException from benchexec import containerexecutor from benchexec.cgroups import BLKIO, CPUACCT, CPUSET, FREEZER, MEMORY, find_my_cgroups from benchexec.filehierarchylimit import FileHierarchyLimitThread from benchexec import intel_cpu_energy from benchexec import oomhandler from benchexec import resources from benchexec import systeminfo from benchexec import util sys.dont_write_bytecode = True # prevent creation of .pyc files _WALLTIME_LIMIT_DEFAULT_OVERHEAD = 30 # seconds more than cputime limit _BYTE_FACTOR = 1000 # byte in kilobyte _LOG_SHRINK_MARKER = "\n\n\nWARNING: YOUR LOGFILE WAS TOO LONG, SOME LINES IN THE MIDDLE WERE REMOVED.\n\n\n\n" def main(argv=None): """ A simple command-line interface for the runexecutor module of BenchExec. """ if argv is None: argv = sys.argv # parse options parser = argparse.ArgumentParser( fromfile_prefix_chars="@", description="""Execute a command with resource limits and measurements. Command-line parameters can additionally be read from a file if file name prefixed with '@' is given as argument. 
Part of BenchExec: https://github.com/sosy-lab/benchexec/""", ) resource_args = parser.add_argument_group("optional arguments for resource limits") resource_args.add_argument( "--memlimit", type=util.parse_memory_value, metavar="BYTES", help="memory limit in bytes", ) resource_args.add_argument( "--timelimit", type=util.parse_timespan_value, metavar="SECONDS", help="CPU time limit in seconds", ) resource_args.add_argument( "--softtimelimit", type=util.parse_timespan_value, metavar="SECONDS", help='"soft" CPU time limit in seconds (command will be send the TERM signal at this time)', ) resource_args.add_argument( "--walltimelimit", type=util.parse_timespan_value, metavar="SECONDS", help="wall time limit in seconds (default is CPU time limit plus a few seconds)", ) resource_args.add_argument( "--cores", type=util.parse_int_list, metavar="N,M-K", help="list of CPU cores to use", ) resource_args.add_argument( "--memoryNodes", type=util.parse_int_list, metavar="N,M-K", help="list of memory nodes to use", ) io_args = parser.add_argument_group("optional arguments for run I/O") io_args.add_argument( "--input", metavar="FILE", help="name of file used as stdin for command " "(default: /dev/null; use - for stdin passthrough)", ) io_args.add_argument( "--output", default="output.log", metavar="FILE", help="name of file where command output (stdout and stderr) is written", ) io_args.add_argument( "--maxOutputSize", type=util.parse_memory_value, metavar="BYTES", help="shrink output file to approximately this size if necessary " "(by removing lines from the middle of the output)", ) io_args.add_argument( "--filesCountLimit", type=int, metavar="COUNT", help="maximum number of files the tool may write to (checked periodically, counts only files written in container mode or to temporary directories, only supported with --no-tmpfs)", ) io_args.add_argument( "--filesSizeLimit", type=util.parse_memory_value, metavar="BYTES", help="maximum size of files the tool may write (checked 
periodically, counts only files written in container mode or to temporary directories, only supported with --no-tmpfs)", ) io_args.add_argument( "--skip-cleanup", action="store_false", dest="cleanup", help="do not delete files created by the tool in temp directory", ) container_args = parser.add_argument_group("optional arguments for run container") container_on_args = container_args.add_mutually_exclusive_group() container_on_args.add_argument( "--container", action="store_true", dest="_ignored_container", help="force isolation of run in container (default)", ) container_on_args.add_argument( "--no-container", action="store_false", dest="container", help="disable use of containers for isolation of runs", ) containerexecutor.add_basic_container_args(container_args) containerexecutor.add_container_output_args(container_args) environment_args = parser.add_argument_group( "optional arguments for run environment" ) environment_args.add_argument( "--require-cgroup-subsystem", action="append", default=[], metavar="SUBSYSTEM", help="additional cgroup system that should be enabled for runs " "(may be specified multiple times)", ) environment_args.add_argument( "--set-cgroup-value", action="append", dest="cgroup_values", default=[], metavar="SUBSYSTEM.OPTION=VALUE", help="additional cgroup values that should be set for runs (e.g., 'cpu.shares=1000')", ) environment_args.add_argument( "--dir", metavar="DIR", help="working directory for executing the command (default is current directory)", ) baseexecutor.add_basic_executor_options(parser) options = parser.parse_args(argv[1:]) baseexecutor.handle_basic_executor_options(options, parser) logging.debug("This is runexec %s.", __version__) if options.container: container_options = containerexecutor.handle_basic_container_args( options, parser ) container_output_options = containerexecutor.handle_container_output_args( options, parser ) if container_options["container_tmpfs"] and ( options.filesCountLimit or options.filesSizeLimit 
): parser.error( "Files-count limit and files-size limit are not supported if tmpfs is used in container. Use --no-tmpfs to make these limits work or disable them (typically they are unnecessary if a tmpfs is used)." ) else: container_options = {} container_output_options = {} if options.input == "-": stdin = sys.stdin elif options.input is not None: if options.input == options.output: parser.error("Input and output files cannot be the same.") try: stdin = open(options.input, "rt") except OSError as e: parser.error(str(e)) else: stdin = None cgroup_subsystems = set(options.require_cgroup_subsystem) cgroup_values = {} for arg in options.cgroup_values: try: key, value = arg.split("=", 1) subsystem, option = key.split(".", 1) if not subsystem or not option: raise ValueError() except ValueError: parser.error( f'Cgroup value "{arg}" has invalid format, ' f'needs to be "subsystem.option=value".' ) cgroup_values[(subsystem, option)] = value cgroup_subsystems.add(subsystem) executor = RunExecutor( cleanup_temp_dir=options.cleanup, additional_cgroup_subsystems=list(cgroup_subsystems), use_namespaces=options.container, **container_options, ) # Ensure that process gets killed on interrupt/kill signal, # and avoid KeyboardInterrupt because it could occur anywhere. 
def signal_handler_kill(signum, frame): executor.stop() signal.signal(signal.SIGTERM, signal_handler_kill) signal.signal(signal.SIGQUIT, signal_handler_kill) signal.signal(signal.SIGINT, signal_handler_kill) formatted_args = " ".join(map(util.escape_string_shell, options.args)) logging.info("Starting command %s", formatted_args) if options.container and options.output_directory and options.result_files: logging.info( "Writing output to %s and result files to %s", util.escape_string_shell(options.output), util.escape_string_shell(options.output_directory), ) else: logging.info("Writing output to %s", util.escape_string_shell(options.output)) # actual run execution try: result = executor.execute_run( args=options.args, output_filename=options.output, stdin=stdin, hardtimelimit=options.timelimit, softtimelimit=options.softtimelimit, walltimelimit=options.walltimelimit, cores=options.cores, memlimit=options.memlimit, memory_nodes=options.memoryNodes, cgroupValues=cgroup_values, workingDir=options.dir, maxLogfileSize=options.maxOutputSize, files_count_limit=options.filesCountLimit, files_size_limit=options.filesSizeLimit, **container_output_options, ) finally: if stdin: stdin.close() # exit_code is a util.ProcessExitCode instance exit_code = cast(Optional[util.ProcessExitCode], result.pop("exitcode", None)) def print_optional_result(key, unit="", format_fn=str): if key in result: print(f"{key}={format_fn(result[key])}{unit}") # output results print_optional_result("starttime", unit="", format_fn=datetime.datetime.isoformat) print_optional_result("terminationreason") if exit_code is not None and exit_code.value is not None: print(f"returnvalue={exit_code.value}") if exit_code is not None and exit_code.signal is not None: print(f"exitsignal={exit_code.signal}") print_optional_result("walltime", "s") print_optional_result("cputime", "s") for key in sorted(result.keys()): if key.startswith("cputime-"): print(f"{key}={result[key]:.9f}s") print_optional_result("memory", "B") 
print_optional_result("blkio-read", "B") print_optional_result("blkio-write", "B") energy = intel_cpu_energy.format_energy_results(result.get("cpuenergy")) for energy_key, energy_value in energy.items(): print(f"{energy_key}={energy_value}J") class RunExecutor(containerexecutor.ContainerExecutor): # --- object initialization --- def __init__( self, cleanup_temp_dir=True, additional_cgroup_subsystems=[], *args, **kwargs ): """ Create an instance of of RunExecutor. @param cleanup_temp_dir Whether to remove the temporary directories created for the run. @param additional_cgroup_subsystems List of additional cgroup subsystems that should be required and used for runs. """ super(RunExecutor, self).__init__(*args, **kwargs) self._termination_reason = None self._should_cleanup_temp_dir = cleanup_temp_dir self._cgroup_subsystems = additional_cgroup_subsystems self._energy_measurement = ( intel_cpu_energy.EnergyMeasurement.create_if_supported() ) self._init_cgroups() def _init_cgroups(self): """ This function initializes the cgroups for the limitations and measurements. """ self.cgroups = find_my_cgroups() critical_cgroups = set() for subsystem in self._cgroup_subsystems: self.cgroups.require_subsystem(subsystem) if subsystem not in self.cgroups: critical_cgroups.add(subsystem) logging.error( 'Cgroup subsystem "%s" was required but is not available.', subsystem, ) # Feature is still experimental, do not warn loudly self.cgroups.require_subsystem(BLKIO, log_method=logging.debug) if BLKIO not in self.cgroups: logging.debug("Cannot measure I/O without blkio cgroup.") self.cgroups.require_subsystem(CPUACCT) if CPUACCT not in self.cgroups: logging.warning("Cannot measure CPU time without cpuacct cgroup.") self.cgroups.require_subsystem(FREEZER) if FREEZER not in self.cgroups and not self._use_namespaces: critical_cgroups.add(FREEZER) logging.error( "Cannot reliably kill sub-processes without freezer cgroup " "or container mode. Please enable at least one of them." 
) self.cgroups.require_subsystem(MEMORY) if MEMORY not in self.cgroups: logging.warning("Cannot measure memory consumption without memory cgroup.") else: if systeminfo.has_swap() and ( not self.cgroups.has_value(MEMORY, "memsw.max_usage_in_bytes") ): logging.warning( "Kernel misses feature for accounting swap memory, but machine has swap. " "Memory usage may be measured inaccurately. " "Please set swapaccount=1 on your kernel command line or disable swap with " '"sudo swapoff -a".' ) self.cgroups.require_subsystem(CPUSET) self.cpus = None # to indicate that we cannot limit cores self.memory_nodes = None # to indicate that we cannot limit cores if CPUSET in self.cgroups: # Read available cpus/memory nodes: try: self.cpus = self.cgroups.read_allowed_cpus() except ValueError as e: logging.warning("Could not read available CPU cores from kernel: %s", e) logging.debug("List of available CPU cores is %s.", self.cpus) try: self.memory_nodes = util.parse_int_list( self.cgroups.get_value(CPUSET, "mems") ) except ValueError as e: logging.warning( "Could not read available memory nodes from kernel: %s", str(e) ) logging.debug("List of available memory nodes is %s.", self.memory_nodes) self.cgroups.handle_errors(critical_cgroups) # --- utility functions --- def _set_termination_reason(self, reason): if not self._termination_reason: self._termination_reason = reason # --- setup and cleanup for a single run --- def _setup_cgroups(self, my_cpus, memlimit, memory_nodes, cgroup_values): """ This method creates the CGroups for the following execution. @param my_cpus: None or a list of the CPU cores to use @param memlimit: None or memory limit in bytes @param memory_nodes: None or a list of memory nodes of a NUMA system to use @param cgroup_values: dict of additional values to set @return cgroups: a map of all the necessary cgroups for the following execution. Please add the process of the following execution to all those cgroups! 
""" logging.debug("Setting up cgroups for run.") # Setup cgroups, need a single call to create_cgroup() for all subsystems subsystems = [BLKIO, CPUACCT, FREEZER, MEMORY] + self._cgroup_subsystems if my_cpus is not None or memory_nodes is not None: subsystems.append(CPUSET) subsystems = [s for s in subsystems if s in self.cgroups] cgroups = self.cgroups.create_fresh_child_cgroup(*subsystems) logging.debug("Created cgroups %s.", cgroups) # First, set user-specified values such that they get overridden by our settings if necessary. for (subsystem, option), value in cgroup_values.items(): try: cgroups.set_value(subsystem, option, value) except OSError as e: cgroups.remove() sys.exit( f"{e.strerror} for setting cgroup option {subsystem}.{option} " f'to "{value}" (error code {e.errno}).' ) logging.debug( 'Cgroup value %s.%s was set to "%s", new value is now "%s".', subsystem, option, value, cgroups.get_value(subsystem, option), ) # Setup cpuset cgroup if necessary to limit the CPU cores/memory nodes to be used. if my_cpus is not None: my_cpus_str = ",".join(map(str, my_cpus)) cgroups.set_value(CPUSET, "cpus", my_cpus_str) my_cpus_str = cgroups.get_value(CPUSET, "cpus") logging.debug("Using cpu cores [%s].", my_cpus_str) if memory_nodes is not None: cgroups.set_value(CPUSET, "mems", ",".join(map(str, memory_nodes))) memory_nodesStr = cgroups.get_value(CPUSET, "mems") logging.debug("Using memory nodes [%s].", memory_nodesStr) # Setup memory limit if memlimit is not None: limit = "limit_in_bytes" cgroups.set_value(MEMORY, limit, memlimit) swap_limit = "memsw.limit_in_bytes" # We need swap limit because otherwise the kernel just starts swapping # out our process if the limit is reached. # Some kernels might not have this feature, # which is ok if there is actually no swap. if not cgroups.has_value(MEMORY, swap_limit): if systeminfo.has_swap(): sys.exit( 'Kernel misses feature for accounting swap memory, but machine has swap. 
Please set swapaccount=1 on your kernel command line or disable swap with "sudo swapoff -a".' ) else: try: cgroups.set_value(MEMORY, swap_limit, memlimit) except OSError as e: if e.errno == errno.ENOTSUP: # kernel responds with operation unsupported if this is disabled sys.exit( 'Memory limit specified, but kernel does not allow limiting swap memory. Please set swapaccount=1 on your kernel command line or disable swap with "sudo swapoff -a".' ) raise e memlimit = cgroups.get_value(MEMORY, limit) logging.debug("Effective memory limit is %s bytes.", memlimit) if MEMORY in cgroups: try: # Note that this disables swapping completely according to # https://www.kernel.org/doc/Documentation/cgroups/memory.txt # (unlike setting the global swappiness to 0). # Our process might get killed because of this. cgroups.set_value(MEMORY, "swappiness", "0") except OSError as e: logging.warning( "Could not disable swapping for benchmarked process: %s", e ) return cgroups def _cleanup_temp_dir(self, base_dir): """Delete given temporary directory and all its contents.""" if self._should_cleanup_temp_dir: logging.debug("Cleaning up temporary directory %s.", base_dir) util.rmtree(base_dir, onerror=util.log_rmtree_error) else: logging.info("Skipping cleanup of temporary directory %s.", base_dir) def _setup_environment(self, environments): """Return map with desired environment variables for run.""" # If keepEnv is set, start from a fresh environment, # otherwise with the current one. 
# keepEnv specifies variables to copy from the current environment, # newEnv specifies variables to set to a new value, # additionalEnv specifies variables where some value should be appended, and if environments.get("keepEnv", None) is not None: run_environment = {} else: run_environment = os.environ.copy() for key in environments.get("keepEnv", {}).keys(): if key in os.environ: run_environment[key] = os.environ[key] for key, value in environments.get("newEnv", {}).items(): run_environment[key] = value for key, value in environments.get("additionalEnv", {}).items(): run_environment[key] = os.environ.get(key, "") + value logging.debug("Using additional environment %s.", environments) return run_environment def _setup_output_file(self, output_filename, args, write_header=True): """Open and prepare output file.""" # write command line into outputFile # (without environment variables, they are documented by benchexec) try: parent_dir = os.path.dirname(output_filename) if parent_dir: os.makedirs(parent_dir, exist_ok=True) output_file = open(output_filename, "w") # override existing file except OSError as e: sys.exit("Could not write to output file: " + str(e)) if write_header: output_file.write( " ".join(map(util.escape_string_shell, args)) + "\n\n\n" + "-" * 80 + "\n\n\n" ) output_file.flush() return output_file def _setup_cgroup_time_limit( self, hardtimelimit, softtimelimit, walltimelimit, cgroups, cores, pid_to_kill ): """Start time-limit handler. 
@return None or the time-limit handler for calling cancel() """ if any([hardtimelimit, softtimelimit, walltimelimit]): # Start a timer to periodically check timelimit timelimitThread = _TimelimitThread( cgroups=cgroups, hardtimelimit=hardtimelimit, softtimelimit=softtimelimit, walltimelimit=walltimelimit, pid_to_kill=pid_to_kill, cores=cores, callbackFn=self._set_termination_reason, ) timelimitThread.start() return timelimitThread return None def _setup_cgroup_memory_limit(self, memlimit, cgroups, pid_to_kill): """Start memory-limit handler. @return None or the memory-limit handler for calling cancel() """ if memlimit is not None: try: oomThread = oomhandler.KillProcessOnOomThread( cgroups=cgroups, pid_to_kill=pid_to_kill, callbackFn=self._set_termination_reason, ) oomThread.start() return oomThread except OSError as e: logging.critical( "OSError %s during setup of OomEventListenerThread: %s.", e.errno, e.strerror, ) return None def _setup_file_hierarchy_limit( self, files_count_limit, files_size_limit, temp_dir, cgroups, pid_to_kill ): """Start thread that enforces any file-hiearchy limits.""" if files_count_limit is not None or files_size_limit is not None: file_hierarchy_limit_thread = FileHierarchyLimitThread( self._get_result_files_base(temp_dir), files_count_limit=files_count_limit, files_size_limit=files_size_limit, pid_to_kill=pid_to_kill, callbackFn=self._set_termination_reason, ) file_hierarchy_limit_thread.start() return file_hierarchy_limit_thread return None # --- run execution --- def execute_run( self, args, output_filename, stdin=None, hardtimelimit=None, softtimelimit=None, walltimelimit=None, cores=None, memlimit=None, memory_nodes=None, environments={}, workingDir=None, maxLogfileSize=None, cgroupValues={}, files_count_limit=None, files_size_limit=None, error_filename=None, write_header=True, **kwargs, ): """ This function executes a given command with resource limits, and writes the output to a file. 
Note that this method does not expect to be interrupted by KeyboardInterrupt and does not guarantee proper cleanup if KeyboardInterrupt is raised! If this method runs on the main thread of your program, make sure to set a signal handler for signal.SIGINT that calls stop() instead. @param args: the command line to run @param output_filename: the file where the output should be written to @param stdin: What to uses as stdin for the process (None: /dev/null, a file descriptor, or a file object) @param hardtimelimit: None or the CPU time in seconds after which the tool is forcefully killed. @param softtimelimit: None or the CPU time in seconds after which the tool is sent a kill signal. @param walltimelimit: None or the wall time in seconds after which the tool is forcefully killed (default: hardtimelimit + a few seconds) @param cores: None or a list of the CPU cores to use @param memlimit: None or memory limit in bytes @param memory_nodes: None or a list of memory nodes in a NUMA system to use @param environments: special environments for running the command @param workingDir: None or a directory which the execution should use as working directory @param maxLogfileSize: None or a number of bytes to which the output of the tool should be truncated approximately if there is too much output. @param cgroupValues: dict of additional cgroup values to set (key is tuple of subsystem and option, respective subsystem needs to be enabled in RunExecutor; cannot be used to override values set by BenchExec) @param files_count_limit: None or maximum number of files that may be written. @param files_size_limit: None or maximum size of files that may be written. 
@param error_filename: the file where the error output should be written to (default: same as output_filename) @param write_headers: Write informational headers to the output and the error file if separate (default: True) @param **kwargs: further arguments for ContainerExecutor.execute_run() @return: dict with result of run (measurement results and process exitcode) """ # Check argument values and call the actual method _execute() if stdin == subprocess.PIPE: sys.exit("Illegal value subprocess.PIPE for stdin") elif stdin is None: stdin = subprocess.DEVNULL critical_cgroups = set() if hardtimelimit is not None: if hardtimelimit <= 0: sys.exit(f"Invalid time limit {hardtimelimit}.") if CPUACCT not in self.cgroups: logging.error("Time limit cannot be specified without cpuacct cgroup.") critical_cgroups.add(CPUACCT) if softtimelimit is not None: if softtimelimit <= 0: sys.exit(f"Invalid soft time limit {softtimelimit}.") if hardtimelimit and (softtimelimit > hardtimelimit): sys.exit("Soft time limit cannot be larger than the hard time limit.") if CPUACCT not in self.cgroups: logging.error( "Soft time limit cannot be specified without cpuacct cgroup." 
) critical_cgroups.add(CPUACCT) if walltimelimit is None: if hardtimelimit is not None: walltimelimit = hardtimelimit + _WALLTIME_LIMIT_DEFAULT_OVERHEAD elif softtimelimit is not None: walltimelimit = softtimelimit + _WALLTIME_LIMIT_DEFAULT_OVERHEAD else: if walltimelimit <= 0: sys.exit(f"Invalid wall time limit {walltimelimit}.") if cores is not None: if self.cpus is None: logging.error("Cannot limit CPU cores without cpuset cgroup.") critical_cgroups.add(CPUSET) elif not cores: sys.exit("Cannot execute run without any CPU core.") elif not set(cores).issubset(self.cpus): forbidden_cores = list(set(cores).difference(self.cpus)) sys.exit(f"Cores {forbidden_cores} are not allowed to be used") if memlimit is not None: if memlimit <= 0: sys.exit(f"Invalid memory limit {memlimit}.") if MEMORY not in self.cgroups: logging.error( "Memory limit specified, but cannot be implemented without cgroup support." ) critical_cgroups.add(MEMORY) if memory_nodes is not None: if self.memory_nodes is None: logging.error("Cannot restrict memory nodes without cpuset cgroup.") critical_cgroups.add(CPUSET) elif len(memory_nodes) == 0: sys.exit("Cannot execute run without any memory node.") elif not set(memory_nodes).issubset(self.memory_nodes): forbidden_nodes = list(set(memory_nodes).difference(self.memory_nodes)) sys.exit(f"Memory nodes {forbidden_nodes} are not allowed to be used") if workingDir: if not os.path.exists(workingDir): sys.exit(f"Working directory {workingDir} does not exist.") if not os.path.isdir(workingDir): sys.exit(f"Working directory {workingDir} is not a directory.") if not os.access(workingDir, os.X_OK): sys.exit(f"Permission denied for working directory {workingDir}.") self.cgroups.handle_errors(critical_cgroups) for (subsystem, option), _ in cgroupValues.items(): if subsystem not in self._cgroup_subsystems: sys.exit( f'Cannot set option "{option}" for subsystem "{subsystem}" ' f"that is not enabled. " f'Please specify "--require-cgroup-subsystem {subsystem}".' 
) if not self.cgroups.has_value(subsystem, option): sys.exit( f'Cannot set option "{option}" for subsystem "{subsystem}", ' f"it does not exist." ) if files_count_limit is not None: if files_count_limit < 0: sys.exit(f"Invalid files-count limit {files_count_limit}.") if files_size_limit is not None: if files_size_limit < 0: sys.exit(f"Invalid files-size limit {files_size_limit}.") try: return self._execute( args, output_filename, error_filename, stdin, write_header, hardtimelimit, softtimelimit, walltimelimit, memlimit, cores, memory_nodes, cgroupValues, environments, workingDir, maxLogfileSize, files_count_limit, files_size_limit, **kwargs, ) except BenchExecException as e: logging.critical( "Cannot execute '%s': %s.", util.escape_string_shell(args[0]), e ) return {"terminationreason": "failed"} except OSError as e: logging.critical( "Error while starting '%s' in '%s': %s.", util.escape_string_shell(args[0]), workingDir or ".", e, ) logging.debug("Source of this OSError is:", exc_info=True) return {"terminationreason": "failed"} def _execute( self, args, output_filename, error_filename, stdin, write_header, hardtimelimit, softtimelimit, walltimelimit, memlimit, cores, memory_nodes, cgroup_values, environments, workingDir, max_output_size, files_count_limit, files_size_limit, **kwargs, ): """ This method executes the command line and waits for the termination of it, handling all setup and cleanup, but does not check whether arguments are valid. 
""" timelimitThread = None oomThread = None file_hierarchy_limit_thread = None if self._energy_measurement is not None: # Calculate which packages we should use for energy measurements if cores is None: packages = True # We use all cores and thus all packages else: all_siblings = set( util.flatten( resources.get_cores_of_same_package_as(core) for core in cores ) ) if all_siblings == set(cores): packages = { resources.get_cpu_package_for_core(core) for core in cores } else: # Disable energy measurements because we use only parts of a CPU packages = None def preParent(): """Setup that is executed in the parent process immediately before the actual tool is started.""" # start measurements if self._energy_measurement is not None and packages: self._energy_measurement.start() starttime = util.read_local_time() walltime_before = time.monotonic() return starttime, walltime_before def postParent(preParent_result, exit_code, base_path): """Cleanup that is executed in the parent process immediately after the actual tool terminated.""" # finish measurements starttime, walltime_before = preParent_result walltime = time.monotonic() - walltime_before energy = ( self._energy_measurement.stop() if self._energy_measurement else None ) # Because of https://github.com/sosy-lab/benchexec/issues/433, we want to # kill all processes here. Furthermore, we have experienced cases where the # container would just hang instead of killing all processes when its init # process existed, and killing via cgroups prevents this. # But if we do not have freezer, it is safer to just let all processes run # until the container is killed. if FREEZER in cgroups: cgroups.kill_all_tasks() # For a similar reason, we cancel all limits. Otherwise a run could have # terminationreason=walltime because copying output files took a long time. # Can be removed if #433 gets implemented properly. 
if timelimitThread: timelimitThread.cancel() if oomThread: oomThread.cancel() if file_hierarchy_limit_thread: file_hierarchy_limit_thread.cancel() if exit_code.value not in [0, 1]: _get_debug_output_after_crash(output_filename, base_path) return starttime, walltime, energy def preSubprocess(): """Setup that is executed in the forked process before the actual tool is started.""" os.setpgrp() # make subprocess to group-leader # preparations that are not time critical cgroups = self._setup_cgroups(cores, memlimit, memory_nodes, cgroup_values) temp_dir = tempfile.mkdtemp(prefix="BenchExec_run_") run_environment = self._setup_environment(environments) outputFile = self._setup_output_file( output_filename, args, write_header=write_header ) if error_filename is None: errorFile = outputFile else: errorFile = self._setup_output_file( error_filename, args, write_header=write_header ) pid = None returnvalue = 0 ru_child = None self._termination_reason = None result = collections.OrderedDict() throttle_check = systeminfo.CPUThrottleCheck(cores) swap_check = systeminfo.SwapCheck() logging.debug("Starting process.") try: pid, result_fn = self._start_execution( args=args, stdin=stdin, stdout=outputFile, stderr=errorFile, env=run_environment, cwd=workingDir, temp_dir=temp_dir, memlimit=memlimit, memory_nodes=memory_nodes, cgroups=cgroups, parent_setup_fn=preParent, child_setup_fn=preSubprocess, parent_cleanup_fn=postParent, **kwargs, ) with self.SUB_PROCESS_PIDS_LOCK: self.SUB_PROCESS_PIDS.add(pid) timelimitThread = self._setup_cgroup_time_limit( hardtimelimit, softtimelimit, walltimelimit, cgroups, cores, pid ) oomThread = self._setup_cgroup_memory_limit(memlimit, cgroups, pid) file_hierarchy_limit_thread = self._setup_file_hierarchy_limit( files_count_limit, files_size_limit, temp_dir, cgroups, pid ) # wait until process has terminated returnvalue, ru_child, (starttime, walltime, energy) = result_fn() if starttime: result["starttime"] = starttime result["walltime"] = walltime 
finally: # cleanup steps that need to get executed even in case of failure logging.debug("Process terminated, exit code %s.", returnvalue) with self.SUB_PROCESS_PIDS_LOCK: self.SUB_PROCESS_PIDS.discard(pid) if timelimitThread: timelimitThread.cancel() if oomThread: oomThread.cancel() if file_hierarchy_limit_thread: file_hierarchy_limit_thread.cancel() # Make sure to kill all processes if there are still some # (needs to come early to avoid accumulating more CPU time) cgroups.kill_all_tasks() # normally subprocess closes file, we do this again after all tasks terminated outputFile.close() if errorFile is not outputFile: errorFile.close() # measurements are not relevant in case of failure, but need to come before cgroup cleanup self._get_cgroup_measurements(cgroups, ru_child, result) logging.debug("Cleaning up cgroups.") cgroups.remove() self._cleanup_temp_dir(temp_dir) if timelimitThread: _try_join_cancelled_thread(timelimitThread) if oomThread: _try_join_cancelled_thread(oomThread) if file_hierarchy_limit_thread: _try_join_cancelled_thread(file_hierarchy_limit_thread) if self._energy_measurement: self._energy_measurement.stop() # cleanup steps that are only relevant in case of success if throttle_check.has_throttled(): logging.warning( "CPU throttled itself during benchmarking due to overheating. " "Benchmark results are unreliable!" ) if swap_check.has_swapped(): logging.warning( "System has swapped during benchmarking. " "Benchmark results are unreliable!" 
) if error_filename is not None: _reduce_file_size_if_necessary(error_filename, max_output_size) _reduce_file_size_if_necessary(output_filename, max_output_size) result["exitcode"] = util.ProcessExitCode.from_raw(returnvalue) if energy: if packages is True: result["cpuenergy"] = energy else: result["cpuenergy"] = { pkg: energy[pkg] for pkg in energy if pkg in packages } if self._termination_reason: result["terminationreason"] = self._termination_reason elif memlimit and "memory" in result and result["memory"] >= memlimit: # The kernel does not always issue OOM notifications and thus the OOMHandler # does not always run even in case of OOM. We detect this there and report OOM. result["terminationreason"] = "memory" return result def _get_cgroup_measurements(self, cgroups, ru_child, result): """ This method calculates the exact results for time and memory measurements. It is not important to call this method as soon as possible after the run. """ logging.debug("Getting cgroup measurements.") cputime_wait = ru_child.ru_utime + ru_child.ru_stime if ru_child else 0 cputime_cgroups = None if CPUACCT in cgroups: # We want to read the value from the cgroup. # The documentation warns about outdated values. # So we read twice with 0.1s time difference, # and continue reading as long as the values differ. # This has never happened except when interrupting the script with Ctrl+C, # but just try to be on the safe side here. tmp = cgroups.read_cputime() tmp2 = None while tmp != tmp2: time.sleep(0.1) tmp2 = tmp tmp = cgroups.read_cputime() cputime_cgroups = tmp # Usually cputime_cgroups seems to be 0.01s greater than cputime_wait. # Furthermore, cputime_wait might miss some subprocesses, # therefore we expect cputime_cgroups to be always greater (and more correct). # However, sometimes cputime_wait is a little bit bigger than cputime2. 
# For small values, this is probably because cputime_wait counts since fork, # whereas cputime_cgroups counts only after cgroups.add_task() # (so overhead from runexecutor is correctly excluded in cputime_cgroups). # For large values, a difference may also indicate a problem with cgroups, # for example another process moving our benchmarked process between cgroups, # thus we warn if the difference is substantial and take the larger cputime_wait value. if cputime_wait > 0.5 and (cputime_wait * 0.95) > cputime_cgroups: logging.warning( "Cputime measured by wait was %s, cputime measured by cgroup was only %s, " "perhaps measurement is flawed.", cputime_wait, cputime_cgroups, ) result["cputime"] = cputime_wait else: result["cputime"] = cputime_cgroups for core, coretime in enumerate( cgroups.get_value(CPUACCT, "usage_percpu").split(" ") ): try: coretime = int(coretime) if coretime != 0: # convert nanoseconds to seconds result[f"cputime-cpu{core}"] = coretime / 1_000_000_000 except (OSError, ValueError) as e: logging.debug( "Could not read CPU time for core %s from kernel: %s", core, e ) if MEMORY in cgroups: # This measurement reads the maximum number of bytes of RAM+Swap the process used. # For more details, c.f. the kernel documentation: # https://www.kernel.org/doc/Documentation/cgroups/memory.txt memUsageFile = "memsw.max_usage_in_bytes" if not cgroups.has_value(MEMORY, memUsageFile): memUsageFile = "max_usage_in_bytes" if not cgroups.has_value(MEMORY, memUsageFile): logging.warning("Memory-usage is not available due to missing files.") else: try: result["memory"] = int(cgroups.get_value(MEMORY, memUsageFile)) except OSError as e: if e.errno == errno.ENOTSUP: # kernel responds with operation unsupported if this is disabled logging.critical( "Kernel does not track swap memory usage, cannot measure memory usage." " Please set swapaccount=1 on your kernel command line." 
) else: raise e if BLKIO in cgroups: blkio_bytes_file = "throttle.io_service_bytes" if cgroups.has_value(BLKIO, blkio_bytes_file): bytes_read = 0 bytes_written = 0 for blkio_line in cgroups.get_file_lines(BLKIO, blkio_bytes_file): try: dev_no, io_type, bytes_amount = blkio_line.split(" ") if io_type == "Read": bytes_read += int(bytes_amount) elif io_type == "Write": bytes_written += int(bytes_amount) except ValueError: pass # There are irrelevant lines in this file with a different structure result["blkio-read"] = bytes_read result["blkio-write"] = bytes_written logging.debug( "Resource usage of run: walltime=%s, cputime=%s, cgroup-cputime=%s, memory=%s", result.get("walltime"), cputime_wait, cputime_cgroups, result.get("memory", None), ) # --- other public functions --- def stop(self): self._set_termination_reason("killed") super(RunExecutor, self).stop() def _reduce_file_size_if_necessary(fileName, maxSize): """ This function shrinks a file. We remove only the middle part of a file, the file-start and the file-end remain unchanged. """ fileSize = os.path.getsize(fileName) if maxSize is None: logging.debug( "Size of logfile '%s' is %s bytes, size limit disabled.", fileName, fileSize ) return # disabled, nothing to do if fileSize < (maxSize + 500): logging.debug( "Size of logfile '%s' is %s bytes, nothing to do.", fileName, fileSize ) return logging.warning( "Logfile '%s' is too big (size %s bytes). Removing lines.", fileName, fileSize ) util.shrink_text_file(fileName, maxSize, _LOG_SHRINK_MARKER) def _get_debug_output_after_crash(output_filename, base_path): """ Segmentation faults and some memory failures reference a file with more information (hs_err_pid_*). We append this file to the log. The format that we expect is a line "# An error report file with more information is saved as:" and the file name of the dump file on the next line. 
    @param output_filename name of log file with tool output
    @param base_path string that needs to be prepended to paths for lookup of files
    """
    logging.debug("Analysing output for crash info.")
    foundDumpFile = False
    try:
        with open(output_filename, "r+b") as outputFile:
            for line in outputFile:
                if foundDumpFile:
                    # Previous line announced a dump file; this line holds its name.
                    # Strip the leading "# " marker and trailing newline.
                    dumpFileName = base_path.encode() + line.strip(b" #\n")
                    outputFile.seek(0, os.SEEK_END)  # jump to end of log file
                    try:
                        with open(dumpFileName, "rb") as dumpFile:
                            util.copy_all_lines_from_to(dumpFile, outputFile)
                        # Dump content is now preserved in the log, so remove the file.
                        os.remove(dumpFileName)
                    except OSError as e:
                        logging.warning(
                            "Could not append additional segmentation fault information "
                            "from %s (%s)",
                            dumpFileName,
                            e.strerror,
                        )
                    break
                try:
                    if line.startswith(
                        b"# An error report file with more information is saved as:"
                    ):
                        logging.debug("Going to append error report file")
                        # The *next* line contains the dump file name.
                        foundDumpFile = True
                except UnicodeDecodeError:
                    pass  # ignore invalid chars from logfile
    except OSError as e:
        logging.warning(
            "Could not analyze tool output for crash information (%s)", e.strerror
        )


def _try_join_cancelled_thread(thread):
    """Join a thread, but if the thread doesn't terminate for some time,
    ignore it instead of waiting infinitely."""
    thread.join(10)  # grace period of 10 seconds
    if thread.is_alive():
        logging.warning(
            "Thread %s did not terminate within grace period after cancellation",
            thread.name,
        )


class _TimelimitThread(threading.Thread):
    """
    Thread that periodically checks whether the given process has already
    reached its timelimit. After this happens, the process is terminated.
""" def __init__( self, cgroups, hardtimelimit, softtimelimit, walltimelimit, pid_to_kill, cores, callbackFn=lambda reason: None, ): super(_TimelimitThread, self).__init__() self.name = "TimelimitThread-" + self.name self.finished = threading.Event() if hardtimelimit or softtimelimit: assert CPUACCT in cgroups assert walltimelimit is not None if cores: self.cpuCount = len(cores) else: try: self.cpuCount = multiprocessing.cpu_count() except NotImplementedError: self.cpuCount = 1 self.cgroups = cgroups # set timelimits to large dummy value if no limit is given self.timelimit = hardtimelimit or (60 * 60 * 24 * 365 * 100) self.softtimelimit = softtimelimit or (60 * 60 * 24 * 365 * 100) self.latestKillTime = time.monotonic() + walltimelimit self.pid_to_kill = pid_to_kill self.callback = callbackFn def read_cputime(self): while True: try: return self.cgroups.read_cputime() except ValueError: # Sometimes the kernel produces strange values with linebreaks in them time.sleep(1) def run(self): while not self.finished.is_set(): usedCpuTime = self.read_cputime() if CPUACCT in self.cgroups else 0 remainingCpuTime = self.timelimit - usedCpuTime remainingSoftCpuTime = self.softtimelimit - usedCpuTime remainingWallTime = self.latestKillTime - time.monotonic() logging.debug( "TimelimitThread for process %s: used CPU time: %s, remaining CPU time: %s, " "remaining soft CPU time: %s, remaining wall time: %s.", self.pid_to_kill, usedCpuTime, remainingCpuTime, remainingSoftCpuTime, remainingWallTime, ) if remainingCpuTime <= 0: self.callback("cputime") logging.debug( "Killing process %s due to CPU time timeout.", self.pid_to_kill ) util.kill_process(self.pid_to_kill) self.finished.set() return if remainingWallTime <= 0: self.callback("walltime") logging.warning( "Killing process %s due to wall time timeout.", self.pid_to_kill ) util.kill_process(self.pid_to_kill) self.finished.set() return if remainingSoftCpuTime <= 0: self.callback("cputime-soft") # soft time limit violated, ask 
                # process to terminate (continuation of the comment above:
                # the soft limit was violated, so politely ask via SIGTERM)
                util.kill_process(self.pid_to_kill, signal.SIGTERM)
                # From now on only the hard limit matters; disable the soft
                # limit by raising it to the hard limit so we don't re-signal.
                self.softtimelimit = self.timelimit

            # Sleep until the earliest limit could possibly expire.
            # CPU-time remainders are divided by cpuCount because CPU time can
            # accrue up to cpuCount times faster than wall time; +1s of slack
            # ensures we wake up slightly *after* a limit has expired.
            remainingTime = min(
                remainingCpuTime / self.cpuCount,
                remainingSoftCpuTime / self.cpuCount,
                remainingWallTime,
            )
            self.finished.wait(remainingTime + 1)

    def cancel(self):
        # Signal run() to exit its loop (also unblocks the wait() above).
        self.finished.set()


if __name__ == "__main__":
    main()
PypiClean
/BIA_OBS-1.0.3.tar.gz/BIA_OBS-1.0.3/BIA/static/dist/node_modules/tailwindcss/lib/util/normalizeScreens.js
* @typedef {object} Screen * @property {string} name * @property {boolean} not * @property {ScreenValue[]} values */ /** * A function that normalizes the various forms that the screens object can be * provided in. * * Input(s): * - ['100px', '200px'] // Raw strings * - { sm: '100px', md: '200px' } // Object with string values * - { sm: { min: '100px' }, md: { max: '100px' } } // Object with object values * - { sm: [{ min: '100px' }, { max: '200px' }] } // Object with object array (multiple values) * * Output(s): * - [{ name: 'sm', values: [{ min: '100px', max: '200px' }] }] // List of objects, that contains multiple values * * @returns {Screen[]} */ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); function _export(target, all) { for(var name in all)Object.defineProperty(target, name, { enumerable: true, get: all[name] }); } _export(exports, { normalizeScreens: ()=>normalizeScreens, isScreenSortable: ()=>isScreenSortable, compareScreens: ()=>compareScreens, toScreen: ()=>toScreen }); function normalizeScreens(screens, root = true) { if (Array.isArray(screens)) { return screens.map((screen)=>{ if (root && Array.isArray(screen)) { throw new Error("The tuple syntax is not supported for `screens`."); } if (typeof screen === "string") { return { name: screen.toString(), not: false, values: [ { min: screen, max: undefined } ] }; } let [name, options] = screen; name = name.toString(); if (typeof options === "string") { return { name, not: false, values: [ { min: options, max: undefined } ] }; } if (Array.isArray(options)) { return { name, not: false, values: options.map((option)=>resolveValue(option)) }; } return { name, not: false, values: [ resolveValue(options) ] }; }); } return normalizeScreens(Object.entries(screens !== null && screens !== void 0 ? 
screens : {}), false); } function isScreenSortable(screen) { if (screen.values.length !== 1) { return { result: false, reason: "multiple-values" }; } else if (screen.values[0].raw !== undefined) { return { result: false, reason: "raw-values" }; } else if (screen.values[0].min !== undefined && screen.values[0].max !== undefined) { return { result: false, reason: "min-and-max" }; } return { result: true, reason: null }; } function compareScreens(type, a, z) { let aScreen = toScreen(a, type); let zScreen = toScreen(z, type); let aSorting = isScreenSortable(aScreen); let bSorting = isScreenSortable(zScreen); // These cases should never happen and indicate a bug in Tailwind CSS itself if (aSorting.reason === "multiple-values" || bSorting.reason === "multiple-values") { throw new Error("Attempted to sort a screen with multiple values. This should never happen. Please open a bug report."); } else if (aSorting.reason === "raw-values" || bSorting.reason === "raw-values") { throw new Error("Attempted to sort a screen with raw values. This should never happen. Please open a bug report."); } else if (aSorting.reason === "min-and-max" || bSorting.reason === "min-and-max") { throw new Error("Attempted to sort a screen with both min and max values. This should never happen. Please open a bug report."); } // Let the sorting begin let { min: aMin , max: aMax } = aScreen.values[0]; let { min: zMin , max: zMax } = zScreen.values[0]; // Negating screens flip their behavior. Basically `not min-width` is `max-width` if (a.not) [aMin, aMax] = [ aMax, aMin ]; if (z.not) [zMin, zMax] = [ zMax, zMin ]; aMin = aMin === undefined ? aMin : parseFloat(aMin); aMax = aMax === undefined ? aMax : parseFloat(aMax); zMin = zMin === undefined ? zMin : parseFloat(zMin); zMax = zMax === undefined ? zMax : parseFloat(zMax); let [aValue, zValue] = type === "min" ? 
[
        aMin,
        zMin
    ] : [
        zMax,
        aMax
    ];
    return aValue - zValue;
}
/**
 * Coerce a value into a `Screen` object.
 *
 * Objects are assumed to already be screens and returned unchanged; any other
 * value is wrapped as an "arbitrary-screen" whose single values entry stores
 * the value under the given `type` key ("min" or "max", see compareScreens).
 */
function toScreen(value, type) {
    if (typeof value === "object") {
        return value;
    }
    return {
        name: "arbitrary-screen",
        values: [
            {
                [type]: value
            }
        ]
    };
}
/**
 * Normalize one screen options object to the `{ min, max, raw }` shape.
 * The legacy "min-width" key is accepted and used as the default for `min`.
 * Called with no argument, returns `{ min: undefined, max: undefined, raw: undefined }`.
 */
function resolveValue({ "min-width": _minWidth , min =_minWidth , max , raw } = {}) {
    return {
        min,
        max,
        raw
    };
}
PypiClean
/NEMO_CE-1.6.12-py3-none-any.whl/NEMO/model_tree.py
from typing import List, Dict

from django.db.models import QuerySet


class TreeItem:
    """
    In-memory node used by ModelTreeHelper.

    Wraps a single model instance (``item``) together with precomputed links
    to related nodes (``ancestors``, ``descendants``, ``children``) so that
    tree traversals do not require further database queries.
    """

    # Class-level defaults kept for backward compatibility with any code
    # that reads them off the class itself.
    item_type = None
    id: int = None
    item = None
    name: str = None
    tree_category: str = None
    category: str = None
    ancestors: List = []
    descendants: List = []
    children: List = []
    child_items: List = []
    is_leaf: bool = False
    is_root: bool = False

    def __init__(self):
        # Bug fix: the list attributes previously existed only as class-level
        # defaults, so every instance that never reassigned them (e.g. the
        # children/descendants/child_items of leaf nodes) shared the *same*
        # mutable list objects. Give each instance its own fresh lists.
        self.ancestors = []
        self.descendants = []
        self.children = []
        self.child_items = []

    def ancestor_ids(self, include_self=False):
        """Return the ids of all ancestors, optionally including this node's own id."""
        ids = [ancestor.id for ancestor in self.ancestors]
        if include_self:
            ids.append(self.id)
        return ids

    def __str__(self):
        return self.name


class ModelTreeHelper:
    """
    Helper class for trees with models.
    Create a tree in memory with links to ancestors and descendants to help
    limit database queries.
    """

    def __init__(self, model_class, parent_field: str = "parent", children_field: str = "children", only_fields=None):
        """
        :param model_class: a model *instance* (see get_area_model_tree which
            passes ``Area()``); its class is recovered with ``type()``.
        :param parent_field: name of the FK field pointing to the parent row.
        :param children_field: name of the reverse relation to child rows.
        :param only_fields: extra model field names to copy onto each TreeItem
            (in addition to "name" and "category").
        """
        self.only_fields = ["name", "category"]
        if only_fields is not None:
            self.only_fields.extend(only_fields)
        self.only_fields = list(set(self.only_fields))
        query_set = type(model_class).objects.all().prefetch_related(children_field).only(*self.only_fields)
        self.roots: List = list(query_set.filter(**{f"{parent_field}__isnull": True}))
        # Build the leaves queryset once and reuse it; querysets are lazy and
        # values_list() operates on a clone, so the reuse is safe.
        self.leaves_queryset: QuerySet = query_set.filter(**{f"{children_field}__isnull": True})
        self.leaf_ids: List[int] = list(self.leaves_queryset.values_list("id", flat=True))
        # Set for O(1) membership tests during the recursive build
        # (leaf_ids itself stays a list for backward compatibility).
        self._leaf_id_set = set(self.leaf_ids)
        self.items: Dict[int, TreeItem] = {}
        self.build_tree(model_class, parent_field, children_field, [], None)
        # Second pass: now that every node is registered in self.items,
        # resolve children and descendants to TreeItem objects.
        for tree_item in self.items.values():
            if tree_item.is_leaf:
                continue
            tree_item.children = [self.items[child.id] for child in tree_item.child_items]
            tree_item.descendants = [
                other for other in self.items.values() if tree_item in other.ancestors
            ]

    def build_tree(self, model_class, parent_field, children_field, ancestors: List[TreeItem], items=None):
        """
        Recursively create TreeItem wrappers, depth first.

        :param ancestors: TreeItems from the root down to the parent of ``items``.
        :param items: model instances to process; None means start from the roots.
        """
        is_root = items is None
        if is_root:
            items = self.roots
        for item in items:
            # Slash-separated path of ancestor names (plus this item's own
            # category, if any), accumulated from the root.
            tree_category = "/".join([ancestor.name for ancestor in ancestors])
            if item.category:
                tree_category += "/" + item.category if tree_category else item.category
            tree_item = TreeItem()
            tree_item.id = item.id
            tree_item.name = item.name
            tree_item.item = item
            tree_item.item_type = type(model_class)
            tree_item.tree_category = tree_category
            tree_item.category = item.category
            tree_item.ancestors = ancestors
            tree_item.is_root = is_root
            # Copy the requested model fields straight onto the tree item
            for field in self.only_fields:
                setattr(tree_item, field, getattr(item, field))
            if item.id in self._leaf_id_set:
                tree_item.is_leaf = True
            else:
                children = list(
                    type(model_class)
                    .objects.filter(**{f"{parent_field}__id": item.id})
                    .prefetch_related(children_field)
                    .only(*self.only_fields)
                )
                tree_item.child_items = children
                tree_item.is_leaf = False
                new_ancestors = ancestors.copy()
                new_ancestors.append(tree_item)
                self.build_tree(model_class, parent_field, children_field, new_ancestors, children)
            self.items[item.id] = tree_item

    def get_areas(self, ids: List[int]) -> List[TreeItem]:
        """Return the TreeItems for the given ids (raises KeyError on unknown ids)."""
        return [self.items[pk] for pk in ids]

    def get_ancestor_areas(self, tree_items: List[TreeItem], include_self=False):
        """Return the deduplicated ancestors of all the given tree items."""
        ancestor_ids = {pk for tree_item in tree_items for pk in tree_item.ancestor_ids(include_self)}
        return self.get_areas(list(ancestor_ids))

    def get_area(self, pk: int) -> TreeItem:
        """Return the TreeItem with the given id, or None if unknown."""
        return self.items.get(pk, None)


def get_area_model_tree():
    """Build the in-memory tree of NEMO Areas.

    The Area import is done inside the function, presumably to avoid a
    circular import with NEMO.models — confirm before moving it to the top.
    """
    from NEMO.models import Area

    only_fields = [
        "name",
        "category",
        "maximum_capacity",
        "reservation_warning",
        "count_staff_in_occupancy",
        "count_service_personnel_in_occupancy",
    ]
    return ModelTreeHelper(Area(), "parent_area", "area_children_set", only_fields)
PypiClean
/LrnXPAnaToolbox-0.4.1.tar.gz/LrnXPAnaToolbox-0.4.1/CONTRIBUTING.rst
.. highlight:: shell

============
Contributing
============

Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.

You can contribute in many ways:

Types of Contributions
----------------------

Report Bugs
~~~~~~~~~~~

Report bugs at https://github.com/robachkaya/LrnXPAnaToolbox/issues.

If you are reporting a bug, please include:

* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.

Fix Bugs
~~~~~~~~

Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
wanted" is open to whoever wants to implement it.

Implement Features
~~~~~~~~~~~~~~~~~~

Look through the GitHub issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.

Write Documentation
~~~~~~~~~~~~~~~~~~~

LrnXPAnaToolbox could always use more documentation, whether as part of the
official LrnXPAnaToolbox docs, in docstrings, or even on the web in blog posts,
articles, and such.

Submit Feedback
~~~~~~~~~~~~~~~

The best way to send feedback is to file an issue at https://github.com/robachkaya/LrnXPAnaToolbox/issues.

If you are proposing a feature:

* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
  are welcome :)

Get Started!
------------

Ready to contribute? Here's how to set up `LrnXPAnaToolbox` for local development.

1. Fork the `LrnXPAnaToolbox` repo on GitHub.
2. Clone your fork locally::

    $ git clone git@github.com:your_name_here/LrnXPAnaToolbox.git

3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::

    $ mkvirtualenv LrnXPAnaToolbox
    $ cd LrnXPAnaToolbox/
    $ python setup.py develop

4.
Create a branch for local development:: $ git checkout -b name-of-your-bugfix-or-feature Now you can make your changes locally. 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: $ flake8 LrnXPAnaToolbox tests $ python setup.py test or pytest $ tox To get flake8 and tox, just pip install them into your virtualenv. 6. Commit your changes and push your branch to GitHub:: $ git add . $ git commit -m "Your detailed description of your changes." $ git push origin name-of-your-bugfix-or-feature 7. Submit a pull request through the GitHub website. Pull Request Guidelines ----------------------- Before you submit a pull request, check that it meets these guidelines: 1. The pull request should include tests. 2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and add the feature to the list in README.rst. 3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check https://travis-ci.com/robachkaya/LrnXPAnaToolbox/pull_requests and make sure that the tests pass for all supported Python versions. Tips ---- To run a subset of tests:: $ python -m unittest tests.test_LrnXPAnaToolbox Deploying --------- A reminder for the maintainers on how to deploy. Make sure all your changes are committed (including an entry in HISTORY.rst). Then run:: $ bump2version patch # possible: major / minor / patch $ git push $ git push --tags Travis will then deploy to PyPI if tests pass.
PypiClean
/Inventicode-1.0.2.tar.gz/Inventicode-1.0.2/inventicode/migrations/0007_auto_20170223_1948.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('inventicode', '0006_auto_20170223_1946'), ] operations = [ migrations.AlterField( model_name='printpaper', name='column_number', field=models.FloatField(blank=True, default=1, help_text='(mm)', verbose_name='Number of columns'), ), migrations.AlterField( model_name='printpaper', name='column_space', field=models.FloatField(blank=True, default=5, help_text='(mm)', verbose_name='Space between label columns'), ), migrations.AlterField( model_name='printpaper', name='label_height', field=models.FloatField(blank=True, default=33.9, help_text='(mm)', verbose_name='Label height'), ), migrations.AlterField( model_name='printpaper', name='label_width', field=models.FloatField(blank=True, default=63.5, help_text='(mm)', verbose_name='Label width'), ), migrations.AlterField( model_name='printpaper', name='margin_left', field=models.FloatField(blank=True, help_text='(mm)', verbose_name='Paper margin left'), ), migrations.AlterField( model_name='printpaper', name='margin_top', field=models.FloatField(blank=True, help_text='(mm)', verbose_name='Paper margin top'), ), migrations.AlterField( model_name='printpaper', name='paper_height', field=models.FloatField(blank=True, default=297, help_text='(mm)', verbose_name='Paper height'), ), migrations.AlterField( model_name='printpaper', name='paper_width', field=models.FloatField(blank=True, default=210, help_text='(mm)', verbose_name='Paper width'), ), migrations.AlterField( model_name='printpaper', name='row_number', field=models.FloatField(blank=True, default=1, help_text='(mm)', verbose_name='Number of rows'), ), migrations.AlterField( model_name='printpaper', name='row_space', field=models.FloatField(blank=True, default=5, help_text='(mm)', verbose_name='Space between label rows'), ), ]
PypiClean
/Catactor-0.1.2.tar.gz/Catactor-0.1.2/script/visualization/compare_down_sampled_auc.py
"""Collect per-marker AUROC results and draw comparison box/swarm plots.

Command line:
    python compare_down_sampled_auc.py exp <dir>          # plain AUROC tables
    python compare_down_sampled_auc.py down_sample <dir>  # down-sampling sweep

Fixes relative to the original script:
  * ``collect_auc_exp_marker_set`` now accepts an optional ``header_list``
    argument -- the original definition took a single argument, but
    ``collect_auc_from_exp`` called it with two, raising ``TypeError``.
  * ``PMARKER`` was referenced (``global`` + indexing) but never defined,
    raising ``NameError``; it is now defined next to ``AMARKER``.
  * The progress ``print`` in ``collect_auc_from_down_sampled_exp`` named a
    ``_simulate_add_noise_`` file while the code actually reads the
    ``_simulate_down_sample_`` file; the log now matches the file read.
"""
import pandas as pd
import datetime
from scipy import sparse
import scipy.io
from scipy.stats import zscore, wilcoxon, spearmanr
from sklearn.preprocessing import binarize, normalize
from sklearn import metrics
from itertools import cycle
from sklearn.metrics import roc_auc_score
import os
import pickle
import seaborn as sns
import subprocess
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import sys
import re
import math
from scipy.spatial import distance
from scipy.cluster import hierarchy
import scanpy as sc
from itertools import combinations
from functools import reduce
from scipy.cluster.hierarchy import linkage
import scipy.spatial.distance as ssd
from matplotlib import cm

GENE_SIZES = [10, 50, 100, 200, 500, 1000]
SET = 5
MSHAPES = ['o', 'P', 's', 's', '.', '^', '^', '^', '^', '^', '^']
USHAPES = ['o', 'P', 's', 's', '.', 'v', '^', '>', '<', 'D', 'd']
ALL_DATA = True
SCANPY_OBJS = {'gene': ['GSE100033_gene_id_order_gene__all_bin_scanpy_obj_with_feat.pyn',
                        'GSE111586_gene_id_order_gene__all_scanpy_obj.pyn',
                        'GSE123576_gene_id_order_gene__all_scanpy_obj.pyn',
                        'GSE126074_gene_id_order_gene__all_scanpy_obj.pyn',
                        'GSE127257_distal_id_gene_order__all_scanpy_obj.pyn',
                        'GSE1303990_gene_id_order_gene__all_scanpy_obj.pyn',
                        'BICCN2_gene_id_order_gene__all_scanpy_obj.pyn'],
               'distal': ['GSE100033_distal_id_order_distal__all_bin_scanpy_obj_with_feat.pyn',
                          'GSE111586_distal_id_order_distal__all_scanpy_obj.pyn',
                          'GSE123576_distal_id_order_distal__all_scanpy_obj.pyn',
                          'GSE126074_distal_id_order_distal__all_scanpy_obj.pyn',
                          'GSE127257_distal_id_gene_order__all_scanpy_obj.pyn',
                          'GSE1303990_distal_id_order_distal__all_scanpy_obj.pyn',
                          'BICCN2_distal_id_order_distal__all_scanpy_obj.pyn'],
               'proximal': ['GSE100033_proximal_id_proximal__all_bin_scanpy_obj_with_feat.pyn',
                            'GSE111586_proximal_id_order_proximal__all_scanpy_obj.pyn',
                            'GSE123576_proximal_id_order_proximal__all_scanpy_obj.pyn',
                            'GSE126074_proximal_id_order_proximal__all_scanpy_obj.pyn',
                            'GSE127257_distal_id_gene_order__all_scanpy_obj.pyn',
                            'GSE1303990_proximal_id_order_proximal__all_scanpy_obj.pyn',
                            'BICCN2_proximal_id_order_proximal__all_scanpy_obj.pyn']}
GSES = ['GSE100033', 'BICCN2', 'GSE111586', 'GSE123576', 'GSE126074', 'GSE127257', 'GSE1303990']
AMARKER = ['SF', 'CU', 'TA', 'TN', 'SC']
PALETTE = ['#E64B35FF', '#4DBBD5FF', '#00A087FF', '#91D1C2FF', '#3C5488FF']
# NOTE(review): PMARKER was referenced by collect_auc_from_exp but never
# defined in the original script (NameError on the '' marker-set branch).
# The marker set without the single-cell marker 'SC' is the most plausible
# intent, mirroring AMARKER -- confirm against the original pipeline.
PMARKER = ['SF', 'CU', 'TA', 'TN']


def get_palette_shape(size, data=False):
    """Return (palette, linestyles) for `size` series.

    Marker series get solid lines and fixed colors; dataset series get
    dashed grey lines. `data=True` selects the dataset-only scheme.
    """
    global ALL_DATA
    print(size)
    if data:
        if ALL_DATA:
            palette = ['#E64B35FF'] + sns.color_palette('Greys', 6)[::-1]
            shape = ['-', '--', '--', '--', '--', '--', '--']
        else:
            palette = ['#E64B35FF'] + sns.color_palette('Greys', 5)[::-1]
            shape = ['-', '--', '--', '--', '--', '--']
    else:
        if ALL_DATA:
            if size == 11:
                palette = ['#E64B35FF', '#4DBBD5FF', '#00A087FF', '#91D1C2FF', '#3C5488FF'] + sns.color_palette('Greys', 6)[::-1]
                shape = ['-', '-', '-', '-', '-', '--', '--', '--', '--', '--', '--']
            else:
                palette = ['#E64B35FF', '#4DBBD5FF', '#00A087FF', '#91D1C2FF'] + sns.color_palette('Greys', 6)[::-1]
                shape = ['-', '-', '-', '-', '--', '--', '--', '--', '--', '--']
        else:
            assert size <= 10
            if size == 10:
                palette = ['#E64B35FF', '#4DBBD5FF', '#00A087FF', '#91D1C2FF', '#3C5488FF'] + sns.color_palette('Greys', 5)[::-1]
                shape = ['-', '-', '-', '-', '-', '--', '--', '--', '--', '--']
            else:
                palette = ['#E64B35FF', '#4DBBD5FF', '#00A087FF', '#91D1C2FF'] + sns.color_palette('Greys', 5)[::-1]
                shape = ['-', '-', '-', '-', '--', '--', '--', '--', '--']
    return palette, shape


def norm_row_columns(X):
    """Min-max scale X column-wise, then row-wise, and return the result."""
    from sklearn.preprocessing import MinMaxScaler
    X = np.array(X)
    print(X.shape)
    # Scale each column to [0, 1], then each row of the column-scaled matrix.
    X = np.apply_along_axis(lambda x: MinMaxScaler().fit_transform(x.reshape(-1, 1)), 0, X)
    X = np.squeeze(X)
    X = np.apply_along_axis(lambda x: MinMaxScaler().fit_transform(x.reshape(-1, 1)), 1, X)
    X = np.squeeze(X)
    print(X.shape)
    return X


def get_celltype_category(sample_types):
    """Map cell-type labels to sortable '<rank>_<label>' strings.

    Labels missing from the chosen ordering become '<n>_NA' so they sort
    after all known types.
    """
    # Choose the ordering based on which label vocabulary is present.
    if 'AC' in sample_types:
        sample_uniq = ['AC', 'EX', 'IN', 'MG', 'OG', 'OT']
    elif 'NN' in sample_types:
        sample_uniq = ['NN', 'EX', 'IN']
    else:
        sample_uniq = ['OT', 'EX', 'IN', 'MG', 'OG']
    sample_uniq = [x for x in sample_uniq if x in sample_types]
    return [str(sample_uniq.index(x)) + '_' + x if x in sample_uniq
            else str(len(sample_uniq)) + '_NA'
            for x in sample_types]


def draw_boxplot(header, df, col_dict=None, sdf=None):
    """Save a marker-vs-value boxplot (plus swarm overlay) to `header`.

    `sdf`, when given, supplies the points for the swarm overlay instead
    of `df`.
    """
    print(df.head())
    ax = sns.boxplot(x='marker', y="value", data=df, palette=col_dict, showfliers=False)
    if sdf is not None:
        ax = sns.swarmplot(x="marker", y="value", data=sdf, color=".2", dodge=True)
    else:
        ax = sns.swarmplot(x="marker", y="value", data=df, color=".2", dodge=True)
    ax.legend(bbox_to_anchor=(1.05, 1), loc=2, ncol=2, borderaxespad=0.)
    plt.savefig(header, bbox_inches='tight')
    plt.close('all')
    plt.clf()


def collect_auc_from_down_sampled_exp(dir='./output/scobj/'):
    """Aggregate down-sampling AUROC tables across datasets and plot them.

    Reads `<name>_simulate_down_sample_<th>_auroc.csv` for each dataset and
    threshold, averages the metric columns per condition, appends the
    full-data tables from read_exp_table(), and hands everything to
    collect_auc_exp_marker_set().
    """
    print('???')
    global AMARKER, PMARKER, SET, ALL_DATA
    # NOTE(review): 'BICCN' here vs 'BICCN2' in collect_auc_from_exp --
    # presumably intentional (different file naming for this sweep); verify.
    auc_files = ['BICCN_gene_id_order_gene__all',
                 'GSE111586_gene_id_order_gene__all',
                 'GSE123576_gene_id_order_gene__all',
                 'GSE126074_gene_id_order_gene__all',
                 'GSE127257_distal_id_gene_order__all']
    all_results = None
    if ALL_DATA:
        auc_files.extend(['GSE1303990_gene_id_order_gene__all'])
    for th in [1, 5, 10, 25, 50, 75, 100, 150, 200]:
        for fname in auc_files:
            # Log the exact file we are about to read (the original printed
            # a '_simulate_add_noise_' name while reading the file below).
            print(os.path.join(dir, fname + '_simulate_down_sample_' + str(th) + '_auroc.csv'))
            df = pd.read_csv(os.path.join(dir, fname + '_simulate_down_sample_' + str(th) + '_auroc.csv'),
                             header=0, index_col=0)
            value_column = ['auc', 'acc', 'precision', 'recall', 'whole', 'ppos', 'tpos', 'roc_file']
            columns = [x for x in df.columns if x not in value_column]
            # Average every numeric metric within each experimental condition.
            df = df.groupby(columns).agg({x: np.mean for x in value_column if x != 'roc_file'})
            df = df.reset_index(col_level=0)
            gse = fname.split('_')[0]
            df = df.assign(gse=gse)
            df = df.assign(threshold=[th] * df.shape[0])
            if all_results is None:
                all_results = df
            else:
                all_results = pd.concat([all_results, df])
    header_list = AMARKER[0:SET]
    # Append the non-down-sampled reference results (threshold -1 / 201).
    all_results = pd.concat((all_results, read_exp_table()), ignore_index=True)
    all_results['marker'] = pd.Categorical(all_results['marker'], header_list)
    # x == x is False only for NaN: drop rows whose marker fell outside
    # header_list (Categorical maps unknown labels to NaN).
    all_results = all_results.loc[[x == x for x in all_results['marker']], :]
    print(all_results)
    collect_auc_exp_marker_set(all_results)


def extract_celltype(problem):
    """Return the cell-type labels evaluated for a given prediction problem."""
    if problem in ['celltype', 'cluster']:
        celltypes = ['IN', 'EX', 'NN']
    elif problem == 'inex':
        celltypes = ['IN', 'EX']
    else:
        celltypes = ['P', 'N']
    return celltypes


def read_exp_table():
    """Read every '<cluster>_<mode>_<celltype>_extable.csv' into one table.

    'cluster' rows are relabelled as the 'celltype' problem with threshold
    201; all other rows get threshold -1 (i.e. no down-sampling).
    """
    all_results = None
    for cluster in ['celltype', 'cluster', 'neuron', 'inex']:
        temp_results = None
        for mode in ['average', 'rankmean']:
            for celltype in extract_celltype(cluster):
                fname = cluster + '_' + mode + '_' + celltype + '_extable.csv'
                temp = pd.read_csv(fname)
                print(temp.loc[temp.marker == 'SF', :])
                print(temp.loc[temp.marker == 'SF', :].shape)
                if cluster == 'cluster':
                    temp = temp.assign(threshold=[201] * temp.shape[0])
                    temp.problem = ['celltype'] * temp.shape[0]
                    # pd.concat silently drops None members, so this is safe
                    # on the first iteration as well.
                    temp_results = pd.concat((temp_results, temp))
                else:
                    temp = temp.assign(threshold=[-1] * temp.shape[0])
                    if temp_results is None:
                        temp_results = temp
                    else:
                        temp_results = pd.concat((temp_results, temp))
        if all_results is None:
            all_results = temp_results
        else:
            all_results = pd.concat((all_results, temp_results))
    all_results = all_results.drop('Unnamed: 0', axis=1)
    all_results = all_results.drop_duplicates()
    print(all_results.loc[all_results.threshold.isnull(), :])
    return all_results


def collect_auc_exp_marker_set(all_results, header_list=None):
    """Draw per-threshold AUC box/swarm plots for each problem x celltype.

    Parameters
    ----------
    all_results : pd.DataFrame
        Long-format results with 'mode', 'celltype', 'problem',
        'threshold', 'marker' and 'auc' columns.
    header_list : list of str, optional
        Full marker order being plotted. The original definition took a
        single argument, so the two-argument call in collect_auc_from_exp
        raised TypeError; the parameter is now accepted, and any marker it
        names that has no predefined color (e.g. the per-dataset markers)
        is assigned a grey shade so seaborn's palette lookup cannot fail.
    """
    global AMARKER, PALETTE
    col_dict = dict(zip(AMARKER, PALETTE))
    if header_list is not None:
        extra = [m for m in header_list if m not in col_dict]
        if extra:
            for m, c in zip(extra, sns.color_palette('Greys', len(extra))[::-1]):
                col_dict[m] = c
    for cluster in ['celltype', 'neuron', 'inex']:
        for mode in ['average']:
            for celltype in extract_celltype(cluster):
                temp = all_results.loc[all_results.loc[:, 'mode'] == mode, :]
                print(cluster, mode, celltype)
                temp = temp.loc[(temp.celltype == celltype) & (temp.problem == cluster), :]
                thresholds = sorted(temp.threshold.unique())
                print(temp)
                print(thresholds)
                print(temp.loc[(temp.marker == 'SF') & (temp.threshold == -1), :])
                temp.threshold = pd.Categorical(temp.threshold, thresholds)
                # One wide plot across all thresholds...
                plt.figure(figsize=(10, 6))
                sns.boxplot(data=temp, x='threshold', y='auc', hue='marker', palette=col_dict)
                plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
                plt.show()
                plt.savefig("down_sample_auc_" + cluster + '_' + mode + '_' + celltype + ".pdf")
                plt.close()
                plt.clf()
                # ...and one narrow box+swarm plot per threshold.
                for th in temp.threshold.unique():
                    ttemp = temp.loc[temp.loc[:, 'threshold'] == th, :]
                    plt.figure(figsize=(3, 6))
                    sns.boxplot(data=ttemp, x='marker', y='auc', palette=col_dict)
                    sns.swarmplot(data=ttemp, x='marker', y='auc', color='gray')
                    plt.ylim(0.4, 1.0)
                    plt.legend([], [], frameon=False)
                    plt.show()
                    plt.savefig("down_sample_auc_" + cluster + '_' + mode + '_' + celltype
                                + "_" + str(th) + "_swarm.pdf")
                    plt.close()
                    plt.clf()


def collect_auc_from_exp(dir='./output/scobj/'):
    """Aggregate the plain AUROC tables and plot each marker-set variant.

    Iterates three marker sets -- per-dataset markers ('data_'), published
    markers without SC (''), and all markers ('with_SC_') -- restricting
    the table to each set before plotting.
    """
    global AMARKER, PMARKER, SET, ALL_DATA
    roc_files = ['BICCN2_gene_id_order_gene__all_auroc.csv',
                 'GSE111586_gene_id_order_gene__all_auroc.csv',
                 'GSE123576_gene_id_order_gene__all_auroc.csv',
                 'GSE126074_gene_id_order_gene__all_auroc.csv',
                 'GSE127257_distal_id_gene_order__all_auroc.csv']
    if ALL_DATA:
        roc_files.extend(['GSE1303990_gene_id_order_gene__all_auroc.csv'])
    all_results = None
    for fname in roc_files:
        df = pd.read_csv(os.path.join(dir, fname), header=0, index_col=0)
        gse = fname.split('_')[0]
        df = df.assign(gse=gse)
        if all_results is None:
            all_results = df
        else:
            all_results = pd.concat([all_results, df])
    for prefix in ['with_SC_', '', 'data_'][::-1]:
        if prefix == '':
            header_list = PMARKER[0:(SET - 1)]
        elif prefix == 'with_SC_':
            header_list = AMARKER[0:SET]
        else:
            header_list = ['SF', 'BICCN2', 'GSE111586', 'GSE123576', 'GSE126074', 'GSE127257']
            if ALL_DATA:
                header_list.append('GSE1303990')
        temp = all_results.copy()
        temp['marker'] = pd.Categorical(temp['marker'], header_list)
        # Drop rows whose marker is outside this set (NaN after Categorical).
        temp = temp.loc[[x == x for x in temp['marker']], :]
        collect_auc_exp_marker_set(temp, header_list)


if __name__ == "__main__":
    # Usage: compare_down_sampled_auc.py {exp|down_sample} <input_dir>
    if sys.argv[1] == 'exp':
        collect_auc_from_exp(sys.argv[2])
    elif sys.argv[1] == 'down_sample':
        collect_auc_from_down_sampled_exp(sys.argv[2])
    # plot_auc_different_resolution()
PypiClean
/INGInious-0.8.7.tar.gz/INGInious-0.8.7/inginious/frontend/static/js/codemirror/mode/mirc/mirc.js
//mIRC mode by Ford_Lawnmower :: Based on Velocity mode by Steve O'Hara

// UMD wrapper: register the mode whether CodeMirror is loaded via
// CommonJS, AMD, or a plain <script> tag.
(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMIME("text/mirc", "mirc");
CodeMirror.defineMode("mirc", function() {
  // Build a set-like lookup object from a space-separated word list.
  function parseWords(str) {
    var obj = {}, words = str.split(" ");
    for (var i = 0; i < words.length; ++i) obj[words[i]] = true;
    return obj;
  }

  // Built-in mIRC $-identifiers, highlighted as "keyword".
  var specials = parseWords("$! $$ $& $? $+ $abook $abs $active $activecid " +
                            "$activewid $address $addtok $agent $agentname $agentstat $agentver " +
                            "$alias $and $anick $ansi2mirc $aop $appactive $appstate $asc $asctime " +
                            "$asin $atan $avoice $away $awaymsg $awaytime $banmask $base $bfind " +
                            "$binoff $biton $bnick $bvar $bytes $calc $cb $cd $ceil $chan $chanmodes " +
                            "$chantypes $chat $chr $cid $clevel $click $cmdbox $cmdline $cnick $color " +
                            "$com $comcall $comchan $comerr $compact $compress $comval $cos $count " +
                            "$cr $crc $creq $crlf $ctime $ctimer $ctrlenter $date $day $daylight " +
                            "$dbuh $dbuw $dccignore $dccport $dde $ddename $debug $decode $decompress " +
                            "$deltok $devent $dialog $did $didreg $didtok $didwm $disk $dlevel $dll " +
                            "$dllcall $dname $dns $duration $ebeeps $editbox $emailaddr $encode $error " +
                            "$eval $event $exist $feof $ferr $fgetc $file $filename $filtered $finddir " +
                            "$finddirn $findfile $findfilen $findtok $fline $floor $fopen $fread $fserve " +
                            "$fulladdress $fulldate $fullname $fullscreen $get $getdir $getdot $gettok $gmt " +
                            "$group $halted $hash $height $hfind $hget $highlight $hnick $hotline " +
                            "$hotlinepos $ial $ialchan $ibl $idle $iel $ifmatch $ignore $iif $iil " +
                            "$inelipse $ini $inmidi $inpaste $inpoly $input $inrect $inroundrect " +
                            "$insong $instok $int $inwave $ip $isalias $isbit $isdde $isdir $isfile " +
                            "$isid $islower $istok $isupper $keychar $keyrpt $keyval $knick $lactive " +
                            "$lactivecid $lactivewid $left $len $level $lf $line $lines $link $lock " +
                            "$lock $locked $log $logstamp $logstampfmt $longfn $longip $lower $ltimer " +
                            "$maddress $mask $matchkey $matchtok $md5 $me $menu $menubar $menucontext " +
                            "$menutype $mid $middir $mircdir $mircexe $mircini $mklogfn $mnick $mode " +
                            "$modefirst $modelast $modespl $mouse $msfile $network $newnick $nick $nofile " +
                            "$nopath $noqt $not $notags $notify $null $numeric $numok $oline $onpoly " +
                            "$opnick $or $ord $os $passivedcc $pic $play $pnick $port $portable $portfree " +
                            "$pos $prefix $prop $protect $puttok $qt $query $rand $r $rawmsg $read $readomo " +
                            "$readn $regex $regml $regsub $regsubex $remove $remtok $replace $replacex " +
                            "$reptok $result $rgb $right $round $scid $scon $script $scriptdir $scriptline " +
                            "$sdir $send $server $serverip $sfile $sha1 $shortfn $show $signal $sin " +
                            "$site $sline $snick $snicks $snotify $sock $sockbr $sockerr $sockname " +
                            "$sorttok $sound $sqrt $ssl $sreq $sslready $status $strip $str $stripped " +
                            "$syle $submenu $switchbar $tan $target $ticks $time $timer $timestamp " +
                            "$timestampfmt $timezone $tip $titlebar $toolbar $treebar $trust $ulevel " +
                            "$ulist $upper $uptime $url $usermode $v1 $v2 $var $vcmd $vcmdstat $vcmdver " +
                            "$version $vnick $vol $wid $width $wildsite $wildtok $window $wrap $xor");
  // mIRC commands and dialog/menu tokens, highlighted as "keyword".
  var keywords = parseWords("abook ajinvite alias aline ame amsg anick aop auser autojoin avoice " +
                            "away background ban bcopy beep bread break breplace bset btrunc bunset bwrite " +
                            "channel clear clearall cline clipboard close cnick color comclose comopen " +
                            "comreg continue copy creq ctcpreply ctcps dcc dccserver dde ddeserver " +
                            "debug dec describe dialog did didtok disable disconnect dlevel dline dll " +
                            "dns dqwindow drawcopy drawdot drawfill drawline drawpic drawrect drawreplace " +
                            "drawrot drawsave drawscroll drawtext ebeeps echo editbox emailaddr enable " +
                            "events exit fclose filter findtext finger firewall flash flist flood flush " +
                            "flushini font fopen fseek fsend fserve fullname fwrite ghide gload gmove " +
                            "gopts goto gplay gpoint gqreq groups gshow gsize gstop gtalk gunload hadd " +
                            "halt haltdef hdec hdel help hfree hinc hload hmake hop hsave ial ialclear " +
                            "ialmark identd if ignore iline inc invite iuser join kick linesep links list " +
                            "load loadbuf localinfo log mdi me menubar mkdir mnick mode msg nick noop notice " +
                            "notify omsg onotice part partall pdcc perform play playctrl pop protect pvoice " +
                            "qme qmsg query queryn quit raw reload remini remote remove rename renwin " +
                            "reseterror resetidle return rlevel rline rmdir run ruser save savebuf saveini " +
                            "say scid scon server set showmirc signam sline sockaccept sockclose socklist " +
                            "socklisten sockmark sockopen sockpause sockread sockrename sockudp sockwrite " +
                            "sound speak splay sreq strip switchbar timer timestamp titlebar tnick tokenize " +
                            "toolbar topic tray treebar ulist unload unset unsetall updatenl url uwho " +
                            "var vcadd vcmd vcrem vol while whois window winhelp write writeint if isalnum " +
                            "isalpha isaop isavoice isban ischan ishop isignore isin isincs isletter islower " +
                            "isnotify isnum ison isop isprotect isreg isupper isvoice iswm iswmcs " +
                            "elseif else goto menu nicklist status title icon size option text edit " +
                            "button check radio box scroll list combo link tab item");
  // Words that introduce a parameter list; matching one sets beforeParams.
  var functions = parseWords("if elseif else and not or eq ne in ni for foreach while switch");
  var isOperatorChar = /[+\-*&%=<>!?^\/\|]/;

  // Switch the active tokenizer to f and delegate the current token to it.
  function chain(stream, state, f) {
    state.tokenize = f;
    return f(stream, state);
  }

  // Main tokenizer: classifies one token starting at the stream position.
  function tokenBase(stream, state) {
    // beforeParams is consumed here: it is true only for the token right
    // after a builtin/function name, so "(" can be recognized as opening
    // a parameter list.
    var beforeParams = state.beforeParams;
    state.beforeParams = false;
    var ch = stream.next();
    if (/[\[\]{}\(\),\.]/.test(ch)) {
      // Punctuation: track whether we are inside a parameter list.
      if (ch == "(" && beforeParams) state.inParams = true;
      else if (ch == ")") state.inParams = false;
      return null;
    }
    else if (/\d/.test(ch)) {
      stream.eatWhile(/[\w\.]/);
      return "number";
    }
    else if (ch == "\\") {
      // Escape sequence: consume an optional second backslash plus the
      // escaped character.
      stream.eat("\\");
      stream.eat(/./);
      return "number";
    }
    else if (ch == "/" && stream.eat("*")) {
      // Block comment /* ... */ -- handled by a dedicated tokenizer.
      return chain(stream, state, tokenComment);
    }
    else if (ch == ";" && stream.match(/ *\( *\(/)) {
      // "; ((" introduces an unparsed region ending with ")) ;".
      return chain(stream, state, tokenUnparsed);
    }
    else if (ch == ";" && !state.inParams) {
      // Line comment (";" only starts a comment outside parameter lists).
      stream.skipToEnd();
      return "comment";
    }
    else if (ch == '"') {
      stream.eat(/"/);
      return "keyword";
    }
    else if (ch == "$") {
      // $-identifier: known built-ins are keywords, the rest are treated
      // as custom identifiers that may take parameters.
      stream.eatWhile(/[$_a-z0-9A-Z\.:]/);
      if (specials && specials.propertyIsEnumerable(stream.current().toLowerCase())) {
        return "keyword";
      }
      else {
        state.beforeParams = true;
        return "builtin";
      }
    }
    else if (ch == "%") {
      // %variable reference.
      stream.eatWhile(/[^,\s()]/);
      state.beforeParams = true;
      return "string";
    }
    else if (isOperatorChar.test(ch)) {
      stream.eatWhile(isOperatorChar);
      return "operator";
    }
    else {
      // Bare word: command/keyword, function-like word, or plain text.
      // propertyIsEnumerable is used instead of "in" so that inherited
      // Object.prototype names (e.g. "constructor") don't false-match.
      stream.eatWhile(/[\w\$_{}]/);
      var word = stream.current().toLowerCase();
      if (keywords && keywords.propertyIsEnumerable(word))
        return "keyword";
      if (functions && functions.propertyIsEnumerable(word)) {
        state.beforeParams = true;
        return "keyword";
      }
      return null;
    }
  }

  // Tokenizer for /* ... */ comments; returns control to tokenBase at "*/".
  function tokenComment(stream, state) {
    var maybeEnd = false, ch;
    while (ch = stream.next()) {
      if (ch == "/" && maybeEnd) {
        state.tokenize = tokenBase;
        break;
      }
      maybeEnd = (ch == "*");
    }
    return "comment";
  }

  // Tokenizer for "; ((" unparsed regions; maybeEnd counts consecutive ")"
  // (ignoring spaces) so that ")) ;" terminates the region.
  function tokenUnparsed(stream, state) {
    var maybeEnd = 0, ch;
    while (ch = stream.next()) {
      if (ch == ";" && maybeEnd == 2) {
        state.tokenize = tokenBase;
        break;
      }
      if (ch == ")")
        maybeEnd++;
      else if (ch != " ")
        maybeEnd = 0;
    }
    return "meta";
  }

  return {
    startState: function() {
      return {
        tokenize: tokenBase,
        beforeParams: false,
        inParams: false
      };
    },
    token: function(stream, state) {
      if (stream.eatSpace()) return null;
      return state.tokenize(stream, state);
    }
  };
});
});
PypiClean
/GeoNode-3.2.0-py3-none-any.whl/geonode/static/geonode/js/upload/LayerInfo.js
'use strict';

define(function (require, exports) {
    var _ = require('underscore'),
        fileTypes = require('./FileTypes'),
        path = require('./path'),
        common = require('./common'),
        LayerInfo;

    /** Creates an instance of a LayerInfo
     * @constructor
     * @author Ivan Willig
     * @this {LayerInfo}
     * @param {Object} options - expected to carry at least `name` and
     *     `files`; all own properties are copied onto the instance.
     */
    LayerInfo = function (options) {
        this.id = null;
        this.name = null;
        this.files = null;
        this.type = null;
        this.main = null;     // the "main" file of the upload (e.g. the .shp)
        this.element = null;  // jQuery element this layer is rendered into
        $.extend(this, options || {});
        // Derive type/main from the file list unless both were supplied.
        if (!this.main || !this.type) {
            this.guessFileType();
        }
        // need to find a way converting this name to a safe selector
        // (base name without extension, special chars replaced).
        this.name = LayerInfo.safeSelector(this.name.split('.')[0]);
        this.selector = '#' + this.name + '-element';
        this.errors = this.collectErrors();
        this.polling = false;
    };

    /** Make a file name safe to use inside a CSS selector by replacing
     * brackets, parentheses and spaces with underscores.
     *
     * @param {string} name
     * @returns {string}
     */
    LayerInfo.safeSelector = function (name) {
        return name.replace(/\[|\]|\(|\)| /g, '_');
    };

    /** Render the shared #successTemplate underscore template.
     *
     * @param {Object} options - template context
     * @returns {string} rendered HTML
     */
    LayerInfo.prototype.successTemplate = function (options) {
        var template = _.template($('#successTemplate').html());
        return template(options);
    };

    /* Function to iterates through all of the known types and returns the
     * type if it matches, if not return null
     * NOTE(review): the loop does not break on the first match, so the
     * LAST matching type wins -- presumably intentional, see the
     * commented-out `return false;` below.
     * @param {File} file
     * @returns {{type: Object, file: File}|undefined}
     */
    LayerInfo.prototype.findFileType = function (file) {
        var i, type, res;
        var extensions = this.getExtensions();
        $.each(fileTypes, function (name, type) {
            if (type.isType(file, extensions)) {
                res = {type: type, file: file};
                // return false;
            }
        });
        return res;
    };

    /** Inspect every file of the upload and set this.type / this.main.
     *
     * Priority rules encoded below:
     *  - a KML file always becomes the main file;
     *  - xml/sld files never replace an already-found main file
     *    (they are metadata/style side-cars);
     *  - otherwise the first recognized file wins unless a KML was
     *    already chosen.
     */
    LayerInfo.prototype.guessFileType = function () {
        var self = this;
        $.each(this.files, function (idx, file) {
            var results = self.findFileType(file);
            // if we find the type of the file, we also find the "main" file
            if (results) {
                if (results.type.main == 'kml') {
                    // Assume the kml file always as main one
                    self.type = results.type;
                    self.main = results.file;
                } else if ((results.type.main == 'xml' || results.type.main == 'sld') && self.main != undefined) {
                    // Do not assume the metadata or sld file as main one
                    self.type = self.type;
                    self.main = self.main;
                } else if ((self.type == undefined) || (self.type != undefined && self.type.main != 'kml')) {
                    self.type = results.type;
                    self.main = results.file;
                }
            }
        });
    };

    /** Delegates to the Layer Type to find all of the errors
     * associated with this type, plus mosaic-granule validation driven
     * by the form's checkbox state.
     *
     * NOTE(review): when this.type is set, the granule error pushed
     * earlier is discarded because `errors` is reassigned from
     * findTypeErrors -- confirm whether that is intended.
     *
     * @returns {string[]} list of human-readable error messages
     */
    LayerInfo.prototype.collectErrors = function () {
        var errors = [];
        var mosaic_is_valid = true;
        var is_granule = $('#' + this.name + '-mosaic').is(':checked');
        var is_time_enabled = $('#' + this.name + '-timedim').is(':checked');
        var is_time_valid = is_time_enabled && !$('#' + this.name + '-timedim-value-valid').is(':visible');
        if (is_granule && is_time_enabled) {
            mosaic_is_valid = is_time_valid;
        }
        if (is_granule && !mosaic_is_valid) {
            errors.push('The configuration of the file as a Mosaic Granule is not valid, please fix the issue and try again');
        }
        if (this.type) {
            errors = this.type.findTypeErrors(this.getExtensions());
        } else {
            errors.push('Unknown type, please try again');
        }
        return errors;
    };

    /** Collect the file extensions of every file in this upload.
     *
     * @returns {string[]}
     */
    LayerInfo.prototype.getExtensions = function () {
        var files = this.files,
            extension,
            file,
            res = [],
            i;
        for (i = 0; i < files.length; i += 1) {
            file = files[i];
            extension = path.getExt(file);
            res.push(extension);
        }
        return res;
    };

    /** Build a new FormData object from the current state of the
     * LayerInfo object.
* * @returns {FromData} */ LayerInfo.prototype.prepareFormData = function (form_data) { var i, ext, file, perm, time, mosaic; var base_ext = this.main.name.split('.').pop(); var base_name = this.name; var base_ext = this.main.name.split('.').pop(); var base_name = this.name; var base_ext = this.main.name.split('.').pop(); var base_name = this.name; if (!form_data) { form_data = new FormData(); } // this should be generated from the permission widget if (typeof permissionsString == 'undefined'){ perm = {} } else { perm = permissionsString('#permission_form','layers'); } if (time_enabled) { time = (this.type && (this.type.main === 'shp' || this.type.main === 'csv')); form_data.append('time', time); } if (mosaic_enabled) { mosaic = $('#' + base_name + '-mosaic').is(':checked'); var is_time_valid = $('#' + base_name + '-timedim').is(':checked') && !$('#' + base_name + '-timedim-value-valid').is(':visible'); if (mosaic /*&& is_time_valid*/) { form_data.append('mosaic', mosaic); var append_to_mosaic_opts = $('#' + base_name + '-mosaic-granule').is(':checked'); var append_to_mosaic_name = $('#' + base_name + '-mosaic-granule-format-select').val(); //console.log("append_to_mosaic_opts:" + append_to_mosaic_opts + " / append_to_mosaic_name:" + append_to_mosaic_name); if (is_time_valid) { var time_regex = $('#' + base_name + '-timedim-format-select').val(); var time_value = $('#' + base_name + '-timedim-value').val(); //console.log("time_regex:" + time_regex + " / time_value:" + time_value); var time_presentation_opts = $('#' + base_name + '-timedim-presentation').is(':checked'); var time_presentation = "LIST"; var time_presentation_res = 0; var time_presentation_default_value = ""; var time_presentation_reference_value = ""; if (time_presentation_opts) { time_presentation = $('#' + base_name + '-timedim-presentation-format-select').val(); if (time_presentation === 'DISCRETE_INTERVAL') { // Years time_presentation_res += parseInt( $('#' + base_name + 
'-timedim-presentation-years').val() ) * 31536000000; // Months time_presentation_res += parseInt( $('#' + base_name + '-timedim-presentation-months').val() ) * 2628000000; // Weeks time_presentation_res += parseInt( $('#' + base_name + '-timedim-presentation-weeks').val() ) * 604800000; // Days time_presentation_res += parseInt( $('#' + base_name + '-timedim-presentation-days').val() ) * 86400000; // Hours time_presentation_res += parseInt( $('#' + base_name + '-timedim-presentation-hours').val() ) * 3600000; // Minutes time_presentation_res += parseInt( $('#' + base_name + '-timedim-presentation-minutes').val() ) * 60000; // Seconds time_presentation_res += parseInt( $('#' + base_name + '-timedim-presentation-seconds').val() ) * 1000; } time_presentation_default_value = $('#' + base_name + '-timedim-defaultvalue-format-select').val(); if (time_presentation_default_value == 'NEAREST' || time_presentation_default_value == 'FIXED') { time_presentation_reference_value = $('#' + base_name + '-timedim-defaultvalue-ref-value').val(); } } //console.log("time_presentation:" + time_presentation + " / time_presentation_res:" + time_presentation_res); form_data.append('mosaic_time_regex', time_regex); form_data.append('mosaic_time_value', time_value); form_data.append('time_presentation', time_presentation); form_data.append('time_presentation_res', time_presentation_res); form_data.append('time_presentation_default_value', time_presentation_default_value); form_data.append('time_presentation_reference_value', time_presentation_reference_value); } form_data.append('append_to_mosaic_opts', append_to_mosaic_opts); if (append_to_mosaic_opts) { form_data.append('append_to_mosaic_name', append_to_mosaic_name); } } } form_data.append('base_file', this.main); form_data.append('permissions', JSON.stringify(perm)); for (i = 0; i < this.files.length; i += 1) { file = this.files[i]; if (file.name !== this.name) { ext = path.getExt(file); form_data.append(ext + '_file', file); } } 
form_data.append('charset', $('#charset').val()); if ($('#id_metadata_upload_form').prop('checked')) { form_data.append('metadata_upload_form', true); form_data.append('layer_title', $('#id_layer_title').val()); } if ($('#id_metadata_uploaded_preserve').prop('checked')) { form_data.append('metadata_uploaded_preserve', true); } if ($('#id_style_upload_form').prop('checked')) { form_data.append('style_upload_form', true); form_data.append('layer_title', $('#id_layer_title').val()); } return form_data; }; /** Log the status to the status div * * @params {options} * @returns {string} */ LayerInfo.prototype.logStatus = function (options) { options.element = this.element.find('#status'); common.logStatus(options); }; /** Function to mark errors in the the status * * @params {error} * @returns {string} */ LayerInfo.prototype.markError = function (error, status) { var default_message = gettext("Unexpected error!"); if (status == 400) { default_message += gettext(" - 400 Bad Request. Server cannot or will not process the request due to something that is perceived to be a client error (e.g., malformed request syntax, invalid request message framing, or deceptive request routing)."); } else if (status == 401) { default_message += gettext(" - 401 Unauthorized. Request was not sent with the proper authentication credentials."); } else if (status == 403) { default_message += gettext(" - 403 Forbidden. This is generally related to permission rules on your server. Contact the system administrator for more information regarding this error message."); } else if (status == 404) { default_message += gettext(" - 404 Not Found. Origin server was unable or unwilling to find the resource requested."); } else if (status == 405) { default_message += gettext(" - 405 Method Not Allowed. Origin server is aware of the requested resource, but the request method used is not supported."); } else if (status == 406) { default_message += gettext(" - 406 Not Acceptable. 
Resource is not available at the origin that adheres to negotiation headers that were set prior (e.g. via 'Accept-Charset' and 'Accept-Language' headers)."); } else if (status == 407) { default_message += gettext(" - 407 Authentication Required. The client did not send the required authentication with the request."); } else if (status == 408) { default_message += gettext(" - 408 Request Timeout. The origin server did not receive the complete request in what it considers a reasonable time."); } else if (status == 409) { default_message += gettext(" - 409 Conflict. The request did not complete because of a conflict with the current state of the resource. Typically happens on a PUT request where multiple clients are attempting to edit the same resource."); } else if (status == 410) { default_message += gettext(" - 410 Gone. The resource requested is permanently missing at the origin."); } else if (status == 411) { default_message += gettext(" - 411 Length Required. Client did not define the 'Content-Length' of the request body in the headers and this is required to obtain the resource."); } else if (status == 412) { default_message += gettext(" - 412 Precondition Failed. Server denies the request because the resource failed to meet the conditions specified by the client."); } else if (status == 413) { default_message += gettext(" - 413 Payload Too Large. Refusal from the server to process the request because the payload sent from the client is larger than the server wished to accept. Server has the optional to close the connection."); } else if (status == 414) { default_message += gettext(" - 414 URI Too Long. Refusal from the server that the URI was too long to be processed. For example, if a client is attempting a GET request with an unusually long URI after a POST, this could be seen as a security risk and a 414 gets generated."); } else if (status == 415) { default_message += gettext(" - 415 Unsupported Media Type. 
Refusal from the server to process the format of the current payload. One way to identify and fix this issue would be to look at the 'Content-Type' or 'Content-Encoding' headers sent in the client’s request."); } else if (status == 417) { default_message += gettext(" - 417 Expectation Failed. Failure of server to meet the requirements specified in the 'Expect' header of the client’s request."); } else if (status == 429) { default_message += gettext(" - 429 Too Many Requests. Client has sent too many requests in the specified amount of time according to the server."); } else if (status == 499) { default_message += gettext(" - 499 Client Close Request. Nginx specific response code to indicate when the connection has been closed by the client while the server is still processing its request, making server unable to send a status code back."); } else if (status == 500) { default_message += gettext(" - 500 Internal Server Error. This error indicates that the server has encountered an unexpected condition. This often occurs when an application request cannot be fulfilled due to the application being configured incorrectly on the server."); } else if (status == 501) { default_message += gettext(" - 501 Not Implemented. This error indicates that the HTTP method sent by the client is not supported by the server. This is most often caused by the server being out of date. It is a very rare error and generally requires that the web server be updated."); } else if (status == 502) { default_message += gettext(" - 502 Bad Gateway. This error is usually due to improperly configured proxy servers. The first step in resolving the issue is to clear the client's cache."); } else if (status == 503) { default_message += gettext(" - 503 Service Unavailable. This error occurs when the server is unable to handle requests due to a temporary overload or due to the server being temporarily closed for maintenance. 
The error indicates that the server will only temporarily be down."); } else if (status == 504) { default_message += gettext(" - 504 Gateway Timeout. GeoNode lost the connection with GeoServer or DB due to a connection timeout. Consider using the management commands to import data!"); } else if (status == 505) { default_message += gettext(" - 505 HTTP Version Not Supported. This error occurs when the server refuses to support the HTTP protocol that has been specified by the client computer. This can be caused by the protocol not being specified properly by the client computer; for example, if an invalid version number has been specified."); } else if (status == 506) { default_message += gettext(" - 506 Variant Also Negotiates. This error indicates that the server is not properly configured. Contact the system administrator to resolve this issue."); } else if (status == 507) { default_message += gettext(" - 507 Insufficient Storage. This error indicates that the server is out of free memory. This is most likely to occur when an application that is being requested cannot allocate the necessary system resources to run. To resolve the issue, the server's hard disk may need to be cleaned of any unnecessary documents to free up more hard disk space, its memory may need to be expanded, or it may simply need to be restarted. Contact the system administrator for more information regarding this error message."); } else if (status == 509) { default_message += gettext(" - 509 Bandwidth Limit Exceeded. This error occurs when the bandwidth limit imposed by the system administrator has been reached. The only fix for this issue is to wait until the limit is reset in the following cycle. Consult the system administrator for information about acquiring more bandwidth."); } else if (status == 510) { default_message += gettext(" - 510 Not Extended. This error occurs when an extension attached to the HTTP request is not supported by the web server. 
To resolve the issue, you may need to update the server."); } else { default_message += " - " + gettext("Unknwon") + gettext(" Error Code. Contact the system administrator for more information regarding this error message."); } if (error != undefined) { default_message += " " + gettext("Additional info: ") + "[" + error + "]"; } common.logError(default_message, this.element.find('#status')); }; /** Function to mark the start of the upload * * @params {options} * @returns {string} * * TODO: make this into an abstract method so we can mark events in a * more generic way */ LayerInfo.prototype.markStart = function () { this.logStatus({ msg: 'Your upload has started<div class="progress" id="prog"><div class="progress-bar progress-bar-success" style="width:0%"></div>', level: 'alert-success', empty: 'true' }); }; LayerInfo.prototype.doResume = function (event) { $(this).text(gettext('Finalizing')).attr('disabled', 'disabled').after('<img class="pull-right" src="../../static/geonode/img/loading.gif">'); var id = (new Date()).getTime(); /* **** * AF: Switching those two below allows to open a new window instead of redirecting * the active one. * ****/ // var newWin = window.open(window.location.href, // id, "toolbar=1,scrollbars=1,location=0,statusbar=0,menubar=1,resizable=1,width=1100,height=800,left = 240,top = 100"); common.make_request({ url: event.data.url, async: true, failure: function (resp, status) { if (resp && resp.errors) { self.markError(resp.errors, status); } else { self.markError(gettext('Unexpected Error'), status); } }, success: function (resp, status) { if(resp.url && resp.input_required){ /* **** * AF: Switching those two below allows to open a new window instead of redirecting * the active one. * ****/ window.location = resp.url; /* newWin.location = resp.url; newWin.focus(); */ }else { /* **** * AF: Switching those two below allows to open a new window instead of redirecting * the active one. 
* ****/ window.location = resp.redirect_to; /* newWin.location = resp.redirect_to; newWin.focus(); */ } }, }); return false; }; String.prototype.capitalize = function() { return this.charAt(0).toUpperCase() + this.slice(1); }; LayerInfo.prototype.displayUploadedLayerLinks = function(resp) { var self = this; var resourceType = 'layer'; try { resourceType = /^\/(.*)s\/.*/.exec(resp.url)[1]; } catch (err) { // pass } var info_message = gettext('Your ' + resourceType +' was successfully created.'); var a = '<a href="' + resp.url + '" class="btn btn-success">' + gettext(resourceType.capitalize() + ' Info') + '</a>&nbsp;&nbsp;&nbsp;'; var b = '<a href="' + resp.url + '/metadata" class="btn btn-warning">' + gettext('Edit Metadata') + '</a>&nbsp;&nbsp;&nbsp;'; var c = '<a href="' + resp.url + '/metadata_upload" class="btn btn-warning">' + gettext('Upload Metadata') + '</a>&nbsp;&nbsp;&nbsp;'; var d = '<a href="' + resp.url + '/style_upload" class="btn btn-warning">' + gettext('Upload SLD') + '</a>&nbsp;&nbsp;&nbsp;'; var e = '<a href="' + resp.url.replace(/^\/layers/, '/gs') + '/style/manage" class="btn btn-warning">' + gettext('Manage Styles') + '</a>&nbsp;&nbsp;&nbsp;'; if(resourceType != 'layer') { // Only Layers have Metadata and SLD Upload features for the moment c = ''; d = ''; e = ''; } else { info_message += ' ' + gettext('Please wait until GeoNode finished configuring it!'); a = ''; } var msg_col = ""; if (resp.info){ var msg_template = gettext('The column %1 was renamed to %2 <br/>'); for (var key in resp.info){ msg_col += format(msg_template,[key,resp.info[key]]); } } self.logStatus({ msg: '<p>' + info_message + '<br/>' + msg_col + '<br/>' + a + b + c + d + e + '</p>', level: 'alert-success', empty: 'true' }); }; LayerInfo.prototype.startPolling = function() { var self = this; if (self.polling) { $.ajax({ url: updateUrl(siteUrl + "upload/progress", 'id', self.id), type: 'GET', success: function(data){ // TODO: Not sure we need to do anything here? 
//console.log('polling'); }, dataType: "json", complete: setTimeout(function() {self.startPolling()}, 3000), timeout: 30000 }); } }; /** Function to deal with the final step in the upload process * * @params {options} * @returns {string} */ LayerInfo.prototype.doFinal = function (resp, callback, array) { var self = this; if (resp.hasOwnProperty('redirect_to') && resp.redirect_to.indexOf('upload/final') > -1) { common.make_request({ url: resp.redirect_to, async: true, beforeSend: function() { self.logStatus({ msg: '<p>' + gettext('Performing Final GeoServer Config Step') + '<img class="pull-right" src="../../static/geonode/img/loading.gif"></p>', level: 'alert-success', empty: 'true' }); self.polling = true; self.startPolling(); }, failure: function (resp, status) { self.polling = false; var error = (resp.errors != undefined ? resp.errors : resp.error_msg); self.markError(error, status); callback(array); }, success: function (resp, status) { self.polling = false; if (resp.status === "other") { self.logStatus({ msg:'<p>' + gettext('You need to specify more information in order to complete your upload') + '</p>', level: 'alert-success', empty: 'true' }); } else if (resp.status === "pending") { setTimeout(function() { self.doFinal(resp, callback, array); }, 5000); } else if (resp.status === 'error') { self.polling = false; self.markError(resp.error_msg, resp.status); callback(array); } else { self.displayUploadedLayerLinks(resp); callback(array); } } }); } else if (resp.status === "incomplete") { var id = common.parseQueryString(resp.url).id; var element = 'next_step_' + id var a = '<a id="' + element + '" class="btn btn-primary" target="_blank">Continue</a>'; var msg = '<p>' + gettext('Files are ready to be ingested!') if (resp.redirect_to.indexOf('time') !== -1 || resp.url.indexOf('time') !== -1) { msg += '&nbsp;' + gettext('A temporal dimension may be added to this Layer.') + '&nbsp;' + a + '</p>' } else { msg += '&nbsp;' + a + '</p>' } self.logStatus({ msg: msg, 
level: 'alert-success', empty: 'true' }); $("#" + element).on('click', resp, self.doResume); callback(array); return; } else if (resp.status === "other") { self.logStatus({ msg:'<p>' + gettext('You need to specify more information in order to complete your upload') + '</p>', level: 'alert-success', empty: 'true' }); callback(array); } else if (resp.status === 'error') { self.polling = false; self.markError(resp.error_msg, resp.status); callback(array); } else if (resp.success === true) { self.polling = false; self.displayUploadedLayerLinks(resp); callback(array); } else { self.polling = false; resp.errors = 'Unexpected Error'; self.logStatus({ msg:'<p>' + gettext('Unexpected Error') + '</p>', level: 'alert-error', empty: 'true' }); callback(array); } }; /** Function to deal with the Steps in the upload process * * @params {options} * @returns {string} */ LayerInfo.prototype.doStep = function (resp, callback, array) { var self = this; self.logStatus({ msg: '<p>' + gettext('Performing GeoServer Config Step') + '<img class="pull-right" src="../../static/geonode/img/loading.gif"></p>', level: 'alert-success', empty: 'true' }); if (resp.success === true && resp.status === 'incomplete') { common.make_request({ url: updateUrl(resp.redirect_to, 'force_ajax', 'true'), async: true, failure: function (resp, status) { self.polling = false; if (resp.status && resp.status !== 'success') { self.markError(resp.error_msg, resp.status); } else { self.markError(resp.errors, status); } callback(array); }, success: function (resp, status) { self.id = resp.id; if (resp.status === 'incomplete') { if (resp.input_required === true) { self.doFinal(resp, callback, array); } else { self.doStep(resp, callback, array); } } else if (resp.status === 'error') { self.polling = false; self.markError(resp.error_msg, resp.status); callback(array); } else if (resp.redirect_to.indexOf('upload/final') > -1) { self.doFinal(resp, callback, array); } else { window.location = resp.url; } } }); } else if 
(resp.success === true && resp.status === 'error') { self.polling = false; self.markError(resp.error_msg, resp.status); callback(array); } else if (resp.success === true && typeof resp.url != 'undefined') { self.doFinal(resp, callback, array); } else if (resp.success === true && resp.redirect_to.indexOf('upload/final') > -1) { self.doFinal(resp, callback, array); } }; /** Function to upload the files against the specified endpoint * * @params * @returns */ LayerInfo.prototype.uploadFiles = function (callback, array) { var form_data = this.prepareFormData(), self = this; var prog = ""; $.ajax({ url: form_target, async: true, mode: "queue", type: "POST", data: form_data, timeout: 600000, // sets timeout to 10 minutes processData: false, contentType: false, xhr: function() { var req = $.ajaxSettings.xhr(); if (req) { req.upload.addEventListener('progress', function(evt) { if(evt.lengthComputable) { var pct = (evt.loaded / evt.total) * 100; $('#prog > .progress-bar').css('width', pct.toPrecision(3) + '%'); } }, false); } return req; }, beforeSend: function () { self.markStart(); self.polling = true; self.startPolling(); }, error: function (jqXHR) { self.polling = false; if(jqXHR.status === 500 || jqXHR.status === 0 || jqXHR.readyState === 0){ self.markError('Server Error: ' + jqXHR.statusText + gettext('<br>Please check your network connection. 
In case of Layer Upload make sure GeoServer is running and accepting connections.')); } else if (jqXHR.status === 400 || jqXHR.status === 404) { if (jqXHR.responseJSON !== undefined && jqXHR.responseJSON !== null) { if (jqXHR.responseJSON.errors !== undefined) { self.markError('Client Error: ' + jqXHR.statusText + gettext('<br>' + jqXHR.responseJSON.errors)); } } else if (jqXHR.responseText !== undefined && jqXHR.responseText !== null) { self.markError('Client Error: ' + jqXHR.statusText + gettext('<br>' + jqXHR.responseText)); } else { self.markError('Client Error: ' + jqXHR.statusText + gettext('<br>Bad request or URL not found.')); } } else { if (jqXHR.responseJSON !== undefined && jqXHR.responseJSON !== null) { if (jqXHR.responseJSON.errors !== undefined) { self.markError('Unexpected Error: ' + jqXHR.statusText + gettext('<br>' + jqXHR.responseJSON.errors)); } } else if (jqXHR.responseText !== undefined && jqXHR.responseText !== null) { self.markError('Unexpected Error: ' + jqXHR.statusText + gettext('<br>' + jqXHR.responseText)); } else { self.markError('Unexpected Error: ' + jqXHR.statusText + gettext('<br>Unknown.')); } } callback(array); }, success: function (resp, status) { self.logStatus({ msg: '<p>' + gettext('Layer files uploaded, configuring in GeoServer') + '</p>', level: 'alert-success', empty: 'true' }); self.id = resp.id; self.doStep(resp, callback, array); } }); }; /** Function to display the layers collected from the files * selected for uploading * * @params {file_queue} * @returns {string} */ LayerInfo.prototype.display = function (file_queue) { var layerTemplate = _.template($('#layerTemplate').html()), li = layerTemplate({ name: this.name, selector: LayerInfo.safeSelector(this.name), type: this.type.name, format: this.type.format, time: time_enabled, mosaic: mosaic_enabled }); file_queue.append(li); this.errors = this.collectErrors(); this.displayFiles(); this.displayErrors(); this.element = $(this.selector); var time_re_txt = "[0-9]{8}"; 
$('#' + this.name + '-mosaic').on('change', this.doImageMosaicToggle); $('#' + this.name + '-mosaic-granule').on('change', this.doImageMosaicGranuleOptionsToggle); $('#' + this.name + '-timedim').on('change', this.doImageMosaicTimedimOptionsToggle); $('#' + this.name + '-timedim-presentation').on('change', this.doImageMosaicTimedimPresentationOptionsToggle); $('#' + this.name + '-mosaic-granule-format-select').on('change', this.doImageMosaicGranuleLayerSelect); $('#' + this.name + '-timedim-format-select').on('change', function() { var input = $(this); time_re_txt = input.val(); var base_name = this.name.split('-timedim')[0]; $('#' + base_name + '-timedim-value-valid').show(); }); $('#' + this.name + '-timedim-presentation-format-select').on('change', function() { var input = $(this); var base_name = this.name.split('-timedim')[0]; if (input.val() === 'DISCRETE_INTERVAL') { $('#' + base_name + '-mosaic-timedim-presentation-res-options').show(); } else { $('#' + base_name + '-mosaic-timedim-presentation-res-options').hide(); } }); $('#' + this.name + '-timedim-defaultvalue-format-select').on('change', function() { var input = $(this); var base_name = this.name.split('-timedim')[0]; if (input.val() === 'NEAREST' || input.val() === 'FIXED') { $('#' + base_name + '-mosaic-timedim-defaultvalue-res-options').show(); } else { $('#' + base_name + '-mosaic-timedim-defaultvalue-res-options').hide(); } }); $('#' + this.name + '-timedim-value').on('input', function() { var input = $(this); var re = new RegExp(time_re_txt, "g"); var is_valid = re.test(input.val()); if(is_valid){ $('#' + this.name + '-valid').hide(); } else { $('#' + this.name + '-valid').show(); } }); $('#' + this.name + '-timedim-defaultvalue-ref-value').on('input', function() { var input = $(this); var re = /(\d{4})-(\d{2})-(\d{2})T(\d{2})\:(\d{2})\:(\d{2})[+-](\d{2})\:(\d{2})/; var is_valid = re.test(input.val()); if(is_valid){ $('#' + this.name + '-valid').hide(); } else { $('#' + this.name + 
'-valid').show(); } }); return li; }; /** Event handler to deal with user clicking on remove link * * @params event * @returns none */ LayerInfo.prototype.removeFileHandler = function (event) { var target = $(event.target), layer_info, layer_name = target.data('layer'), file_name = target.data('file'); this.removeFile(file_name); this.displayRefresh(); }; /** Function to remove a file from the file list * * @params {options} * @returns {string} */ LayerInfo.prototype.removeFile = function (name) { var length = this.files.length, i, file; for (i = 0; i < length; i += 1) { file = this.files[i]; if (name === file.name) { this.files.splice(i, 1); break; } } }; /** Function to display the files selected for uploading * * @params * @returns */ LayerInfo.prototype.displayFiles = function () { var self = this, ul = $('#' + LayerInfo.safeSelector(this.name) + '-element .files'); ul.empty(); $.each(this.files, function (idx, file) { var file_ext = file.name.substr(file.name.lastIndexOf('.') + 1); var li = $('<li/>').appendTo(ul), p = $('<p/>', {text: file.name}).appendTo(li), a = $('<a/>', {text: ' ' + gettext('Remove')}); if (file_ext === 'xml') { $('#metadata_uploaded_preserve_check').show(); } a.data('layer', self.name); a.data('file', file.name); a.attr('class', 'remove-file'); a.appendTo(p); a.on('click', function (event) { var target = $(event.target), layer_info, layer_name = target.data('layer'), file_name = target.data('file'); self.removeFile(file_name); if (self.files.length == 0) { delete layers[self.name]; } if (file_ext === 'xml') { $('#metadata_uploaded_preserve_check').hide(); } self.errors = self.collectErrors(); self.displayErrors(); }); }); }; /** Function to display errors * * @params * @returns */ LayerInfo.prototype.displayErrors = function () { var ul = $('#' + LayerInfo.safeSelector(this.name) + '-element .errors').first(); ul.empty(); $.each(this.errors, function (idx, error) { var li = $('<li/>', {text: error, 'class': 'alert alert-error'}); 
li.appendTo(ul); li.animate({opacity:1}, 5000, 'linear', function() { li.animate({opacity:0}, 1000, 'linear', function() {li.remove(); }); }); }); }; /** Function to refresh display after adding or removing files * * @params {options} * @returns {string} */ LayerInfo.prototype.displayRefresh = function () { this.errors = this.collectErrors(); this.displayFiles(); this.displayErrors(); }; LayerInfo.prototype.doImageMosaicToggle = function (event) { var target = event.target || event.srcElement; var id = target.id; var base_name = id.split('-mosaic')[0]; var mosaic_chkbox = $('#' + id).is(':checked'); if (mosaic_chkbox) { $('#' + base_name + '-mosaic-options').show(); } else { $('#' + base_name + '-mosaic-options').hide(); } }; LayerInfo.prototype.doImageMosaicTimedimOptionsToggle = function (event) { var target = event.target || event.srcElement; var id = target.id; var base_name = id.split('-timedim')[0]; var mosaic_chkbox = $('#' + id).is(':checked'); if (mosaic_chkbox) { $('#' + base_name + '-mosaic-timedim-options').show(); } else { $('#' + base_name + '-mosaic-timedim-options').hide(); } }; LayerInfo.prototype.doImageMosaicTimedimPresentationOptionsToggle = function (event) { var target = event.target || event.srcElement; var id = target.id; var base_name = id.split('-timedim')[0]; var mosaic_chkbox = $('#' + id).is(':checked'); if (mosaic_chkbox) { $('#' + base_name + '-mosaic-timedim-presentation-options').show(); } else { $('#' + base_name + '-mosaic-timedim-presentation-options').hide(); } }; LayerInfo.prototype.doImageMosaicGranuleOptionsToggle = function (event) { var target = event.target || event.srcElement; var id = target.id; var base_name = id.split('-mosaic')[0]; var mosaic_chkbox = $('#' + id).is(':checked'); if (mosaic_chkbox) { $('#' + base_name + '-mosaic-granule-format-options').show(); var dropdown = $('#' + base_name + '-mosaic-granule-format-select'); // Clear drop down list $(dropdown).empty(); $("<option />", { val: '', text: 'Select one 
Mosaic layer ...', selected: 'selected' }).appendTo(dropdown); // Fill drop down list with new data $(json_mosaics).each(function () { $("<option />", { val: this.name, text: this.name }).appendTo(dropdown); }); } else { $('#' + base_name + '-mosaic-granule-format-options').hide(); $('#' + base_name + '-timedim').prop("checked", false); $('#' + base_name + '-timedim').prop("disabled", false); $('#' + base_name + '-mosaic-timedim-options').hide(); $('#' + base_name + '-timedim-presentation').prop("checked", false); $('#' + base_name + '-timedim-presentation').prop("disabled", false); $('#' + base_name + '-mosaic-timedim-presentation-options').hide(); $('#' + base_name + '-timedim-format-select').val($('#' + base_name + '-timedim-format-select option:first').val()); $('#' + base_name + '-timedim-format-select').prop("disabled", false); } }; LayerInfo.prototype.doImageMosaicGranuleLayerSelect = function (event) { var target = event.target || event.srcElement; var id = target.id; var val = target.value; var base_name = id.split('-mosaic')[0]; if (val !== '') { $(json_mosaics).each(function () { if (this.name === val) { if (this.has_time === "True") { $('#' + base_name + '-timedim').prop("checked", true); $('#' + base_name + '-timedim').prop("disabled", true); $('#' + base_name + '-mosaic-timedim-options').show(); $('#' + base_name + '-timedim-presentation').prop("checked", false); $('#' + base_name + '-timedim-presentation').prop("disabled", true); $('#' + base_name + '-mosaic-timedim-presentation-options').hide(); $('#' + base_name + '-timedim-format-select').val(this.time_regex); $('#' + base_name + '-timedim-format-select').prop("disabled", true); } else { $('#' + base_name + '-timedim').prop("checked", false); $('#' + base_name + '-timedim').prop("disabled", false); $('#' + base_name + '-mosaic-timedim-options').hide(); $('#' + base_name + '-timedim-presentation').prop("checked", false); $('#' + base_name + '-timedim-presentation').prop("disabled", false); $('#' + 
base_name + '-mosaic-timedim-presentation-options').hide(); $('#' + base_name + '-timedim-format-select').val($('#' + base_name + '-timedim-format-select option:first').val()); $('#' + base_name + '-timedim-format-select').prop("disabled", false); } } }); } }; return LayerInfo; }); function updateUrl(url, key, value){ if (key == null || value == null){ return url; } var pair = key.concat('=').concat(value); return (url.lastIndexOf('?') > -1)? url.concat('&').concat(pair): url.concat('?').concat(pair); } function format(str, arr) { return str.replace(/%(\d+)/g, function(_,m) { return arr[--m]; }); }
PypiClean
/ATACFragQC-0.4.7.tar.gz/ATACFragQC-0.4.7/README.md
# ATACFragQC A Python toolkit designed to control the fragment quality of Bulk/Single-Cell ATAC-seq. ## Installation ~~~ python3 -m pip install --upgrade ATACFragQC ~~~ ## Usage ~~~ # Basic usage ATACFragQC [options] -i <input.bam> -r <reference.gtf> # For more information ATACFragQC -h ~~~ ## Features * The distribution of fragments in chromosomes * The distribution of fragment lengths * The distribution of fragments around transcription start sites (TSSs) * Other features will be supported in the future ... ## Overview ![Overview of ATACFragQC](https://raw.githubusercontent.com/0CBH0/ATACFragQC/main/Images/MCBULK_qc.png)
PypiClean
/Djaloha-0.4.2.tar.gz/Djaloha-0.4.2/djaloha/static/aloha.0.20/plugins/djaloha/align/lib/align-plugin.js
define( ['aloha', 'aloha/plugin', 'aloha/floatingmenu', 'i18n!align/nls/i18n', 'i18n!aloha/nls/i18n', 'aloha/jquery', 'css!align/css/align.css'], function(Aloha, Plugin, FloatingMenu, i18n, i18nCore, jQuery) { var GENTICS = window.GENTICS; /** * register the plugin with unique name */ return Plugin.create('align', { _constructor: function(){ this._super('align'); }, /** * Configure the available languages */ languages: ['en', 'fr'], /** * Configuration (available align options) */ config: { alignment: ['right','left','center','justify'] }, /** * Alignment wanted by the user */ alignment: '', /** * Alignment of the selection before modification */ lastAlignment: '', /** * Initialize the plugin and set initialize flag on true */ init: function () { this.createButtons(); var that = this; // apply specific configuration if an editable has been activated Aloha.bind('aloha-editable-activated', function (e, params) { that.applyButtonConfig(params.editable.obj); }); // add the event handler for selection change Aloha.bind('aloha-selection-changed', function(event, rangeObject) { if (Aloha.activeEditable) { that.buttonPressed(rangeObject); } }); }, buttonPressed: function (rangeObject) { var that = this; rangeObject.findMarkup(function() { that.alignment = jQuery(this).css('text-align'); }, Aloha.activeEditable.obj); if(this.alignment != this.lastAlignment) { switch(this.lastAlignment) { case 'right': this.alignRightButton.setPressed(false); break; case 'left': this.alignLeftButton.setPressed(false); break; case 'center': this.alignCenterButton.setPressed(false); break; case 'justify': this.alignJustifyButton.setPressed(false); break; } switch(this.alignment) { case 'right': this.alignRightButton.setPressed(true); break; case 'center': this.alignCenterButton.setPressed(true); break; case 'justify': this.alignJustifyButton.setPressed(true); break; default: this.alignLeftButton.setPressed(true); this.alignment = 'left'; break; } } this.lastAlignment = this.alignment; }, /** * 
applys a configuration specific for an editable * buttons not available in this configuration are hidden * @param {Object} id of the activated editable * @return void */ applyButtonConfig: function (obj) { var config = this.getEditableConfig(obj); if ( config && config.alignment && !this.settings.alignment ) { config = config; } else if ( config[0] && config[0].alignment) { config = config[0]; } else if ( this.settings.alignment ) { config.alignment = this.settings.alignment; } if (typeof config.alignment === 'undefined') { config = this.config; } if ( jQuery.inArray('right', config.alignment) != -1) { this.alignRightButton.show(); } else { this.alignRightButton.hide(); } if ( jQuery.inArray('left', config.alignment) != -1) { this.alignLeftButton.show(); } else { this.alignLeftButton.hide(); } if ( jQuery.inArray('center', config.alignment) != -1) { this.alignCenterButton.show(); } else { this.alignCenterButton.hide(); } if ( jQuery.inArray('justify', config.alignment) != -1) { this.alignJustifyButton.show(); } else { this.alignJustifyButton.hide(); } }, createButtons: function () { var that = this; // create a new button this.alignLeftButton = new Aloha.ui.Button({ 'name' : 'alignLeft', 'iconClass' : 'aloha-button-align aloha-button-align-left', 'size' : 'small', 'onclick' : function () { that.align('left'); }, 'tooltip' : i18n.t('button.alignleft.tooltip'), 'toggle' : true }); // add it to the floating menu FloatingMenu.addButton( 'Aloha.continuoustext', this.alignLeftButton, i18nCore.t('floatingmenu.tab.format'), 1 ); // create a new button this.alignCenterButton = new Aloha.ui.Button({ 'name' : 'alignCenter', 'iconClass' : 'aloha-button-align aloha-button-align-center', 'size' : 'small', 'onclick' : function () { that.align('center'); }, 'tooltip' : i18n.t('button.aligncenter.tooltip'), 'toggle' : true }); // add it to the floating menu FloatingMenu.addButton( 'Aloha.continuoustext', this.alignCenterButton, i18nCore.t('floatingmenu.tab.format'), 1 ); // create 
a new button this.alignRightButton = new Aloha.ui.Button({ 'name' : 'alignRight', 'iconClass' : 'aloha-button-align aloha-button-align-right', 'size' : 'small', 'onclick' : function () { that.align('right'); }, 'tooltip' : i18n.t('button.alignright.tooltip'), 'toggle' : true }); // add it to the floating menu FloatingMenu.addButton( 'Aloha.continuoustext', this.alignRightButton, i18nCore.t('floatingmenu.tab.format'), 1 ); // create a new button this.alignJustifyButton = new Aloha.ui.Button({ 'name' : 'alignJustify', 'iconClass' : 'aloha-button-align aloha-button-align-justify', 'size' : 'small', 'onclick' : function () { that.align('justify'); }, 'tooltip' : i18n.t('button.alignjustify.tooltip'), 'toggle' : true }); // add it to the floating menu FloatingMenu.addButton( 'Aloha.continuoustext', this.alignJustifyButton, i18nCore.t('floatingmenu.tab.format'), 1 ); }, /** * Check whether inside a align tag * @param {GENTICS.Utils.RangeObject} range range where to insert the object (at start or end) * @return markup * @hide */ findAlignMarkup: function ( range ) { var that = this; if ( typeof range === 'undefined' ) { var range = Aloha.Selection.getRangeObject(); } if ( Aloha.activeEditable ) { return range.findMarkup(function() { return jQuery(this).css('text-align') == that.alignment; }, Aloha.activeEditable.obj); } else { return null; } }, /** * Align the selection or remove it */ align: function ( tempAlignment ) { var range = Aloha.Selection.getRangeObject(); this.lastAlignment = this.alignment; this.alignment = tempAlignment; if (Aloha.activeEditable) { if ( this.findAlignMarkup( range ) ) { this.removeAlign(); } else { this.insertAlign(); } } }, /** * Align the selection */ insertAlign: function () { var that = this; // do not align the range if ( this.findAlignMarkup( range ) ) { return; } // current selection or cursor position var range = Aloha.Selection.getRangeObject(); // Iterates the whole selectionTree and align 
jQuery.each(Aloha.Selection.getRangeObject().getSelectionTree(), function () { if(this.selection !== 'none' && this.domobj.nodeType !== 3) { jQuery(this.domobj).css('text-align', that.alignment); } }); if(this.alignment != this.lastAlignment) { switch(this.lastAlignment) { case 'right': this.alignRightButton.setPressed(false); break; case 'left': this.alignLeftButton.setPressed(false); break; case 'center': this.alignCenterButton.setPressed(false); break; case 'justify': this.alignJustifyButton.setPressed(false); break; } } // select the (possibly modified) range range.select(); }, /** * Remove the alignment */ removeAlign: function () { var range = Aloha.Selection.getRangeObject(); if ( this.findAlignMarkup( range ) ) { // Remove the alignment range.findMarkup(function() { jQuery(this).css('text-align', ''); }, Aloha.activeEditable.obj); // select the (possibly modified) range range.select(); } } }); });
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/grid/enhanced/nls/ca/Filter.js
define( "dojox/grid/enhanced/nls/ca/Filter", //begin v1.x content ({ "clearFilterDialogTitle": "Netejar el filtre", "filterDefDialogTitle": "Filtre", "ruleTitleTemplate": "Regla ${0}", "conditionEqual": "igual que", "conditionNotEqual": "no és igual que", "conditionLess": "és menys que", "conditionLessEqual": "és menys o igual que", "conditionLarger": "és més que", "conditionLargerEqual": "és més o igual que", "conditionContains": "conté", "conditionIs": "és", "conditionStartsWith": "comença per", "conditionEndWith": "acaba per", "conditionNotContain": "no conté", "conditionIsNot": "no és", "conditionNotStartWith": "no comença per", "conditionNotEndWith": "no acaba per", "conditionBefore": "abans", "conditionAfter": "després", "conditionRange": "interval", "conditionIsEmpty": "és buida", "all": "tot", "any": "qualsevol", "relationAll": "totes les regles", "waiRelAll": "Fes coincidir totes les regles següents:", "relationAny": "qualsevol regla", "waiRelAny": "Fes coincidir qualsevol de les regles següents:", "relationMsgFront": "Coincidència", "relationMsgTail": "", "and": "i", "or": "o", "addRuleButton": "Afegeix regla", "waiAddRuleButton": "Afegeix una regla nova", "removeRuleButton": "Elimina regla", "waiRemoveRuleButtonTemplate": "Elimina la regla ${0}", "cancelButton": "Cancel·la", "waiCancelButton": "Cancel·la aquest diàleg", "clearButton": "Esborra", "waiClearButton": "Neteja el filtre", "filterButton": "Filtre", "waiFilterButton": "Envia el filtre", "columnSelectLabel": "Columna", "waiColumnSelectTemplate": "Columna per a la regla ${0}", "conditionSelectLabel": "Condició", "waiConditionSelectTemplate": "Condició per a la regla ${0}", "valueBoxLabel": "Valor", "waiValueBoxTemplate": "Especifiqueu el valor de filtre per a la regla ${0}", "rangeTo": "a", "rangeTemplate": "de ${0} a ${1}", "statusTipHeaderColumn": "Columna", "statusTipHeaderCondition": "Regles", "statusTipTitle": "Barra de filtre", "statusTipMsg": "Feu clic aquí a la barra de filtre per filtrar 
els valors a ${0}.", "anycolumn": "qualsevol columna", "statusTipTitleNoFilter": "Barra de filtre", "statusTipTitleHasFilter": "Filtre", "statusTipRelAny": "Coincideix amb qualsevol regla.", "statusTipRelAll": "Coincideix amb totes les regles.", "defaultItemsName": "elements", "filterBarMsgHasFilterTemplate": "Es mostren ${0} de ${1} ${2}.", "filterBarMsgNoFilterTemplate": "No s'ha aplicat cap filtre", "filterBarDefButton": "Defineix filtre", "waiFilterBarDefButton": "Filtra la taula", "a11yFilterBarDefButton": "Filtre...", "filterBarClearButton": "Netejar filtre", "waiFilterBarClearButton": "Neteja el filtre", "closeFilterBarBtn": "Tancar la barra de filtre", "clearFilterMsg": "Això eliminarà el filtre i mostrarà tots els registres disponibles.", "anyColumnOption": "Qualsevol columna", "trueLabel": "Cert", "falseLabel": "Fals" }) //end v1.x content );
PypiClean
/Auptimizer-2.0.tar.gz/Auptimizer-2.0/src/aup/compression/torch/pruning/agp.py
# Modified work Copyright (c) 2018 LG Electronics Inc.
# SPDX-License-Identifier: GPL-3.0-or-later
"""
An automated gradual pruning algorithm that prunes the smallest magnitude
weights to achieve a preset level of network sparsity.

Michael Zhu and Suyog Gupta,
"To prune, or not to prune: exploring the efficacy of pruning for model compression",
2017 NIPS Workshop on Machine Learning of Phones and other Consumer Devices.
"""

import logging

import torch
from schema import And, Optional

from .constants import MASKER_DICT
from ..utils.config_validation import CompressorSchema
from ..compressor import Pruner

__all__ = ['AGPPruner']

logger = logging.getLogger('torch pruner')


class AGPPruner(Pruner):
    """
    Iterative (epoch-driven) pruner that ramps sparsity from an initial to a
    final value following the cubic schedule of Zhu & Gupta (2017).

    Parameters
    ----------
    model : torch.nn.Module
        Model to be pruned.
    config_list : list
        Supported keys:
            - initial_sparsity: This is to specify the sparsity when compressor starts to compress.
            - final_sparsity: This is to specify the sparsity when compressor finishes to compress.
            - start_epoch: This is to specify the epoch number when compressor starts to compress, default start from epoch 0.
            - end_epoch: This is to specify the epoch number when compressor finishes to compress.
            - frequency: This is to specify every *frequency* number epochs compressor compress once, default frequency=1.
    optimizer : torch.optim.Optimizer
        Optimizer used to train model.
    pruning_algorithm : str
        Algorithm being used to prune model, choose from
        `['level', 'slim', 'l1', 'l2', 'fpgm', 'taylorfo', 'apoz', 'mean_activation']`,
        by default `level`.
    """

    def __init__(self, model, config_list, optimizer, pruning_algorithm='level'):
        super().__init__(model, config_list, optimizer)
        # AGP recomputes masks every few epochs, so it needs the training
        # optimizer to drive the iterative schedule.
        assert isinstance(optimizer, torch.optim.Optimizer), \
            "AGP pruner is an iterative pruner, please pass optimizer of the model to it"
        self.masker = MASKER_DICT[pruning_algorithm](model, self)

        self.now_epoch = 0
        self.set_wrappers_attribute("if_calculated", False)

    def validate_config(self, model, config_list):
        """
        Validate the pruning configuration against the expected schema.

        Parameters
        ----------
        model : torch.nn.Module
            Model to be pruned
        config_list : list
            List of pruning configs

        Raises
        ------
        schema.SchemaError
            If any config entry violates the schema.
        """
        schema = CompressorSchema([{
            'initial_sparsity': And(float, lambda n: 0 <= n <= 1),
            'final_sparsity': And(float, lambda n: 0 <= n <= 1),
            'start_epoch': And(int, lambda n: n >= 0),
            'end_epoch': And(int, lambda n: n >= 0),
            'frequency': And(int, lambda n: n > 0),
            Optional('op_types'): [str],
            Optional('op_names'): [str]
        }], model, logger)

        schema.validate(config_list)

    def calc_mask(self, wrapper, wrapper_idx=None):
        """
        Calculate the mask of the given layer by delegating to the configured
        masker at the sparsity level dictated by the AGP schedule for the
        current epoch. A new mask is produced at most once per scheduled epoch
        (``start_epoch``, then every ``frequency`` epochs).

        Parameters
        ----------
        wrapper : Module
            the layer to instrument the compression operation
        wrapper_idx : int
            index of this wrapper in pruner's all wrappers

        Returns
        -------
        dict | None
            Dictionary for storing masks, keys of the dict:
            'weight_mask':  weight mask tensor
            'bias_mask': bias mask tensor (optional)
            ``None`` when no (re)computation is due for this epoch.
        """
        config = wrapper.config
        start_epoch = config.get('start_epoch', 0)
        freq = config.get('frequency', 1)

        # Already computed for this scheduling point; update_epoch() resets it.
        if wrapper.if_calculated:
            return None
        # Only recompute on scheduled epochs: at or after start_epoch, on the
        # frequency grid anchored at start_epoch.
        if not (self.now_epoch >= start_epoch
                and (self.now_epoch - start_epoch) % freq == 0):
            return None

        target_sparsity = self.compute_target_sparsity(config)
        new_mask = self.masker.calc_mask(sparsity=target_sparsity,
                                         wrapper=wrapper,
                                         wrapper_idx=wrapper_idx)
        # The masker may decline (return None); only then do we skip marking
        # the wrapper as done so a later call can retry.
        if new_mask is not None:
            wrapper.if_calculated = True

        return new_mask

    def compute_target_sparsity(self, config):
        """
        Calculate the target sparsity for the current epoch.

        Follows the cubic ramp of Zhu & Gupta (2017):
        s_t = s_f + (s_i - s_f) * (1 - (t - t0)/span)^3

        Parameters
        ----------
        config : dict
            Layer's pruning config

        Returns
        -------
        float
            Target sparsity to be pruned
        """
        end_epoch = config.get('end_epoch', 1)
        start_epoch = config.get('start_epoch', 0)
        freq = config.get('frequency', 1)
        final_sparsity = config.get('final_sparsity', 0)
        initial_sparsity = config.get('initial_sparsity', 0)

        if end_epoch <= start_epoch or initial_sparsity >= final_sparsity:
            logger.warning('your end epoch <= start epoch or initial_sparsity >= final_sparsity')
            return final_sparsity

        if end_epoch <= self.now_epoch:
            return final_sparsity

        # Number of epochs over which the ramp runs, rounded down to a whole
        # number of frequency steps.
        span = ((end_epoch - start_epoch - 1) // freq) * freq
        # Fix: when end_epoch - start_epoch <= freq the span is 0 and the
        # original `assert span > 0` crashed (and asserts vanish under -O).
        # With no room to ramp, jump straight to the final sparsity.
        if span <= 0:
            return final_sparsity
        target_sparsity = (final_sparsity +
                           (initial_sparsity - final_sparsity) *
                           (1.0 - ((self.now_epoch - start_epoch) / span)) ** 3)
        return target_sparsity

    def update_epoch(self, epoch):
        """
        Advance the pruner to a new training epoch and re-arm every wrapper
        so masks can be recomputed for the new epoch.

        Parameters
        ----------
        epoch : int
            current training epoch
        """
        if epoch > 0:
            self.now_epoch = epoch
            for wrapper in self.get_modules_wrapper():
                wrapper.if_calculated = False
PypiClean
/Mathics-1.0.tar.gz/Mathics-1.0/mathics/web/media/js/mathjax/jax/output/SVG/jax.js
(function(h,c,e,a){var g;var b="http://www.w3.org/2000/svg";var j="http://www.w3.org/1999/xlink";a.Augment({config:{styles:{".MathJax_SVG":{display:"inline","font-style":"normal","font-weight":"normal","line-height":"normal","font-size":"100%","font-size-adjust":"none","text-indent":0,"text-align":"left","text-transform":"none","letter-spacing":"normal","word-spacing":"normal","word-wrap":"normal","white-space":"nowrap","float":"none",direction:"ltr",border:0,padding:0,margin:0},".MathJax_SVG_Display":{position:"relative",display:"block",width:"100%"},".MathJax_SVG_Processing":{visibility:"hidden",position:"absolute",top:0,left:0,width:0,height:0,overflow:"hidden",display:"block"},".MathJax_SVG_Processed":{display:"none!important"},".MathJax_SVG_ExBox":{display:"block",overflow:"hidden",width:"1px",height:"60ex"},"#MathJax_SVG_Tooltip":{position:"absolute",left:0,top:0,width:"auto",height:"auto",display:"none"}}},hideProcessedMath:true,Config:function(){var k=c.config.menuSettings;if(k.scale){this.config.scale=k.scale}this.SUPER(arguments).Config.apply(this,arguments);this.fontInUse=this.config.font;this.fontDir+="/"+this.config.font;if(!this.require){this.require=[]}this.require.push(this.fontDir+"/fontdata.js");this.require.push(MathJax.OutputJax.extensionDir+"/MathEvents.js")},Startup:function(){EVENT=MathJax.Extension.MathEvents.Event;TOUCH=MathJax.Extension.MathEvents.Touch;HOVER=MathJax.Extension.MathEvents.Hover;this.ContextMenu=EVENT.ContextMenu;this.Mousedown=EVENT.AltContextMenu;this.Mouseover=HOVER.Mouseover;this.Mouseout=HOVER.Mouseout;this.Mousemove=HOVER.Mousemove;this.hiddenDiv=e.Element("div",{style:{visibility:"hidden",overflow:"hidden",position:"absolute",top:0,height:"1px",width:"auto",padding:0,border:0,margin:0,textAlign:"left",textIndent:0,textTransform:"none",lineHeight:"normal",letterSpacing:"normal",wordSpacing:"normal"}});if(!document.body.firstChild){document.body.appendChild(this.hiddenDiv)}else{document.body.insertBefore(this.hiddenDiv,d
ocument.body.firstChild)}this.hiddenDiv=e.addElement(this.hiddenDiv,"div",{id:"MathJax_SVG_Hidden"});var k=e.addElement(this.hiddenDiv,"div",{style:{width:"5in"}});this.pxPerInch=k.offsetWidth/5;this.hiddenDiv.removeChild(k);this.textSVG=this.Element("svg");d=this.addElement(this.addElement(this.hiddenDiv.parentNode,"svg"),"defs",{id:"MathJax_SVG_glyphs"});f={};this.ExSpan=e.Element("span",{style:{position:"absolute","font-size-adjust":"none"}},[["span",{className:"MathJax_SVG_ExBox"}]]);this.linebreakSpan=e.Element("span",null,[["hr",{style:{width:"auto",size:1,padding:0,border:0,margin:0}}]]);return h.Styles(this.config.styles,["InitializeSVG",this])},InitializeSVG:function(){document.body.appendChild(this.ExSpan);document.body.appendChild(this.linebreakSpan);this.defaultEx=this.ExSpan.firstChild.offsetHeight/60;this.defaultWidth=this.linebreakSpan.firstChild.offsetWidth;document.body.removeChild(this.linebreakSpan);document.body.removeChild(this.ExSpan)},preTranslate:function(o){var t=o.jax[this.id],u,r=t.length,z,s,A,n,y,l,x,q,k,w=false,v,B=this.config.linebreaks.automatic,p=this.config.linebreaks.width;if(B){w=(p.match(/^\s*(\d+(\.\d*)?%\s*)?container\s*$/)!=null);if(w){p=p.replace(/\s*container\s*/,"")}else{k=this.defaultWidth}if(p===""){p="100%"}}else{k=100000}for(u=0;u<r;u++){z=t[u];if(!z.parentNode){continue}s=z.previousSibling;if(s&&String(s.className).match(/^MathJax(_SVG)?(_Display)?$/)){s.parentNode.removeChild(s)}l=z.MathJax.elementJax;l.SVG={display:(l.root.Get("display")==="block")};A=n=e.Element("span",{style:{"font-size":this.scale+"%",display:"inline-block"},className:"MathJax_SVG",id:l.inputID+"-Frame",isMathJax:true,jaxID:this.id,oncontextmenu:EVENT.Menu,onmousedown:EVENT.Mousedown,onmouseover:EVENT.Mouseover,onmouseout:EVENT.Mouseout,onmousemove:EVENT.Mousemove,onclick:EVENT.Click,ondblclick:EVENT.DblClick});if(c.Browser.noContextMenu){A.ontouchstart=TOUCH.start;A.ontouchend=TOUCH.end}if(l.SVG.display){n=e.Element("div",{className:"MathJax_SVG_
Display"});n.appendChild(A)}n.setAttribute("role","textbox");n.setAttribute("aria-readonly","true");n.className+=" MathJax_SVG_Processing";z.parentNode.insertBefore(n,z);z.parentNode.insertBefore(this.ExSpan.cloneNode(true),z);n.parentNode.insertBefore(this.linebreakSpan.cloneNode(true),n)}for(u=0;u<r;u++){z=t[u];if(!z.parentNode){continue}y=z.previousSibling;n=y.previousSibling;l=z.MathJax.elementJax;x=y.firstChild.offsetHeight/60;v=n.previousSibling.firstChild.offsetWidth;if(w){k=v}if(x===0||x==="NaN"){this.hiddenDiv.appendChild(n);l.SVG.isHidden=true;x=this.defaultEx;v=this.defaultWidth;if(w){k=this.defaultWidth}}l.SVG.ex=x;l.SVG.cwidth=v;l.SVG.em=q=x/a.TeX.x_height*1000;l.SVG.lineWidth=(B?this.length2em(p,1,k/q):1000000)}for(u=0;u<r;u++){z=t[u];if(!z.parentNode){continue}y=t[u].previousSibling;l=t[u].MathJax.elementJax;A=y.previousSibling;if(!l.SVG.isHidden){A=A.previousSibling}A.parentNode.removeChild(A);y.parentNode.removeChild(y)}o.SVGeqn=o.SVGlast=0;o.SVGchunk=this.config.EqnChunk;o.SVGdelay=false},Translate:function(l,p){if(!l.parentNode){return}if(p.SVGdelay){p.SVGdelay=false;c.RestartAfter(MathJax.Callback.Delay(this.config.EqnChunkDelay))}var k=l.MathJax.elementJax,o=k.root,m=document.getElementById(k.inputID+"-Frame"),q=(k.SVG.display?m.parentNode:m);this.em=g.mbase.prototype.em=k.SVG.em;this.ex=k.SVG.ex;this.linebreakWidth=k.SVG.lineWidth*1000;this.cwidth=k.SVG.cwidth;this.mathDiv=q;m.appendChild(this.textSVG);this.initSVG(o,m);o.setTeXclass();try{o.toSVG(m,q)}catch(n){if(n.restart){while(m.firstChild){m.removeChild(m.firstChild)}}throw n}m.removeChild(this.textSVG);if(k.SVG.isHidden){l.parentNode.insertBefore(q,l)}q.className=q.className.split(/ /)[0];if(this.hideProcessedMath){q.className+=" MathJax_SVG_Processed";if(l.MathJax.preview){k.SVG.preview=l.MathJax.preview;delete 
l.MathJax.preview}p.SVGeqn++;if(p.SVGeqn>=p.SVGlast+p.SVGchunk){this.postTranslate(p);p.SVGchunk=Math.floor(p.SVGchunk*this.config.EqnChunkFactor);p.SVGdelay=true}}},postTranslate:function(q){var l=q.jax[this.id];if(!this.hideProcessedMath){return}for(var o=q.SVGlast,k=q.SVGeqn;o<k;o++){var n=l[o];if(n){n.previousSibling.className=n.previousSibling.className.split(/ /)[0];var p=n.MathJax.elementJax.SVG;if(p.preview){p.preview.innerHTML="";n.MathJax.preview=p.preview;delete p.preview}}}q.SVGlast=q.SVGeqn},getJaxFromMath:function(k){if(k.parentNode.className==="MathJax_SVG_Display"){k=k.parentNode}return c.getJaxFor(k.nextSibling)},getHoverSpan:function(k,l){l.style.position="relative";return l.firstChild},getHoverBBox:function(k,l,m){var n=EVENT.getBBox(l.parentNode);n.h+=2;n.d-=2;return n},Zoom:function(l,s,q,k,p){s.className="MathJax_SVG";var u=s.appendChild(this.ExSpan.cloneNode(true));var o=u.firstChild.offsetHeight/60;this.em=g.mbase.prototype.em=o/a.TeX.x_height*1000;this.cwidth=0.85*a.defaultWidth;u.parentNode.removeChild(u);this.idPostfix="-zoom";l.root.toSVG(s,s);this.idPostfix="";s.style.position=q.style.position="absolute";var r=s.offsetWidth,n=s.offsetHeight,t=q.offsetHeight,m=q.offsetWidth;s.style.position=q.style.position="";return{Y:-EVENT.getBBox(s).h,mW:m,mH:t,zW:r,zH:n}},initSVG:function(l,k){},Remove:function(k){var l=document.getElementById(k.inputID+"-Frame");if(l){if(k.SVG.display){l=l.parentNode}l.parentNode.removeChild(l)}delete k.SVG},Em:function(k){if(Math.abs(k)<0.0006){return"0em"}return k.toFixed(3).replace(/\.?0+$/,"")+"em"},Ex:function(k){k=k/this.TeX.x_height;if(Math.abs(k)<0.0006){return"0ex"}return k.toFixed(3).replace(/\.?0+$/,"")+"ex"},Percent:function(k){return(100*k).toFixed(1).replace(/\.?0+$/,"")+"%"},length2em:function(q,l,o){if(typeof(q)!=="string"){q=q.toString()}if(q===""){return""}if(q===g.SIZE.NORMAL){return 1000}if(q===g.SIZE.BIG){return 2000}if(q===g.SIZE.SMALL){return 710}if(q==="infinity"){return 
a.BIGDIMEN}if(q.match(/mathspace$/)){return 1000*a.MATHSPACE[q]}var n=q.match(/^\s*([-+]?(?:\.\d+|\d+(?:\.\d*)?))?(pt|em|ex|mu|px|pc|in|mm|cm|%)?/);var k=parseFloat(n[1]||"1")*1000,p=n[2];if(o==null){o=1000}if(l==null){l=1}if(p==="em"){return k}if(p==="ex"){return k*a.TeX.x_height/1000}if(p==="%"){return k/100*o/1000}if(p==="px"){return k/a.em}if(p==="pt"){return k/10}if(p==="pc"){return k*1.2}if(p==="in"){return k*this.pxPerInch/a.em}if(p==="cm"){return k*this.pxPerInch/a.em/2.54}if(p==="mm"){return k*this.pxPerInch/a.em/25.4}if(p==="mu"){return k/18*l}return k*o/1000},thickness2em:function(l,k){var m=a.TeX.rule_thickness;if(l===g.LINETHICKNESS.MEDIUM){return m}if(l===g.LINETHICKNESS.THIN){return 0.67*m}if(l===g.LINETHICKNESS.THICK){return 1.67*m}return this.length2em(l,k,m)},getPadding:function(l){var n={top:0,right:0,bottom:0,left:0},k=false;for(var o in n){if(n.hasOwnProperty(o)){var m=l.style["padding"+o.charAt(0).toUpperCase()+o.substr(1)];if(m){n[o]=this.length2em(m);k=true}}}return(k?n:false)},getBorders:function(o){var m={top:0,right:0,bottom:0,left:0},l=false;for(var p in m){if(m.hasOwnProperty(p)){var k="border"+p.charAt(0).toUpperCase()+p.substr(1);var n=o.style[k+"Style"];if(n){l=true;m[p]=this.length2em(o.style[k+"Width"]);m[p+"Style"]=o.style[k+"Style"];m[p+"Color"]=o.style[k+"Color"];if(m[p+"Color"]==="initial"){m[p+"Color"]=""}}}}return(l?m:false)},Element:function(k,l){var m=(typeof(k)==="string"?document.createElementNS(b,k):k);m.isMathJax=true;if(l){for(var n in l){if(l.hasOwnProperty(n)){m.setAttribute(n,l[n].toString())}}}return m},addElement:function(l,k,m){return l.appendChild(this.Element(k,m))},TextNode:e.TextNode,addText:e.addText,ucMatch:e.ucMatch,HandleVariant:function(s,r,B){var u=i.G();var 
o,w,y,p,z,t,q,l,x,k;if(!s){s=this.FONTDATA.VARIANT[g.VARIANT.NORMAL]}if(s.forceFamily){B=i.TEXT(r,B,s.font);if(s.h!=null){B.h=s.h}if(s.d!=null){B.d=s.d}u.Add(B);B=""}z=s;for(t=0,q=B.length;t<q;t++){s=z;o=B.charCodeAt(t);y=B.charAt(t);if(o>=55296&&o<56319){t++;o=(((o-55296)<<10)+(B.charCodeAt(t)-56320))+65536;if(this.FONTDATA.RemapPlane1){var v=this.FONTDATA.RemapPlane1(o,s);o=v.n;s=v.variant}}else{k=this.FONTDATA.RANGES;for(l=0,x=k.length;l<x;l++){if(k[l].name==="alpha"&&s.noLowerCase){continue}w=s["offset"+k[l].offset];if(w&&o>=k[l].low&&o<=k[l].high){if(k[l].remap&&k[l].remap[o]){o=w+k[l].remap[o]}else{o=o-k[l].low+w;if(k[l].add){o+=k[l].add}}if(s["variant"+k[l].offset]){s=this.FONTDATA.VARIANT[s["variant"+k[l].offset]]}break}}}if(s.remap&&s.remap[o]){if(s.remap[o] instanceof Array){var A=s.remap[o];o=A[0];s=this.FONTDATA.VARIANT[A[1]]}else{if(typeof(s.remap[o])==="string"){B=s.remap[o]+B.substr(t+1);t=0;q=B.length;o=B.charCodeAt(0)}else{o=s.remap[o];if(s.remap.variant){s=this.FONTDATA.VARIANT[s.remap.variant]}}}}if(this.FONTDATA.REMAP[o]&&!s.noRemap){o=this.FONTDATA.REMAP[o];if(o instanceof Array){s=this.FONTDATA.VARIANT[o[1]];o=o[0]}if(typeof(o)==="string"){B=o+B.substr(t+1);t=0;q=B.length;o=o.charCodeAt(0)}}p=this.lookupChar(s,o);y=p[o];if(y){y=[r,p.id+"-"+o.toString(16).toUpperCase()].concat(y);u.Add(i.GLYPH.apply(i,y),u.w,0)}else{if(this.FONTDATA.DELIMITERS[o]){u.Add(this.createDelimiter(o,0,1,p),u.w,0)}else{if(o<=65535){y=String.fromCharCode(o)}else{w=o-65536;y=String.fromCharCode((w>>10)+55296)+String.fromCharCode((w&1023)+56320)}B=i.TEXT(r,y,{"font-family":s.defaultFamily||a.config.undefinedFamily,"font-style":(s.italic?"italic":""),"font-weight":(s.bold?"bold":"")});if(s.h!=null){B.h=s.h}if(s.d!=null){B.d=s.d}y=i.G();y.Add(B);u.Add(y,u.w,0);B="";c.signal.Post(["SVG Jax - unknown char",o,s])}}}if(B.length==1&&p.skew&&p.skew[o]){u.skew=p.skew[o]*1000}if(u.element.childNodes.length===1){u.element=u.element.firstChild;u.removeable=false;u.scale=r}return 
u},lookupChar:function(p,s){var o,k;if(!p.FONTS){var r=this.FONTDATA.FONTS;var q=(p.fonts||this.FONTDATA.VARIANT.normal.fonts);if(!(q instanceof Array)){q=[q]}if(p.fonts!=q){p.fonts=q}p.FONTS=[];for(o=0,k=q.length;o<k;o++){if(r[q[o]]){p.FONTS.push(r[q[o]])}}}for(o=0,k=p.FONTS.length;o<k;o++){var l=p.FONTS[o];if(typeof(l)==="string"){delete p.FONTS;this.loadFont(l)}if(l[s]){return l}else{this.findBlock(l,s)}}return{id:"unknown"}},findBlock:function(l,q){if(l.Ranges){for(var p=0,k=l.Ranges.length;p<k;p++){if(q<l.Ranges[p][0]){return}if(q<=l.Ranges[p][1]){var o=l.Ranges[p][2];for(var n=l.Ranges.length-1;n>=0;n--){if(l.Ranges[n][2]==o){l.Ranges.splice(n,1)}}this.loadFont(l.directory+"/"+o+".js")}}}},loadFont:function(k){c.RestartAfter(h.Require(this.fontDir+"/"+k))},createDelimiter:function(k,n,q,o){if(!q){q=1}var s=i.G();if(!k){s.Clean();delete s.element;s.w=s.r=this.TeX.nulldelimiterspace*q;return s}if(!(n instanceof Array)){n=[n,n]}var t=n[1];n=n[0];var l={alias:k};while(l.alias){k=l.alias;l=this.FONTDATA.DELIMITERS[k];if(!l){l={HW:[0,this.FONTDATA.VARIANT[g.VARIANT.NORMAL]]}}}if(l.load){c.RestartAfter(h.Require(this.fontDir+"/fontdata-"+l.load+".js"))}for(var r=0,p=l.HW.length;r<p;r++){if(l.HW[r][0]*q>=n-10-a.config.blacker||(r==p-1&&!l.stretch)){if(l.HW[r][2]){q*=l.HW[r][2]}if(l.HW[r][3]){k=l.HW[r][3]}return this.createChar(q,[k,l.HW[r][1]],o).With({stretched:true})}}if(l.stretch){this["extendDelimiter"+l.dir](s,t,l.stretch,q,o)}return s},createChar:function(s,q,n){var r="",p={fonts:[q[1]],noRemap:true};if(n&&n===g.VARIANT.BOLD){p.fonts=[q[1]+"-bold",q[1]]}if(typeof(q[1])!=="string"){p=q[1]}if(q[0] instanceof Array){for(var o=0,k=q[0].length;o<k;o++){r+=String.fromCharCode(q[0][o])}}else{r=String.fromCharCode(q[0])}var l=this.HandleVariant(p,s,r);if(q[2]){l.x=q[2]*1000}if(q[3]){l.y=q[3]*1000}if(q[5]){l.h+=q[5]*1000}if(q[6]){l.d+=q[6]*1000}return l},extendDelimiterV:function(r,A,m,o,n){var x=this.createChar(o,(m.top||m.ext),n);var 
u=this.createChar(o,(m.bot||m.ext),n);var q=x.h+x.d+u.h+u.d;var w=-x.h;r.Add(x,0,w);w-=x.d;if(m.mid){var z=this.createChar(o,m.mid,n);q+=z.h+z.d}if(A>q){var l=this.createChar(o,m.ext,n);var p=(m.mid?2:1),v=(A-q)/p,B=(v+100)/(l.h+l.d);while(p-->0){var t=a.Element("g",{transform:"translate("+l.y+","+(w-B*l.h+50+l.y)+") scale(1,"+B+")"});t.appendChild(l.element.cloneNode(false));r.element.appendChild(t);w-=v;if(m.mid&&p){r.Add(z,0,w-z.h);w-=(z.h+z.d)}}}else{if(m.mid){w+=(q-A)/2;r.Add(z,0,w-z.h);w+=-(z.h+z.d)+(q-A)/2}else{w+=(q-A)}}r.Add(u,0,w-u.h);r.Clean();r.scale=o;r.isMultiChar=true},extendDelimiterH:function(t,o,m,q,n){var p=this.createChar(q,(m.left||m.rep),n);var C=this.createChar(q,(m.right||m.rep),n);t.Add(p,-p.l,0);var B=(p.r-p.l)+(C.r-C.l),z=p.r-p.l;if(m.mid){var A=this.createChar(q,m.mid,n);B+=A.w}if(o>B){var y=this.createChar(q,m.rep,n),l=m.fuzz||0;var r=(m.mid?2:1),v=(o-B)/r,E=(v+l)/(y.r-y.l);while(r-->0){var u=a.Element("g",{transform:"translate("+(z-l/2-E*y.l+y.x)+","+y.y+") scale("+E+",1)"});u.appendChild(y.element.cloneNode(false));t.element.appendChild(u);z+=v;if(m.mid&&r){t.Add(A,z,0);z+=A.w}}}else{if(m.mid){var 
D=Math.min(B-o,p.w/2);z-=D/2;t.Add(A,z,0);z+=A.w-D/2}else{z-=(B-o)}}t.Add(C,z-C.l,0);t.Clean();t.scale=q;t.isMultiChar=true},MATHSPACE:{veryverythinmathspace:1/18,verythinmathspace:2/18,thinmathspace:3/18,mediummathspace:4/18,thickmathspace:5/18,verythickmathspace:6/18,veryverythickmathspace:7/18,negativeveryverythinmathspace:-1/18,negativeverythinmathspace:-2/18,negativethinmathspace:-3/18,negativemediummathspace:-4/18,negativethickmathspace:-5/18,negativeverythickmathspace:-6/18,negativeveryverythickmathspace:-7/18},TeX:{x_height:430.554,quad:1000,num1:676.508,num2:393.732,num3:443.73,denom1:685.951,denom2:344.841,sup1:412.892,sup2:362.892,sup3:288.888,sub1:150,sub2:247.217,sup_drop:386.108,sub_drop:50,delim1:2390,delim2:1000,axis_height:250,rule_thickness:60,big_op_spacing1:111.111,big_op_spacing2:166.666,big_op_spacing3:200,big_op_spacing4:600,big_op_spacing5:100,scriptspace:100,nulldelimiterspace:120,delimiterfactor:901,delimitershortfall:100,min_rule_thickness:1.25,min_root_space:1.5},BIGDIMEN:10000000,NBSP:"\u00A0"});var i=a.BBOX=MathJax.Object.Subclass({type:"g",removeable:true,Init:function(k){this.h=this.d=-a.BIGDIMEN;this.H=this.D=0;this.w=this.r=0;this.l=a.BIGDIMEN;this.x=this.y=0;this.scale=1;this.n=0;if(this.type){this.element=a.Element(this.type,k)}},With:function(k){return c.Insert(this,k)},Add:function(n,m,l,p,o){if(m){n.x+=m}if(l){n.y+=l}if(n.element){if(n.removeable&&n.element.childNodes.length===1&&n.n===1){var r=n.element.firstChild;if(r.nodeName==="use"||r.nodeName==="rect"){n.element=r;n.scale=n.childScale;var 
k=n.childX,q=n.childY;n.x+=k;n.y+=q;n.h-=q;n.d+=q;n.H-=q;n.D+=q;n.w-=k;n.r-=k;n.l+=k;n.removeable=false}}if(Math.abs(n.x)<1&&Math.abs(n.y)<1){n.remove=n.removeable}else{if(n.element.nodeName==="g"){if(!n.element.firstChild){n.remove=n.removeable}else{n.element.setAttribute("transform","translate("+Math.floor(n.x)+","+Math.floor(n.y)+")")}}else{if(n.element.nodeName==="line"||n.element.nodeName==="polygon"||n.element.nodeName==="path"||n.element.nodeName==="a"){n.element.setAttribute("transform","translate("+Math.floor(n.x)+","+Math.floor(n.y)+")")}else{n.element.setAttribute("x",Math.floor(n.x/n.scale));n.element.setAttribute("y",Math.floor(n.y/n.scale))}}}if(n.remove){this.n+=n.n;while(n.element.firstChild){if(o&&this.element.firstChild){this.element.insertBefore(n.element.firstChild,this.element.firstChild)}else{this.element.appendChild(n.element.firstChild)}}}else{if(o){this.element.insertBefore(n.element,this.element.firstChild)}else{this.element.appendChild(n.element)}}delete n.element}if(n.hasIndent){this.hasIndent=n.hasIndent}if(n.d-n.y>this.d){this.d=n.d-n.y;if(this.d>this.D){this.D=this.d}}if(n.y+n.h>this.h){this.h=n.y+n.h;if(this.h>this.H){this.H=this.h}}if(n.D-n.y>this.D){this.D=n.D-n.y}if(n.y+n.H>this.H){this.H=n.y+n.H}if(n.x+n.l<this.l){this.l=n.x+n.l}if(n.x+n.r>this.r){this.r=n.x+n.r}if(p||n.x+n.w+(n.X||0)>this.w){this.w=n.x+n.w+(n.X||0)}this.childScale=n.scale;this.childX=n.x;this.childY=n.y;this.n++;return n},Align:function(m,n,l,k){l=({left:l,center:(this.w-m.w)/2,right:this.w-m.w-l})[n]||0;this.Add(m,l,k)},Clean:function(){if(this.h===-a.BIGDIMEN){this.h=this.d=this.l=0}return this}});i.ROW=i.Subclass({Init:function(){this.SUPER(arguments).Init.call(this);this.svg=[];this.sh=this.sd=0},Check:function(l){var k=l.toSVG();this.svg.push(k);if(l.SVGcanStretch("Vertical")){k.mml=l}if(k.h>this.sh){this.sh=k.h}if(k.d>this.sd){this.sd=k.d}},Stretch:function(){for(var n=0,k=this.svg.length;n<k;n++){var 
l=this.svg[n];if(l.mml){l=l.mml.SVGstretchV(this.sh,this.sd)}if(l.ic){this.ic=l.ic}else{delete this.ic}this.Add(l,this.w,0,true)}delete this.svg}});i.RECT=i.Subclass({type:"rect",removeable:false,Init:function(l,n,k,m){if(m==null){m={stroke:"none"}}m.width=Math.floor(k);m.height=Math.floor(l+n);this.SUPER(arguments).Init.call(this,m);this.w=this.r=k;this.h=this.H=l+n;this.d=this.D=this.l=0;this.y=-n}});i.FRAME=i.Subclass({type:"rect",removeable:false,Init:function(n,q,k,m,p,l,o){if(o==null){o={}}o.fill="none";o["stroke-width"]=m.toFixed(2).replace(/\.?0+$/,"");o.width=Math.floor(k-m);o.height=Math.floor(n+q-m);o.transform="translate("+Math.floor(m/2)+","+Math.floor(-q+m/2)+")";if(p==="dashed"){o["stroke-dasharray"]=[Math.floor(6*a.em),Math.floor(6*a.em)].join(" ")}this.SUPER(arguments).Init.call(this,o);this.w=this.r=k;this.h=this.H=n;this.d=this.D=q;this.l=0}});i.HLINE=i.Subclass({type:"line",removeable:false,Init:function(l,p,r,o,q){if(q==null){q={}}if(o&&o!==""){q.stroke=o}q["stroke-width"]=p.toFixed(2).replace(/\.?0+$/,"");q.x1=0;q.y1=q.y2=p/2;q.x2=Math.floor(l);if(r==="dashed"){var s=Math.floor(l/(6*p)),k=Math.floor(l/(2*s+1));q["stroke-dasharray"]=k+" "+k}this.SUPER(arguments).Init.call(this,q);this.w=this.r=l;this.l=0;this.h=this.H=p;this.d=this.D=0}});i.VLINE=i.Subclass({type:"line",removeable:false,Init:function(p,o,r,l,q){if(q==null){q={}}if(l&&l!==""){q.stroke=l}q["stroke-width"]=o.toFixed(2).replace(/\.?0+$/,"");q.x1=q.x2=o/2;q.y1=0;q.y2=Math.floor(p);if(r==="dashed"){var s=Math.floor(p/(6*o)),k=Math.floor(p/(2*s+1));q["stroke-dasharray"]=k+" "+k}this.SUPER(arguments).Init.call(this,q);this.w=this.r=o;this.l=0;this.h=this.H=p;this.d=this.D=0}});i.TEXT=i.Subclass({type:"text",removeable:false,Init:function(n,m,k){if(!k){k={}}k.stroke="none";this.SUPER(arguments).Init.call(this,k);a.addText(this.element,m);a.textSVG.appendChild(this.element);var 
l=this.element.getBBox();a.textSVG.removeChild(this.element);n*=1000/a.em;this.element.setAttribute("transform","scale("+n+") matrix(1 0 0 -1 0 0)");this.w=this.r=l.width*n;this.l=0;this.h=this.H=-l.y*n;this.d=this.D=(l.height+l.y)*n}});i.G=i;i.NULL=i.Subclass({Init:function(){this.SUPER(arguments).Init.apply(this,arguments);this.Clean()}});var f,d;i.GLYPH=i.Subclass({type:"path",removeable:false,Init:function(q,m,u,v,x,s,k,n){var o,y=a.config.blacker;if(!f[m]){o={id:m,"stroke-width":y};if(n!==""){o.d="M"+n+"Z"}this.SUPER(arguments).Init.call(this,o);d.appendChild(this.element);f[m]=true}o={};if(q!==1){o.transform="scale("+q+")"}this.element=a.Element("use",o);this.element.setAttributeNS(j,"href","#"+m);this.h=(u+y)*q;this.d=(v+y)*q;this.w=(x+y/2)*q;this.l=(s+y/2)*q;this.r=(k+y/2)*q;this.H=Math.max(0,this.h);this.D=Math.max(0,this.d);this.x=this.y=0;this.scale=q}});c.Register.StartupHook("mml Jax Ready",function(){g=MathJax.ElementJax.mml;g.mbase.Augment({SVG:i,toSVG:function(){this.SVGgetStyles();var o=this.SVGgetVariant();var l=this.SVG();l.scale=this.SVGgetScale();this.SVGhandleSpace(l);for(var n=0,k=this.data.length;n<k;n++){if(this.data[n]){var q=l.Add(this.data[n].toSVG(o,l.scale),l.w,0,true);if(q.skew){l.skew=q.skew}}}l.Clean();var p=this.data.join("");if(l.skew&&p.length!==1){delete l.skew}if(l.r>l.w&&p.length===1&&!o.noIC){l.ic=l.r-l.w;l.w=l.r}this.SVGhandleColor(l);this.SVGsaveData(l);return l},SVGdataStretched:function(l,k,m){this.SVGdata={HW:k,D:m};if(m!=null){return this.data[l].SVGstretchV(k,m)}if(k!=null){return this.data[l].SVGstretchH(k)}return this.data[l].toSVG()},SVGsaveData:function(l){if(!this.SVGdata){this.SVGdata={}}this.SVGdata.w=l.w,this.SVGdata.x=l.x;if(l.X!=null){this.SVGdata.X=l.X}if(this["class"]){l.removeable=false;a.Element(l.element,{"class":this["class"]})}if(this.id){l.removeable=false;a.Element(l.element,{id:this.id})}if(this.href){var 
k=a.Element("a");k.setAttributeNS(j,"href",this.href);a.addElement(k,"rect",{width:l.w,height:l.h+l.d,y:-l.d,fill:"none",stroke:"none","pointer-events":"all"});if(l.type==="svg"){var n=l.element.firstChild;while(n.firstChild){k.appendChild(n.firstChild)}n.appendChild(k)}else{if(l.removeable&&l.element.nodeName==="g"){while(l.element.firstChild){k.appendChild(l.element.firstChild)}}else{k.appendChild(l.element)}l.element=k}l.removeable=false}if(a.config.addMMLclasses){l.removeable=false;l.element.setAttribute("className","mjx-svg-"+this.type)}var m=this.Get("style");if(m){l.element.style.cssText=m;if(l.element.style.fontSize){l.element.style.fontSize=""}l.element.style.border=l.element.style.padding="";if(l.removeable){l.removeable=l.element.style.cssText===""}}},SVGgetStyles:function(){if(this.style){var k=e.Element("span");k.style.cssText=this.style;this.styles={border:a.getBorders(k),padding:a.getPadding(k)};if(k.style.fontSize){this.styles.fontSize=k.style.fontSize}if(k.style.color){this.styles.color=k.style.color}if(k.style.backgroundColor){this.styles.background=k.style.backgroundColor}if(k.style.fontStyle){this.styles.fontStyle=k.style.fontStyle}if(k.style.fontWeight){this.styles.fontWeight=k.style.fontWeight}if(k.style.fontFamily){this.styles.fontFamily=k.style.fontFamily}if(this.styles.fontWeight&&this.styles.fontWeight.match(/^\d+$/)){this.styles.fontWeight=(parseInt(this.styles.fontWeight)>600?"bold":"normal")}}},SVGhandleSpace:function(n){if(this.useMMLspacing){if(this.type!=="mo"){return}var m=this.getValues("scriptlevel","lspace","rspace");if(m.scriptlevel<=0||this.hasValue("lspace")||this.hasValue("rspace")){var l=this.SVGgetMu(n);m.lspace=Math.max(0,a.length2em(m.lspace,l));m.rspace=Math.max(0,a.length2em(m.rspace,l));var k=this,o=this.Parent();while(o&&o.isEmbellished()&&o.Core()===k){k=o;o=o.Parent()}if(m.lspace){n.x+=m.lspace}if(m.rspace){n.X=m.rspace}}}else{var 
p=this.texSpacing();if(p!==""){n.x+=a.length2em(p,this.SVGgetScale())/n.scale}}},SVGhandleColor:function(o){var w=this.getValues("mathcolor","color");if(this.styles&&this.styles.color&&!w.color){w.color=this.styles.color}if(w.color&&!this.mathcolor){w.mathcolor=w.color}if(w.mathcolor){a.Element(o.element,{fill:w.mathcolor,stroke:w.mathcolor});o.removeable=false}var s=(this.styles||{}).border,u=(this.styles||{}).padding,t=((s||{}).left||0),q=((u||{}).left||0),k;w.background=(this.mathbackground||this.background||(this.styles||{}).background||g.COLOR.TRANSPARENT);if(t+q){var l=i();for(k in o){if(o.hasOwnProperty(k)){l[k]=o[k]}}l.x=0;l.y=0;o.element=a.Element("g");o.removeable=true;o.Add(l,t+q,0)}if(u){o.w+=u.right;o.h+=u.top;o.d+=u.bottom}if(s){o.w+=s.right;o.h+=s.top;o.d+=s.bottom}if(w.background!==g.COLOR.TRANSPARENT){if(o.element.nodeName!=="g"&&o.element.nodeName!=="svg"){var p=a.Element("g");p.appendChild(o.element);o.element=p;o.removable=true}o.Add(i.RECT(o.h,o.d,o.w,{fill:w.background,stroke:"none"}),0,0,false,true)}if(s){var v=5;var m={left:["V",o.h+o.d,-v,-o.d],right:["V",o.h+o.d,o.w-s.right+v,-o.d],top:["H",o.w,0,o.h-s.top+v],bottom:["H",o.w,0,-o.d-v]};for(k in m){if(m.hasOwnProperty(k)){if(s[k]){var r=m[k],n=i[r[0]+"LINE"];o.Add(n(r[1],s[k],s[k+"Style"],s[k+"Color"]),r[2],r[3])}}}}},SVGhandleVariant:function(k,m,l){return a.HandleVariant(k,m,l)},SVGgetVariant:function(){var k=this.getValues("mathvariant","fontfamily","fontweight","fontstyle");var 
l=k.mathvariant;if(this.variantForm){l="-TeX-variant"}k.hasVariant=this.Get("mathvariant",true);if(!k.hasVariant){k.family=k.fontfamily;k.weight=k.fontweight;k.style=k.fontstyle}if(this.styles){if(!k.style&&this.styles.fontStyle){k.style=this.styles.fontStyle}if(!k.weight&&this.styles.fontWeight){k.weight=this.styles.fontWeight}if(!k.family&&this.styles.fontFamily){k.family=this.styles.fontFamily}}if(k.family&&!k.hasVariant){if(!k.weight&&k.mathvariant.match(/bold/)){k.weight="bold"}if(!k.style&&k.mathvariant.match(/italic/)){k.style="italic"}l={forceFamily:true,font:{"font-family":k.family}};if(k.style){l.font["font-style"]=k.style}if(k.weight){l.font["font-weight"]=k.weight}return l}if(k.weight==="bold"){l={normal:g.VARIANT.BOLD,italic:g.VARIANT.BOLDITALIC,fraktur:g.VARIANT.BOLDFRAKTUR,script:g.VARIANT.BOLDSCRIPT,"sans-serif":g.VARIANT.BOLDSANSSERIF,"sans-serif-italic":g.VARIANT.SANSSERIFBOLDITALIC}[l]||l}else{if(k.weight==="normal"){l={bold:g.VARIANT.normal,"bold-italic":g.VARIANT.ITALIC,"bold-fraktur":g.VARIANT.FRAKTUR,"bold-script":g.VARIANT.SCRIPT,"bold-sans-serif":g.VARIANT.SANSSERIF,"sans-serif-bold-italic":g.VARIANT.SANSSERIFITALIC}[l]||l}}if(k.style==="italic"){l={normal:g.VARIANT.ITALIC,bold:g.VARIANT.BOLDITALIC,"sans-serif":g.VARIANT.SANSSERIFITALIC,"bold-sans-serif":g.VARIANT.SANSSERIFBOLDITALIC}[l]||l}else{if(k.style==="normal"){l={italic:g.VARIANT.NORMAL,"bold-italic":g.VARIANT.BOLD,"sans-serif-italic":g.VARIANT.SANSSERIF,"sans-serif-bold-italic":g.VARIANT.BOLDSANSSERIF}[l]||l}}return a.FONTDATA.VARIANT[l]},SVGgetScale:function(){var 
l=1,k=this.getValues("mathsize","scriptlevel","fontsize");if((this.styles||{}).fontSize&&!k.fontsize){k.fontsize=this.styles.fontSize}if(k.fontsize&&!this.mathsize){k.mathsize=k.fontsize}if(k.scriptlevel!==0){if(k.scriptlevel>2){k.scriptlevel=2}l=Math.pow(this.Get("scriptsizemultiplier"),k.scriptlevel);k.scriptminsize=a.length2em(this.Get("scriptminsize"))/1000;if(l<k.scriptminsize){l=k.scriptminsize}}l*=a.length2em(k.mathsize)/1000;return l},SVGgetMu:function(m){var k=1,l=this.getValues("scriptlevel","scriptsizemultiplier");if(m.scale&&m.scale!==1){k=1/m.scale}if(l.scriptlevel!==0){if(l.scriptlevel>2){l.scriptlevel=2}k=Math.sqrt(Math.pow(l.scriptsizemultiplier,l.scriptlevel))}return k},SVGnotEmpty:function(k){while(k){if((k.type!=="mrow"&&k.type!=="texatom")||k.data.length>1){return true}k=k.data[0]}return false},SVGcanStretch:function(k){if(this.isEmbellished()){return this.Core().SVGcanStretch(k)}return false},SVGstretchV:function(k,l){return this.toSVG(k,l)},SVGstretchH:function(k){return this.toSVG(k)},SVGlineBreaks:function(){return false}},{SVGautoload:function(){var k=a.autoloadDir+"/"+this.type+".js";c.RestartAfter(h.Require(k))},SVGautoloadFile:function(k){var l=a.autoloadDir+"/"+k+".js";c.RestartAfter(h.Require(l))}});g.chars.Augment({toSVG:function(l,o,k,m){var n=this.data.join("").replace(/[\u2061-\u2064]/g,"");if(k){n=k(n,m)}return this.SVGhandleVariant(l,o,n)}});g.entity.Augment({toSVG:function(k,l){return this.SVGhandleVariant(k,l,this.toString().replace(/[\u2061-\u2064]/g,""))}});g.mo.Augment({toSVG:function(){this.SVGgetStyles();var r=this.svg=this.SVG();this.SVGhandleSpace(r);if(this.data.length==0){r.Clean();this.SVGsaveData(r);return r}var n=this.SVGgetScale(),p=this.SVGgetVariant();var w=this.getValues("largeop","displaystyle");if(w.largeop){p=a.FONTDATA.VARIANT[w.displaystyle?"-largeOp":"-smallOp"]}var 
v=this.CoreParent(),o=(v&&v.isa(g.msubsup)&&this!==v.data[0]),k=(o?this.SVGremapChars:null);if(this.data.join("").length===1&&v&&v.isa(g.munderover)&&this.CoreText(v.data[v.base]).length===1){var s=v.data[v.over],u=v.data[v.under];if(s&&this===s.CoreMO()&&v.Get("accent")){k=a.FONTDATA.REMAPACCENT}else{if(u&&this===u.CoreMO()&&v.Get("accentunder")){k=a.FONTDATA.REMAPACCENTUNDER}}}for(var q=0,l=this.data.length;q<l;q++){if(this.data[q]){var y=this.data[q].toSVG(p,n,this.SVGremap,k),t=r.w;if(t===0&&-y.l>10*y.w){t+=-y.l}r.Add(y,t,0,true);if(y.skew){r.skew=y.skew}}}r.Clean();if(this.data.join("").length!==1){delete r.skew}if(w.largeop){r.y=(r.h-r.d)/2/n-a.TeX.axis_height;if(r.r>r.w){r.ic=r.r-r.w;r.w=r.r}}this.SVGhandleColor(r);this.SVGsaveData(r);return r},CoreParent:function(){var k=this;while(k&&k.isEmbellished()&&k.CoreMO()===this&&!k.isa(g.math)){k=k.Parent()}return k},CoreText:function(k){if(!k){return""}if(k.isEmbellished()){return k.CoreMO().data.join("")}while(k.isa(g.mrow)&&k.data.length===1&&k.data[0]){k=k.data[0]}if(!k.isToken){return""}else{return k.data.join("")}},SVGremapChars:{"*":"\u2217",'"':"\u2033","\u00B0":"\u2218","\u00B2":"2","\u00B3":"3","\u00B4":"\u2032","\u00B9":"1"},SVGremap:function(l,k){l=l.replace(/-/g,"\u2212");if(k){l=l.replace(/'/g,"\u2032").replace(/`/g,"\u2035");if(l.length===1){l=k[l]||l}}return l},SVGcanStretch:function(o){if(!this.Get("stretchy")){return false}var p=this.data.join("");if(p.length>1){return false}var l=this.CoreParent();if(l&&l.isa(g.munderover)&&this.CoreText(l.data[l.base]).length===1){var n=l.data[l.over],k=l.data[l.under];if(n&&this===n.CoreMO()&&l.Get("accent")){p=a.FONTDATA.REMAPACCENT[p]||p}else{if(k&&this===k.CoreMO()&&l.Get("accentunder")){p=a.FONTDATA.REMAPACCENTUNDER[p]||p}}}p=a.FONTDATA.DELIMITERS[p.charCodeAt(0)];var m=(p&&p.dir==o.substr(0,1));if(!m){delete this.svg}return m},SVGstretchV:function(p,q){var m=this.svg||this.toSVG();var l=this.getValues("symmetric","maxsize","minsize");var 
o=a.TeX.axis_height,k=this.SVGgetMu(m),n;if(l.symmetric){n=2*Math.max(p-o,q+o)}else{n=p+q}l.maxsize=a.length2em(l.maxsize,k,m.h+m.d);l.minsize=a.length2em(l.minsize,k,m.h+m.d);n=Math.max(l.minsize,Math.min(l.maxsize,n));m=a.createDelimiter(this.data.join("").charCodeAt(0),n,m.scale);if(l.symmetric){n=(m.h+m.d)/2+o}else{n=(m.h+m.d)*p/(p+q)}m.y=n-m.h;this.SVGhandleSpace(m);this.SVGhandleColor(m);delete this.svg;this.SVGsaveData(m);return m},SVGstretchH:function(l){var n=this.svg||this.toSVG(),k=this.SVGgetMu(n);var m=this.getValues("maxsize","minsize","mathvariant","fontweight");if((m.fontweight==="bold"||parseInt(m.fontweight)>=600)&&!this.Get("mathvariant",true)){m.mathvariant=g.VARIANT.BOLD}m.maxsize=a.length2em(m.maxsize,k,n.w);m.minsize=a.length2em(m.minsize,k,n.w);l=Math.max(m.minsize,Math.min(m.maxsize,l));n=a.createDelimiter(this.data.join("").charCodeAt(0),l,n.scale,m.mathvariant);this.SVGhandleSpace(n);this.SVGhandleColor(n);delete this.svg;this.SVGsaveData(n);return n}});g.mtext.Augment({toSVG:function(){this.SVGgetStyles();var k,m,l=this.SVGgetScale();if(this.Parent().type==="merror"){k=this.SVG();this.SVGhandleSpace(k);m=i.G();m.Add(i.TEXT(0.9*l,this.data.join(""),{fill:"#C00"}));k.Add(i.RECT(m.h+100,m.d+100,m.w+200,{fill:"#FF8",stroke:"#C00","stroke-width":50}),0,0);k.Add(m,150,0);k.H+=150;k.D+=50;k.Clean();this.SVGsaveData(k);return k}else{if(a.config.mtextFontInherit){k=this.SVG();this.SVGhandleSpace(k);k.Add(i.TEXT(l,this.data.join("")));k.Clean();this.SVGhandleColor(k);this.SVGsaveData(k);return k}else{return this.SUPER(arguments).toSVG.call(this)}}}});g.ms.Augment({toSVG:g.mbase.SVGautoload});g.mglyph.Augment({toSVG:g.mbase.SVGautoload});g.mspace.Augment({toSVG:function(){this.SVGgetStyles();var m=this.getValues("height","depth","width");m.mathbackground=this.mathbackground;if(this.background&&!this.mathbackground){m.mathbackground=this.background}var 
l=this.SVG(),k=this.SVGgetMu(l);l.h=a.length2em(m.height,k)/l.scale;l.d=a.length2em(m.depth,k)/l.scale;l.w=l.r=a.length2em(m.width,k)/l.scale;if(l.w<0){l.x=l.w;l.w=l.r=0}if(l.h<-l.d){l.d=-l.h}l.l=0;l.Clean();this.SVGhandleColor(l);this.SVGsaveData(l);return l}});g.mphantom.Augment({toSVG:function(k,m){this.SVGgetStyles();var l=this.SVG();if(this.data[0]!=null){this.SVGhandleSpace(l);l.Add(this.SVGdataStretched(0,k,m));l.Clean();while(l.element.firstChild){l.element.removeChild(l.element.firstChild)}}this.SVGhandleColor(l);if(!l.element.firstChild){delete l.element}this.SVGsaveData(l);return l}});g.mpadded.Augment({toSVG:function(l,k){this.SVGgetStyles();var o=this.SVG();if(this.data[0]!=null){this.SVGhandleSpace(o);var m=this.SVGdataStretched(0,l,k),u=this.SVGgetMu(o);var t=this.getValues("height","depth","width","lspace","voffset"),r=0,q=0;if(t.lspace){r=this.SVGlength2em(m,t.lspace,u)}if(t.voffset){q=this.SVGlength2em(m,t.voffset,u)}var n=m.h,p=m.d,s=m.w;o.Add(m,r,q);o.Clean();o.h=n;o.d=p;o.w=s;o.removeable=false;if(t.height!==""){o.h=this.SVGlength2em(o,t.height,u,"h",0)}if(t.depth!==""){o.d=this.SVGlength2em(o,t.depth,u,"d",0)}if(t.width!==""){o.w=this.SVGlength2em(o,t.width,u,"w",0)}if(o.h>o.H){o.H=o.h}if(o.d>o.D){o.D=o.d}}this.SVGhandleColor(o);this.SVGsaveData(o);return o},SVGlength2em:function(o,r,l,s,k){if(k==null){k=-a.BIGDIMEN}var p=String(r).match(/width|height|depth/);var q=(p?o[p[0].charAt(0)]:(s?o[s]:0));var n=a.length2em(r,l,q);if(s&&String(r).match(/^\s*[-+]/)){return Math.max(k,o[s]+n)}else{return n}}});g.mrow.Augment({SVG:i.ROW,toSVG:function(){this.SVGgetStyles();var l=this.SVG();this.SVGhandleSpace(l);for(var n=0,k=this.data.length;n<k;n++){if(this.data[n]){l.Check(this.data[n])}}l.Stretch();l.Clean();if(this.SVGlineBreaks(l)){l=this.SVGmultiline(l)}this.SVGhandleColor(l);this.SVGsaveData(l);return l},SVGlineBreaks:function(k){if(!this.parent.linebreakContainer){return 
false}return(a.config.linebreaks.automatic&&k.w>a.linebreakWidth)||this.hasNewline()},SVGmultiline:function(k){g.mbase.SVGautoloadFile("multiline")},SVGstretchH:function(k){var l=this.data[this.core].SVGstretchH(k);this.SVGhandleColor(l);this.SVGsaveData(l);return l},SVGstretchV:function(l,m){var k=this.data[this.core].SVGstretchV(l,m);this.SVGhandleColor(k);this.SVGsaveData(k);return k}});g.mstyle.Augment({toSVG:function(){this.SVGgetStyles();var k=this.SVG();if(this.data[0]!=null){this.SVGhandleSpace(k);var l=k.Add(this.data[0].toSVG());k.Clean();if(l.ic){k.ic=l.ic}this.SVGhandleColor(k)}this.SVGsaveData(k);return k},SVGstretchH:function(k){return(this.data[0]!=null?this.data[0].SVGstretchH(k):i.NULL())},SVGstretchV:function(k,l){return(this.data[0]!=null?this.data[0].SVGstretchV(k,l):i.NULL())}});g.mfrac.Augment({toSVG:function(){this.SVGgetStyles();var w=this.SVG();this.SVGhandleSpace(w);var s=this.data[0].toSVG(),y=this.data[1].toSVG();var C=this.getValues("displaystyle","linethickness","numalign","denomalign","bevelled");var r=w.scale=this.SVGgetScale(),n=C.displaystyle;var x=a.TeX.axis_height*r;if(C.bevelled){var B=(n?400:150);var A=Math.max(s.h+s.d,y.h+y.d)+2*B;var F=a.createDelimiter(47,A);w.Add(s,0,(s.d-s.h)/2+x+B);w.Add(F,s.w-B/2,(F.d-F.h)/2+x);w.Add(y,s.w+F.w-B,(y.d-y.h)/2+x-B)}else{var o=Math.max(s.w,y.w);var E=a.thickness2em(C.linethickness,r),m,l,D,z;var k=a.TeX.min_rule_thickness/a.em*1000;if(n){D=a.TeX.num1;z=a.TeX.denom1}else{D=(E===0?a.TeX.num3:a.TeX.num2);z=a.TeX.denom2}D*=r;z*=r;if(E===0){m=Math.max((n?7:3)*a.TeX.rule_thickness,2*k);l=(D-s.d)-(y.h-z);if(l<m){D+=(m-l)/2;z+=(m-l)/2}w.w=o;E=0}else{m=Math.max((n?2:0)*k+E,E/2+1.5*k);l=(D-s.d)-(x+E/2);if(l<m){D+=m-l}l=(x-E/2)-(y.h-z);if(l<m){z+=m-l}w.Add(i.RECT(E/2,E/2,o+2*E),0,x)}w.Align(s,C.numalign,E,D);w.Align(y,C.denomalign,E,-z)}w.Clean();this.SVGhandleColor(w);this.SVGsaveData(w);return w},SVGcanStretch:function(k){return 
false},SVGhandleSpace:function(k){if(!this.texWithDelims){k.x=(this.useMMLspacing?0:a.length2em(this.texSpacing()||0))+120;k.X=120}}});g.msqrt.Augment({toSVG:function(){this.SVGgetStyles();var r=this.SVG();this.SVGhandleSpace(r);var m=this.data[0].toSVG(),s,o;var n=this.SVGgetScale();var w=a.TeX.rule_thickness*n,l,k,v,u=0;if(this.Get("displaystyle")){l=a.TeX.x_height*n}else{l=w}k=Math.max(w+l/4,1000*a.TeX.min_root_space/a.em);v=m.h+m.d+k+w;o=a.createDelimiter(8730,v,n);if(o.h+o.d>v){k=((o.h+o.d)-(v-w))/2}s=i.RECT(w,0,m.w);v=m.h+k+w;u=this.SVGaddRoot(r,o,u,o.h+o.d-v,n);r.Add(o,u,v-o.h);r.Add(s,u+o.w,v-s.h);r.Add(m,u+o.w,0);r.Clean();r.h+=w;r.H+=w;this.SVGhandleColor(r);this.SVGsaveData(r);return r},SVGaddRoot:function(l,m,k,o,n){return k}});g.mroot.Augment({toSVG:g.msqrt.prototype.toSVG,SVGaddRoot:function(n,l,q,o,k){var s=(l.isMultiChar?0.55:0.65)*l.w;if(this.data[1]){var p=this.data[1].toSVG();p.x=0;var m=this.SVGrootHeight(l.h+l.d,k,p)-o;var r=Math.min(p.w,p.r);q=Math.max(r,s);n.Add(p,q-r,m)}else{s=q}return q-s},SVGrootHeight:function(m,l,k){return 0.45*(m-900*l)+600*l+Math.max(0,k.d-75)}});g.mfenced.Augment({SVG:i.ROW,toSVG:function(){this.SVGgetStyles();var l=this.SVG();this.SVGhandleSpace(l);if(this.data.open){l.Check(this.data.open)}if(this.data[0]!=null){l.Check(this.data[0])}for(var n=1,k=this.data.length;n<k;n++){if(this.data[n]){if(this.data["sep"+n]){l.Check(this.data["sep"+n])}l.Check(this.data[n])}}if(this.data.close){l.Check(this.data.close)}l.Stretch();l.Clean();this.SVGhandleColor(l);this.SVGsaveData(l);return l}});g.menclose.Augment({toSVG:g.mbase.SVGautoload});g.maction.Augment({toSVG:g.mbase.SVGautoload});g.semantics.Augment({toSVG:function(){this.SVGgetStyles();var k=this.SVG();if(this.data[0]!=null){this.SVGhandleSpace(k);k.Add(this.data[0].toSVG());k.Clean()}else{k.Clean()}this.SVGsaveData(k);return 
k},SVGstretchH:function(k){return(this.data[0]!=null?this.data[0].SVGstretchH(k):i.NULL())},SVGstretchV:function(k,l){return(this.data[0]!=null?this.data[0].SVGstretchV(k,l):i.NULL())}});g.munderover.Augment({toSVG:function(F,C){this.SVGgetStyles();var l=this.getValues("displaystyle","accent","accentunder","align");if(!l.displaystyle&&this.data[this.base]!=null&&this.data[this.base].CoreMO().Get("movablelimits")){return g.msubsup.prototype.toSVG.call(this)}var E=this.SVG();this.SVGhandleSpace(E);var M=E.scale=this.SVGgetScale();var p=[],K=[],u,J,G,n=-a.BIGDIMEN,I=n;for(J=0,G=this.data.length;J<G;J++){if(this.data[J]!=null){if(J==this.base){p[J]=this.SVGdataStretched(J,F,C);K[J]=(C!=null||F==null)&&this.data[J].SVGcanStretch("Horizontal")}else{p[J]=this.data[J].toSVG();K[J]=this.data[J].SVGcanStretch("Horizontal")}if(p[J].w>I){I=p[J].w}if(!K[J]&&I>n){n=I}}}if(C==null&&F!=null){n=F}else{if(n==-a.BIGDIMEN){n=I}}for(J=I=0,G=this.data.length;J<G;J++){if(this.data[J]){if(K[J]){p[J]=this.data[J].SVGstretchH(n)}if(p[J].w>I){I=p[J].w}}}var B=a.TeX.rule_thickness;var o=p[this.base]||{w:0,h:0,d:0,H:0,D:0,l:0,r:0,scale:M};var s,q,w,v,r,A,H,L=0;if(o.ic){L=1.3*o.ic+0.05}for(J=0,G=this.data.length;J<G;J++){if(this.data[J]!=null){u=p[J];r=a.TeX.big_op_spacing5*M;var z=(J!=this.base&&l[this.ACCENTS[J]]);if(z&&u.w<=1){u.x=-u.l;p[J]=i.G().With({removeable:false});p[J].Add(u);p[J].Clean();p[J].w=-u.l;u=p[J]}A={left:0,center:(I-u.w)/2,right:I-u.w}[l.align];s=A;q=0;if(J==this.over){if(z){H=B*M;r=0;if(o.skew){s+=o.skew}}else{w=a.TeX.big_op_spacing1*M;v=a.TeX.big_op_spacing3*M;H=Math.max(w,v-Math.max(0,u.d))}H=Math.max(H,1500/a.em);s+=L/2;q=o.h+u.d+H;u.h+=r;if(u.h>u.H){u.H=u.h}}else{if(J==this.under){if(z){H=3*B*M;r=0}else{w=a.TeX.big_op_spacing2*M;v=a.TeX.big_op_spacing4*M;H=Math.max(w,v-u.h)}H=Math.max(H,1500/a.em);s-=L/2;q=-(o.d+u.h+H);u.d+=r;if(u.d>u.D){u.D=u.d}}}E.Add(u,s,q)}}E.Clean();this.SVGhandleColor(E);this.SVGsaveData(E);return 
E}});g.msubsup.Augment({toSVG:function(G,z){this.SVGgetStyles();var B=this.SVG();this.SVGhandleSpace(B);var K=B.scale=this.SVGgetScale(),E=this.SVGgetMu(B);var m=B.Add(this.SVGdataStretched(this.base,G,z));var l=(this.data[this.sup]||this.data[this.sub]||this).SVGgetScale();var I=a.TeX.x_height*K,y=a.TeX.scriptspace*K;var k,n;if(this.SVGnotEmpty(this.data[this.sup])){k=this.data[this.sup].toSVG();k.w+=y;k.r=Math.max(k.w,k.r)}if(this.SVGnotEmpty(this.data[this.sub])){n=this.data[this.sub].toSVG();n.w+=y;n.r=Math.max(n.w,n.r)}var C=a.TeX.sup_drop*l,A=a.TeX.sub_drop*l;var w=m.h-C,o=m.d+A,J=0,F;if(m.ic){m.w-=m.ic;J=1.3*m.ic+0.05}if(this.data[this.base]&&(this.data[this.base].type==="mi"||this.data[this.base].type==="mo")){if(this.data[this.base].data.join("").length===1&&m.scale===1&&!m.stretched&&!this.data[this.base].Get("largeop")){w=o=0}}var H=this.getValues("subscriptshift","superscriptshift");H.subscriptshift=(H.subscriptshift===""?0:a.length2em(H.subscriptshift,E));H.superscriptshift=(H.superscriptshift===""?0:a.length2em(H.superscriptshift,E));if(!k){if(n){o=Math.max(o,a.TeX.sub1*K,n.h-(4/5)*I,H.subscriptshift);B.Add(n,m.w,-o)}}else{if(!n){values=this.getValues("displaystyle","texprimestyle");F=a.TeX[(values.displaystyle?"sup1":(values.texprimestyle?"sup3":"sup2"))];w=Math.max(w,F*K,k.d+(1/4)*I,H.superscriptshift);B.Add(k,m.w+J,w)}else{o=Math.max(o,a.TeX.sub2*K);var x=a.TeX.rule_thickness*K;if((w-k.d)-(n.h-o)<3*x){o=3*x-w+k.d+n.h;C=(4/5)*I-(w-k.d);if(C>0){w+=C;o-=C}}B.Add(k,m.w+J,Math.max(w,H.superscriptshift));B.Add(n,m.w,-Math.max(o,H.subscriptshift))}}B.Clean();this.SVGhandleColor(B);this.SVGsaveData(B);return 
B}});g.mmultiscripts.Augment({toSVG:g.mbase.SVGautoload});g.mtable.Augment({toSVG:g.mbase.SVGautoload});g["annotation-xml"].Augment({toSVG:g.mbase.SVGautoload});g.math.Augment({SVG:i.Subclass({type:"svg",removeable:false}),toSVG:function(t,m){if(this.data[0]){this.SVGgetStyles();g.mbase.prototype.displayAlign=c.config.displayAlign;g.mbase.prototype.displayIndent=c.config.displayIndent;var q=i.G({stroke:"black",fill:"black","stroke-thickness":0,transform:"matrix(1 0 0 -1 0 0)"}).With({removeable:false});q.Add(this.data[0].toSVG(),0,0,true);q.Clean();this.SVGhandleColor(q);var s=this.SVG();s.element.setAttribute("xmlns:xlink",j);s.Add(q);s.Clean();this.SVGsaveData(s);var p=Math.max(-s.l,0),k=Math.max(s.r-s.w,0);var n=s.element.style;n.width=a.Ex(p+s.w+k);n.height=a.Ex(s.H+s.D);n.verticalAlign=a.Ex(-s.D-2*a.em);n.marginLeft=a.Ex(-p);n.marginRight=a.Ex(-k);s.element.setAttribute("viewBox",(-p)+" "+(-s.H)+" "+(p+s.w+k)+" "+(s.H+s.D));s.element.style.margin="1px 0px";if(s.H>s.h||s.D>s.d){var o=e.Element("span",{style:{display:"inline-block","white-space":"nowrap",padding:"1px 0px"},isMathJax:true},[["span",{style:{display:"inline-block",position:"relative",isMathJax:true,width:a.Ex(s.w),height:a.Ex(s.h+s.d),"vertical-align":a.Ex(-s.d)}}]]);o.firstChild.appendChild(s.element);s.element=o;n.verticalAlign="";n.position="absolute";n.bottom=a.Ex(s.d-s.D);n.left=0}t.appendChild(s.element);s.element=null;if(!this.isMultiline&&this.Get("display")==="block"){var 
u=this.getValues("indentalignfirst","indentshiftfirst","indentalign","indentshift");if(u.indentalignfirst!==g.INDENTALIGN.INDENTALIGN){u.indentalign=u.indentalignfirst}if(u.indentalign===g.INDENTALIGN.AUTO){u.indentalign=this.displayAlign}m.style.textAlign=u.indentalign;if(u.indentshiftfirst!==g.INDENTSHIFT.INDENTSHIFT){u.indentshift=u.indentshiftfirst}if(u.indentshift==="auto"){u.indentshift=this.displayIndent}if(u.indentshift&&u.indentalign!==g.INDENTALIGN.CENTER&&!s.hasIndent){t.style[{left:"marginLeft",right:"marginRight"}[u.indentalign]]=a.Ex(a.length2em(u.indentshift))}}}return t}});g.TeXAtom.Augment({toSVG:function(){this.SVGgetStyles();var k=this.SVG();this.SVGhandleSpace(k);if(this.data[0]!=null){var l=this.data[0].toSVG(),m=0;if(this.texClass===g.TEXCLASS.VCENTER){m=a.TeX.axis_height-(l.h+l.d)/2+l.d}k.Add(l,0,m);k.ic=l.ic}this.SVGhandleColor(k);this.SVGsaveData(k);return k}});c.Register.StartupHook("onLoad",function(){setTimeout(MathJax.Callback(["loadComplete",a,"jax.js"]),0)})});c.Register.StartupHook("End Cookie",function(){if(c.config.menuSettings.zoom!=="None"){h.Require("[MathJax]/extensions/MathZoom.js")}});if(!document.createElementNS){if(!document.namespaces.svg){document.namespaces.add("svg",b)}a.Augment({Element:function(k,l){var m=(typeof(k)==="string"?document.createElement("svg:"+k):k);m.isMathJax=true;if(l){for(var n in l){if(l.hasOwnProperty(n)){m.setAttribute(n,l[n].toString())}}}return m}})}})(MathJax.Ajax,MathJax.Hub,MathJax.HTML,MathJax.OutputJax.SVG);
PypiClean
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/packages/pip/_internal/utils/hashes.py
from __future__ import absolute_import import hashlib from pip._vendor.six import iteritems, iterkeys, itervalues from pip._internal.exceptions import ( HashMismatch, HashMissing, InstallationError, ) from pip._internal.utils.misc import read_chunks from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import ( Dict, List, BinaryIO, NoReturn, Iterator ) from pip._vendor.six import PY3 if PY3: from hashlib import _Hash else: from hashlib import _hash as _Hash # The recommended hash algo of the moment. Change this whenever the state of # the art changes; it won't hurt backward compatibility. FAVORITE_HASH = 'sha256' # Names of hashlib algorithms allowed by the --hash option and ``pip hash`` # Currently, those are the ones at least as collision-resistant as sha256. STRONG_HASHES = ['sha256', 'sha384', 'sha512'] class Hashes(object): """A wrapper that builds multiple hashes at once and checks them against known-good values """ def __init__(self, hashes=None): # type: (Dict[str, List[str]]) -> None """ :param hashes: A dict of algorithm names pointing to lists of allowed hex digests """ self._allowed = {} if hashes is None else hashes def check_against_chunks(self, chunks): # type: (Iterator[bytes]) -> None """Check good hashes against ones built from iterable of chunks of data. Raise HashMismatch if none match. """ gots = {} for hash_name in iterkeys(self._allowed): try: gots[hash_name] = hashlib.new(hash_name) except (ValueError, TypeError): raise InstallationError('Unknown hash name: %s' % hash_name) for chunk in chunks: for hash in itervalues(gots): hash.update(chunk) for hash_name, got in iteritems(gots): if got.hexdigest() in self._allowed[hash_name]: return self._raise(gots) def _raise(self, gots): # type: (Dict[str, _Hash]) -> NoReturn raise HashMismatch(self._allowed, gots) def check_against_file(self, file): # type: (BinaryIO) -> None """Check good hashes against a file-like object Raise HashMismatch if none match. 
""" return self.check_against_chunks(read_chunks(file)) def check_against_path(self, path): # type: (str) -> None with open(path, 'rb') as file: return self.check_against_file(file) def __nonzero__(self): # type: () -> bool """Return whether I know any known-good hashes.""" return bool(self._allowed) def __bool__(self): # type: () -> bool return self.__nonzero__() class MissingHashes(Hashes): """A workalike for Hashes used when we're missing a hash for a requirement It computes the actual hash of the requirement and raises a HashMissing exception showing it to the user. """ def __init__(self): # type: () -> None """Don't offer the ``hashes`` kwarg.""" # Pass our favorite hash in to generate a "gotten hash". With the # empty list, it will never match, so an error will always raise. super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) def _raise(self, gots): # type: (Dict[str, _Hash]) -> NoReturn raise HashMissing(gots[FAVORITE_HASH].hexdigest())
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/charting/Element.js
// dojox/charting/Element — base class shared by chart elements (plots, axes,
// titles, ...). Owns a gfx group, tracks HTML nodes it creates, and provides
// text-measuring and fill-resolution helpers.
//
// Reconstructed from the built (minified) artifact: dead code removed
// (unreachable `return` in _pseudoRadialFill, no-op assignments in the
// prefix search); behavior is otherwise unchanged.
define("dojox/charting/Element",
    ["dojo/_base/lang", "dojo/_base/array", "dojo/dom-construct",
     "dojo/_base/declare", "dojox/gfx", "dojox/gfx/utils", "dojox/gfx/shape"],
    function(lang, arr, domConstruct, declare, gfx, gfxUtils, gfxShape){

    return declare("dojox.charting.Element", null, {
        chart: null,          // the owning chart
        group: null,          // gfx group holding this element's shapes
        htmlElements: null,   // HTML nodes created by this element
        dirty: true,          // whether the element needs re-rendering

        constructor: function(chart){
            // chart: dojox/charting/Chart — the chart this element belongs to.
            this.chart = chart;
            this.group = null;
            this.htmlElements = [];
            this.dirty = true;
            this.trailingSymbol = "...";  // appended when labels are truncated
            this._events = [];            // {shape, handle} connect records
        },

        // Lazily create this element's gfx group on `creator`
        // (defaults to the chart surface). Returns `this` for chaining.
        createGroup: function(creator){
            if(!creator){
                creator = this.chart.surface;
            }
            if(!this.group){
                this.group = creator.createGroup();
            }
            return this;
        },

        // Dispose of every shape in the group, remove the group itself,
        // destroy HTML artifacts and disconnect events; marks dirty.
        purgeGroup: function(){
            this.destroyHtmlElements();
            if(this.group){
                gfxUtils.forEach(this.group, function(shape){
                    gfxShape.dispose(shape);
                });
                this.group.clear();
                this.group.removeShape();
                this.group = null;
            }
            this.dirty = true;
            if(this._events.length){
                arr.forEach(this._events, function(evt){
                    evt.shape.disconnect(evt.handle);
                });
                this._events = [];
            }
            return this;
        },

        // Empty the group (creating it on `creator` if needed) without
        // destroying it; marks dirty. Returns `this` for chaining.
        cleanGroup: function(creator){
            this.destroyHtmlElements();
            if(!creator){
                creator = this.chart.surface;
            }
            if(this.group){
                this.group.clear();
            }else{
                this.group = creator.createGroup();
            }
            this.dirty = true;
            return this;
        },

        // Destroy any HTML nodes this element created.
        destroyHtmlElements: function(){
            if(this.htmlElements.length){
                arr.forEach(this.htmlElements, domConstruct.destroy);
                this.htmlElements = [];
            }
        },

        destroy: function(){
            this.purgeGroup();
        },

        // Measured pixel width of `s` rendered with `font`.
        getTextWidth: function(s, font){
            return gfx._base._getTextBox(s, {font: font}).w || 0;
        },

        // Truncate `s` (appending this.trailingSymbol) so it fits within
        // `limitWidth` pixels. Returns {text, truncated}.
        getTextWithLimitLength: function(s, font, limitWidth, truncated){
            if(!s || s.length <= 0){
                return {text: "", truncated: truncated || false};
            }
            if(!limitWidth || limitWidth <= 0){
                return {text: s, truncated: truncated || false};
            }
            var margin = 2, ratio = 0.618,
                minText = s.substring(0, 1) + this.trailingSymbol,
                minWidth = this.getTextWidth(minText, font);
            if(limitWidth <= minWidth){
                // Not even one character fits; return the minimal text anyway.
                return {text: minText, truncated: true};
            }
            var fullWidth = this.getTextWidth(s, font);
            if(fullWidth <= limitWidth){
                return {text: s, truncated: truncated || false};
            }
            // Golden-section search for the longest prefix that fits.
            var begin = 0, end = s.length;
            while(begin < end){
                if(end - begin <= margin){
                    // Close enough: back off until the prefix + symbol fits.
                    while(this.getTextWidth(s.substring(0, begin) + this.trailingSymbol, font) > limitWidth){
                        begin -= 1;
                    }
                    return {
                        text: s.substring(0, begin) + this.trailingSymbol,
                        truncated: true
                    };
                }
                var middle = begin + Math.round((end - begin) * ratio),
                    width = this.getTextWidth(s.substring(0, middle), font);
                if(width < limitWidth){
                    begin = middle;
                }else{
                    end = middle;
                }
            }
        },

        // Truncate `s` to at most `wcLimit` characters, appending
        // this.trailingSymbol if anything was cut. Returns {text, truncated}.
        getTextWithLimitCharCount: function(s, font, wcLimit, truncated){
            if(!s || s.length <= 0){
                return {text: "", truncated: truncated || false};
            }
            if(!wcLimit || wcLimit <= 0 || s.length <= wcLimit){
                return {text: s, truncated: truncated || false};
            }
            return {
                text: s.substring(0, wcLimit) + this.trailingSymbol,
                truncated: true
            };
        },

        // Resolve a percentage-based fill declared in "plot"/"shapeX"/"shapeY"
        // space into absolute plot-area coordinates.
        _plotFill: function(fill, dim, offsets){
            if(!fill || !fill.type || !fill.space){
                return fill;
            }
            var space = fill.space, span;
            switch(fill.type){
                case "linear":
                    if(space === "plot" || space === "shapeX" || space === "shapeY"){
                        fill = gfx.makeParameters(gfx.defaultLinearGradient, fill);
                        fill.space = space;
                        if(space === "plot" || space === "shapeX"){
                            // scale y1/y2 over the vertical plot span
                            span = dim.height - offsets.t - offsets.b;
                            fill.y1 = offsets.t + span * fill.y1 / 100;
                            fill.y2 = offsets.t + span * fill.y2 / 100;
                        }
                        if(space === "plot" || space === "shapeY"){
                            // scale x1/x2 over the horizontal plot span
                            span = dim.width - offsets.l - offsets.r;
                            fill.x1 = offsets.l + span * fill.x1 / 100;
                            fill.x2 = offsets.l + span * fill.x2 / 100;
                        }
                    }
                    break;
                case "radial":
                    if(space === "plot"){
                        fill = gfx.makeParameters(gfx.defaultRadialGradient, fill);
                        fill.space = space;
                        var w = dim.width - offsets.l - offsets.r,
                            h = dim.height - offsets.t - offsets.b;
                        fill.cx = offsets.l + w * fill.cx / 100;
                        fill.cy = offsets.t + h * fill.cy / 100;
                        // radius is a percentage of half the plot diagonal
                        fill.r = fill.r * Math.sqrt(w * w + h * h) / 200;
                    }
                    break;
                case "pattern":
                    if(space === "plot" || space === "shapeX" || space === "shapeY"){
                        fill = gfx.makeParameters(gfx.defaultPattern, fill);
                        fill.space = space;
                        if(space === "plot" || space === "shapeX"){
                            span = dim.height - offsets.t - offsets.b;
                            fill.y = offsets.t + span * fill.y / 100;
                            fill.height = span * fill.height / 100;
                        }
                        if(space === "plot" || space === "shapeY"){
                            span = dim.width - offsets.l - offsets.r;
                            fill.x = offsets.l + span * fill.x / 100;
                            fill.width = span * fill.width / 100;
                        }
                    }
                    break;
            }
            return fill;
        },

        // Resolve a percentage-based fill declared in "shape"/"shapeX"/"shapeY"
        // space relative to one shape's bounding rect.
        _shapeFill: function(fill, rect){
            if(!fill || !fill.space){
                return fill;
            }
            var space = fill.space, span;
            switch(fill.type){
                case "linear":
                    if(space === "shape" || space === "shapeX" || space === "shapeY"){
                        fill = gfx.makeParameters(gfx.defaultLinearGradient, fill);
                        fill.space = space;
                        if(space === "shape" || space === "shapeX"){
                            span = rect.width;
                            fill.x1 = rect.x + span * fill.x1 / 100;
                            fill.x2 = rect.x + span * fill.x2 / 100;
                        }
                        if(space === "shape" || space === "shapeY"){
                            span = rect.height;
                            fill.y1 = rect.y + span * fill.y1 / 100;
                            fill.y2 = rect.y + span * fill.y2 / 100;
                        }
                    }
                    break;
                case "radial":
                    if(space === "shape"){
                        fill = gfx.makeParameters(gfx.defaultRadialGradient, fill);
                        fill.space = space;
                        fill.cx = rect.x + rect.width / 2;
                        fill.cy = rect.y + rect.height / 2;
                        fill.r = fill.r * rect.width / 200;
                    }
                    break;
                case "pattern":
                    if(space === "shape" || space === "shapeX" || space === "shapeY"){
                        fill = gfx.makeParameters(gfx.defaultPattern, fill);
                        fill.space = space;
                        if(space === "shape" || space === "shapeX"){
                            span = rect.width;
                            fill.x = rect.x + span * fill.x / 100;
                            fill.width = span * fill.width / 100;
                        }
                        if(space === "shape" || space === "shapeY"){
                            span = rect.height;
                            fill.y = rect.y + span * fill.y / 100;
                            fill.height = span * fill.height / 100;
                        }
                    }
                    break;
            }
            return fill;
        },

        // Approximate a radial "shape" gradient for an angular slice.
        // With 3 args: recenter/rescale the radial fill. With 4-5 args:
        // degrade to a linear gradient along the (mid-)angle of the slice.
        _pseudoRadialFill: function(fill, center, radius, start, end){
            if(!fill || fill.type !== "radial" || fill.space !== "shape"){
                return fill;
            }
            var space = fill.space;
            fill = gfx.makeParameters(gfx.defaultRadialGradient, fill);
            fill.space = space;
            if(arguments.length < 4){
                fill.cx = center.x;
                fill.cy = center.y;
                fill.r = fill.r * radius / 100;
                return fill;
            }
            var angle = (arguments.length < 5) ? start : (end + start) / 2;
            return {
                type: "linear",
                x1: center.x,
                y1: center.y,
                x2: center.x + fill.r * radius * Math.cos(angle) / 100,
                y2: center.y + fill.r * radius * Math.sin(angle) / 100,
                colors: fill.colors
            };
        }
    });
});
PypiClean
/Gecco-0.3.0.tar.gz/Gecco-0.3.0/gecco/gecco.py
#pylint: disable=too-many-nested-blocks
import sys
import os
import socket
import socketserver
import datetime
import time
import subprocess
import json
import traceback
import random
import importlib
import inspect
from collections import OrderedDict, defaultdict
#from threading import Thread, Lock
from queue import Empty
from threading import Thread
from multiprocessing import Process, Lock, JoinableQueue as Queue #pylint: disable=no-name-in-module
from glob import glob
import argparse
import psutil
import yaml
import folia.fql as fql #pylint: disable=import-error,no-name-in-module
import folia.main as folia #pylint: disable=import-error,no-name-in-module
from ucto import Tokenizer #pylint: disable=import-error,no-name-in-module
import gecco.helpers.evaluation
from gecco.helpers.common import folia2json, makencname

# Directories searched for ucto tokeniser configuration files (tokconfig-*);
# a virtualenv's own share/etc dirs take precedence when VIRTUAL_ENV is set.
UCTOSEARCHDIRS = ('/usr/local/share/ucto','/usr/share/ucto', '/usr/local/etc/ucto','/etc/ucto/','.')
if 'VIRTUAL_ENV' in os.environ:
    UCTOSEARCHDIRS = (os.environ['VIRTUAL_ENV'] + '/share/ucto/', os.environ['VIRTUAL_ENV'] + '/etc/ucto/',) + UCTOSEARCHDIRS

VERSION = '0.3.0'


class DataThread(Process):
    """Process that owns the FoLiA document for one correction run.

    The constructor (running in the parent process) loads/tokenises the
    document and fills ``inputqueue`` with ``(module_id, unit_id, inputdata)``
    work items, terminated by one ``(None, None, None)`` sentinel per
    processor thread.  ``run()`` then consumes ``outputqueue``, applies the
    FQL queries produced by the modules to the document, and finally saves
    or dumps the document.
    """

    def __init__(self, corrector, foliadoc, module_ids, outputfile, inputqueue, outputqueue, infoqueue,waitforprocessors,dumpxml, dumpjson,**parameters):
        """Load (or tokenise) the document and enqueue all work items.

        :param corrector: the owning Corrector instance
        :param foliadoc: a folia.Document, or a filename (plain text files
            are tokenised to FoLiA on the fly with ucto)
        :param module_ids: restrict the run to these module IDs (empty = all)
        :param outputfile: output filename ('' = save in place)
        :param waitforprocessors: a multiprocessing.Lock used as a one-shot
            gate; released by the caller once processor threads have started
        """
        super().__init__()
        self.corrector = corrector
        self.inputqueue = inputqueue
        self.outputqueue = outputqueue
        self.infoqueue = infoqueue
        self.module_ids = module_ids
        self.outputfile = outputfile
        self.parameters = parameters
        self.dumpxml = dumpxml
        self.dumpjson = dumpjson
        self.waitforprocessors = waitforprocessors
        self.debug = 'debug' in self.parameters and self.parameters['debug']
        self._stop = False
        #Load FoLiA document
        if isinstance(foliadoc, str):
            #We got a filename instead of a FoLiA document, that's okay
            ext = foliadoc.split('.')[-1].lower()
            if not ext in ('xml','folia','gz','bz2'):
                #Preprocessing - Tokenize input text (plaintext) and produce FoLiA output
                self.corrector.log("Starting Tokeniser")

                inputtextfile = foliadoc
                if ext == 'txt':
                    outputtextfile = '.'.join(inputtextfile.split('.')[:-1]) + '.folia.xml'
                else:
                    outputtextfile = inputtextfile + '.folia.xml'
                # Derive a valid NCName document ID from the filename; fall
                # back to "untitled" when the filename yields no valid name.
                try:
                    docid = makencname(inputtextfile.split('.')[0])
                except ValueError:
                    docid = "untitled"

                tokenizer = Tokenizer(self.corrector.settings['ucto'],xmloutput=True,docid=docid)
                tokenizer.tokenize(inputtextfile, outputtextfile)

                foliadoc = outputtextfile

                self.corrector.log("Tokeniser finished")

            #good, load
            self.corrector.log("Reading FoLiA document")

            self.foliadoc = folia.Document(file=foliadoc, processor=folia.Processor.create(name="gecco", version=VERSION))
        else:
            self.foliadoc = foliadoc

        # Caller-supplied metadata overrides are written straight into the
        # document's metadata block.
        if 'metadata' in parameters:
            for k, v in parameters['metadata'].items():
                self.foliadoc.metadata[k] = v

        begintime = time.time()
        self.corrector.log("Initialising modules on document") #not parallel, acts on same document anyway, should be very quick
        for module in self.corrector:
            if not module_ids or module.id in module_ids:
                self.corrector.log("\tInitialising module " + module.id)
                module.init(self.foliadoc)

        #data in inputqueue takes the form (module, data), where data is an instance of module.UNIT (a folia document or element)
        if folia.Document in self.corrector.units:
            self.corrector.log("\tPreparing input of full documents")
            for module in self.corrector:
                if not module_ids or module.id in module_ids:
                    if module.UNIT is folia.Document:
                        self.corrector.log("\t\tQueuing full-document module " + module.id)
                        inputdata = module.prepareinput(self.foliadoc,**parameters)
                        if inputdata is not None:
                            self.inputqueue.put( (module.id, self.foliadoc.id, inputdata) )

        # One pass per unit type: select all matching elements and queue one
        # item per (module, element) pair for which prepareinput yields data.
        for unit in self.corrector.units:
            if unit is not folia.Document:
                self.corrector.log("\tPreparing input of " + str(unit.__name__))
                for element in self.foliadoc.select(unit):
                    for module in self.corrector:
                        if not module_ids or module.id in module_ids:
                            if module.UNIT is unit:
                                inputdata = module.prepareinput(element,**parameters)
                                if inputdata is not None:
                                    self.inputqueue.put( (module.id, element.id, inputdata ) )

        for _ in range(self.corrector.settings['threads']):
            self.inputqueue.put( (None,None,None) ) #signals the end of the queue, once for each thread

        duration = time.time() - begintime
        self.corrector.log("Input ready (" + str(duration) + "s)")

    def run(self):
        """Consume the output queue and apply module results to the document.

        Blocks on ``waitforprocessors`` until the caller releases it, then
        loops on ``outputqueue`` until the ``(None, None, None, None)``
        sentinel arrives.  For each output item the owning module's
        ``processoutput`` yields FQL queries, which are executed against the
        document; one entry per applied query is pushed to ``infoqueue``.
        """
        self.corrector.log("Waiting for processors to be ready...") #not parallel, acts on same document anyway, should be fairly quick depending on module
        self.waitforprocessors.acquire(True,self.corrector.settings['timeout'])
        self.corrector.log("Processing output...") #not parallel, acts on same document anyway, should be fairly quick depending on module
        # NOTE(review): this get() is not wrapped in try/except Empty, so a
        # timeout here propagates and aborts the process — confirm intended.
        while not self._stop:
            module_id, unit_id, outputdata, inputdata = self.outputqueue.get(True,self.corrector.settings['timeout'])
            self.outputqueue.task_done()
            if module_id is None and unit_id is None and outputdata is None and inputdata is None:
                #signals the end of the queue
                self._stop = True
            elif outputdata:
                module = self.corrector.modules[module_id]
                try:
                    queries = module.processoutput(outputdata, inputdata, unit_id,**self.parameters)
                except Exception as e: #pylint: disable=broad-except
                    self.corrector.log("***ERROR*** Exception processing output of " + module_id + ": " + str(e)) #not parallel, acts on same document anyway, should be fairly quick depending on module
                    exc_type, exc_value, exc_traceback = sys.exc_info() #pylint: disable=unused-variable
                    traceback.print_tb(exc_traceback, limit=50, file=sys.stderr)
                    queries = None
                if queries is not None:
                    # A module may return one query (str) or an iterable of them.
                    if isinstance(queries, str): queries = (queries,)
                    for query in queries:
                        try:
                            if self.debug: self.corrector.log("Processing FQL query " + query)
                            q = fql.Query(query)
                            q(self.foliadoc)
                            self.infoqueue.put( module.id)
                        except fql.SyntaxError as e:
                            self.corrector.log("***ERROR*** FQL Syntax error in " + module_id + ":" + str(e)) #not parallel, acts on same document anyway, should be fairly quick depending on module
                            self.corrector.log(" query: " + query)
                            exc_type, exc_value, exc_traceback = sys.exc_info()
                            traceback.print_tb(exc_traceback, limit=50, file=sys.stderr)
                        except fql.QueryError as e:
                            self.corrector.log("***ERROR*** FQL Query error in " + module_id + ":" + str(e)) #not parallel, acts on same document anyway, should be fairly quick depending on module
                            self.corrector.log(" query: " + query)
                            exc_type, exc_value, exc_traceback = sys.exc_info()
                            traceback.print_tb(exc_traceback, limit=50, file=sys.stderr)
                        except Exception as e: #pylint: disable=broad-except
                            self.corrector.log("***ERROR*** Error processing query for " + module_id + ": " + e.__class__.__name__ + " -- " + str(e)) #not parallel, acts on same document anyway, should be fairly quick depending on module
                            self.corrector.log(" query: " + query)
                            exc_type, exc_value, exc_traceback = sys.exc_info() #pylint: disable=unused-variable
                            traceback.print_tb(exc_traceback, limit=50, file=sys.stderr)

        self.infoqueue.put(None) #signals end

        self.corrector.log("Finalising modules on document") #not parallel, acts on same document anyway, should be fairly quick depending on module
        for module in self.corrector:
            if not self.module_ids or module.id in self.module_ids:
                module.finish(self.foliadoc)

        #Store FoLiA document
        if self.outputfile:
            self.corrector.log("Saving document " + self.outputfile + "....")
            self.foliadoc.save(self.outputfile)
        elif not self.dumpxml and not self.dumpjson:
            # No explicit output file and no dump requested: save in place.
            self.corrector.log("Saving document " + self.foliadoc.filename + "....")
            self.foliadoc.save()

        if self.dumpxml:
            self.corrector.log("Dumping XML")
            print(self.foliadoc)
        if self.dumpjson:
            self.corrector.log("Dumping JSON")
            print(json.dumps(folia2json(self.foliadoc)))

    def stop(self):
        # Cooperative stop flag checked by the run() loop.
        self._stop = True
class ProcessorThread(Process):
    """Worker process that pulls items off the input queue and runs modules.

    Local modules are invoked in-process via ``runlocal``; remote modules are
    invoked through a per-(host, port) cached client connection with rotation
    over the module's servers and bounded retries.  Results are pushed to the
    output queue as ``(module_id, unit_id, outputdata, inputdata)`` and the
    per-call wall time to the time queue.
    """

    def __init__(self, corrector,inputqueue, outputqueue, timequeue, **parameters):
        self.corrector = corrector
        self.inputqueue = inputqueue
        self.outputqueue = outputqueue
        self.timequeue = timequeue
        self._stop = False
        self.parameters = parameters
        self.debug = 'debug' in parameters and parameters['debug']
        self.clients = {} #each thread keeps a bunch of clients open to the servers of the various modules so we don't have to reconnect constantly (= faster)
        self.seqnr = {} # per-module rotating server index (see run())
        self.random = random.Random()
        super().__init__()

    def run(self):
        """Main worker loop: consume input items until the sentinel or a timeout."""
        self.corrector.log("[" + str(self.pid) + "] Start of thread")
        while not self._stop:
            try:
                module_id, unit_id, inputdata = self.inputqueue.get(True,self.corrector.settings['timeout'])
            except Empty:
                # No more work arrived within the timeout: shut this worker down.
                if self.debug: self.corrector.log(" (inputqueue timed out)")
                self._stop = True
                break
            self.inputqueue.task_done()
            if module_id is None:
                #signals the last item (there will be one for each thread)
                if self.debug: self.corrector.log(" (end of input queue)")
                self._stop = True
                break
            else:
                module = self.corrector.modules[module_id]
                if not module.UNITFILTER or module.UNITFILTER(inputdata):
                    if not module.submodule: #modules marked a submodule won't be called by the main process, but are invoked by other modules instead
                        begintime = time.time()
                        module.prepare() #will block until all dependencies are done
                        if module.local:
                            if self.debug:
                                module.log("[" + str(self.pid) + "] (Running " + module.id + " on " + repr(inputdata) + " [local])")
                            outputdata = module.runlocal(inputdata, unit_id, **self.parameters)
                            if outputdata is not None:
                                self.outputqueue.put( (module.id, unit_id, outputdata,inputdata) )
                            if self.debug:
                                duration = round(time.time() - begintime,4)
                                module.log("[" + str(self.pid) + "] (...took " + str(duration) + "s)")
                        else:
                            connected = False
                            if self.debug:
                                module.log("[" + str(self.pid) + "] (Running " + module.id + " on " + repr(inputdata) + " [remote]")
                            if module.id not in self.seqnr:
                                self.seqnr[module.id] = self.random.randint(0,len(module.servers)) #start with a random sequence nr
                            try:
                                startseqnr = self.seqnr[module.id]
                                while not connected:
                                    #get the server for this sequence nr, sequence numbers ensure rotation between servers
                                    server,port,load = module.getserver(self.seqnr[module.id]) #pylint: disable=unused-variable
                                    self.seqnr[module.id] += 1 #increase sequence number for this module
                                    if self.seqnr[module.id] >= startseqnr + (10 * len(module.servers)):
                                        break #max 10 retries over all servers
                                    try:
                                        # Reuse a cached client per (server, port); connect lazily.
                                        if (server,port) not in self.clients:
                                            self.clients[(server,port)] = module.CLIENT(server,port)
                                        client = self.clients[(server,port)]
                                        if self.debug:
                                            module.log("[" + str(self.pid) + "] BEGIN (server=" + server + ", port=" + str(port) + ", client=" + str(client) + ", corrector=" + str(self.corrector) + ", module=" + str(module) + ", unit=" + unit_id + ")")
                                        outputdata = module.runclient(client, unit_id, inputdata, **self.parameters)
                                        if self.debug:
                                            module.log("[" + str(self.pid) + "] END (server=" + server + ", port=" + str(port) + ", client=" + str(client) + ", corrector=" + str(self.corrector) + ", module=" + str(module) + ", unit=" + unit_id + ")")
                                        if outputdata is not None:
                                            self.outputqueue.put( (module.id, unit_id, outputdata,inputdata) )
                                        #will only be executed when connection succeeded:
                                        connected = True
                                    except ConnectionRefusedError:
                                        module.log("[" + str(self.pid) + "] Server " + server+":" + str(port) + ", module " + module.id + " refused connection, moving on...")
                                        # Drop the dead client so the next attempt reconnects.
                                        del self.clients[(server,port)]
                                    except Exception: #pylint: disable=broad-except
                                        module.log("[" + str(self.pid) + "] Server communication failed for server " + server +":" + str(port) + ", module " + module.id + ", passed unit " + unit_id + " (traceback follows in debug), moving on...")
                                        exc_type, exc_value, exc_traceback = sys.exc_info() #pylint: disable=unused-variable
                                        traceback.print_tb(exc_traceback, limit=50, file=sys.stderr)
                                        del self.clients[(server,port)]
                            except IndexError:
                                # getserver() had no entry at this index: no servers registered.
                                module.log("**ERROR** No servers started for " + module.id)
                            if not connected:
                                module.log("**ERROR** Unable to connect client to server! All servers for module " + module.id + " are down, skipping!")
                        duration = time.time() - begintime
                        self.timequeue.put((module.id, duration))
                        if self.debug:
                            module.log("[" + str(self.pid) + "] (...took " + str(round(duration,4)) + "s)")
        self.corrector.log("[" + str(self.pid) + "] End of thread")

    def stop(self):
        # Cooperative stop flag checked by the run() loop.
        self._stop = True
+ '/tokconfig-' + self.settings['language'] if 'ucto' not in self.settings: for d in UCTOSEARCHDIRS: if os.path.exists(d + "/tokconfig-generic"): self.settings['ucto'] = d + '/tokconfig-generic' if 'ucto' not in self.settings: raise Exception("Ucto configuration file not specified and no default found (use setting ucto=)") elif not os.path.exists(self.settings['ucto']): raise Exception("Specified ucto configuration file not found") if 'logfunction' not in self.settings: self.settings['logfunction'] = lambda x: print(datetime.datetime.now().strftime("%H:%M:%S.%f") + " " + x,file=sys.stderr) self.log = self.settings['logfunction'] if 'timeout' in self.settings: self.settings['timeout'] = int(self.settings['timeout']) else: self.settings['timeout'] = 120 if 'threads' not in self.settings: self.settings['threads'] = 1 if 'minpollinterval' not in self.settings: self.settings['minpollinterval'] = 60 #60 sec def parseconfig(self,configfile): self.configfile = configfile #pylint: disable=attribute-defined-outside-init config = yaml.full_load(open(configfile,'r',encoding='utf-8').read()) if 'inherit' in config: baseconfig = yaml.full_load(open(config['inherit'],'r',encoding='utf-8').read()) baseconfig.update(config) config = baseconfig if 'modules' not in config: raise Exception("No Modules specified") modulespecs = config['modules'] del config['modules'] self.settings = config self.verifysettings() for modulespec in modulespecs: if 'enabled' in modulespec and not modulespec['enabled'] or 'disabled' in modulespec and modulespec['disabled']: continue if not 'id' in modulespec: raise Exception("Mising ID in module specification") #import modules: pymodule = '.'.join(modulespec['module'].split('.')[:-1]) moduleclass = modulespec['module'].split('.')[-1] exec("from " + pymodule + " import " + moduleclass) #pylint: disable=exec-used ModuleClass = locals()[moduleclass] if 'servers' in modulespec: modulespec['servers'] = tuple( ( (x['host'],x['port']) for x in 
modulespec['servers']) ) try: module = ModuleClass(self, **modulespec) except TypeError: raise Exception("Error instantiating " + ModuleClass.__name__) self.append(module) def run(self,filename,modules,outputfile,dumpxml,dumpjson,**parameters): self.load() inputqueue = Queue() outputqueue = Queue() timequeue = Queue() infoqueue = Queue() waitforprocessors = Lock() waitforprocessors.acquire(False) datathread = DataThread(self,filename,modules, outputfile, inputqueue, outputqueue, infoqueue,waitforprocessors,dumpxml,dumpjson,**parameters) #fills inputqueue datathread.start() #processes outputqueue begintime = time.time() self.log("Processing modules") threads = [] for _ in range(self.settings['threads']): thread = ProcessorThread(self, inputqueue, outputqueue, timequeue,**parameters) threads.append(thread) self.log(str(len(threads)) + " threads ready.") for thread in threads: thread.start() self.log(str(len(threads)) + " threads started.") sys.stderr.flush() waitforprocessors.release() inputqueue.join() inputduration = time.time() - begintime self.log("Input queue processed (" + str(inputduration) + "s)") outputqueue.put( (None,None,None,None) ) #signals the end of the queue datathread.join() infopermod = defaultdict(int) while True: module_id = infoqueue.get(True, self.settings['timeout']) if module_id is None: break infopermod[module_id] += 1 duration = time.time() - begintime timequeue.put((None,None)) virtualdurationpermod = defaultdict(float) callspermod = defaultdict(int) virtualduration = 0.0 while True: modid, x = timequeue.get(True, self.settings['timeout']) if modid is None: break else: virtualdurationpermod[modid] += x callspermod[modid] += 1 virtualduration += x for modid, d in sorted(virtualdurationpermod.items(),key=lambda x: x[1] * -1): print("\t"+modid + "\t" + str(round(d,4)) + "s\t" + str(callspermod[modid]) + " calls\t" + str(infopermod[modid]) + " corrections",file=sys.stderr) self.log("Cleanup...") for thread in threads: thread.stop() #custom 
self.log("Processing done (real total " + str(round(duration,2)) + "s , virtual output " + str(virtualduration) + "s, real input " + str(inputduration) + "s)") if 'exit' in parameters and parameters['exit']: os._exit(0) #very rough exit, hacky... (solves issue #8) def __len__(self): return len(self.modules) def _getitem__(self, modid): return self.modules[modid] def __iter__(self): #iterate in proper dependency order: done = set() modules = self.modules.values() while modules: postpone = [] for module in self.modules.values(): if module.settings['depends']: for dep in module.settings['depends']: if dep not in done: postpone.append(module) break if module not in postpone: done.add(module.id) yield module if modules == postpone: raise Exception("There are unsolvable (circular?) dependencies in your module definitions") else: modules = postpone def append(self, module): assert isinstance(module, Module) self.modules[module.id] = module def train(self,module_ids=[], **parameters): #pylint: disable=dangerous-default-value for module in self: if not module_ids or module.id in module_ids: for sourcefile, modelfile in zip(module.sources, module.models): if (isinstance(modelfile, tuple) and not all([os.path.exists(f) for f in modelfile])) or not os.path.exists(modelfile): self.log("Training module " + module.id + "...") if (isinstance(sourcefile, tuple) and not all([os.path.exists(f) for f in sourcefile])) or not os.path.exists(sourcefile): raise Exception("[" + module.id + "] Source file not found: " + sourcefile) module.train(sourcefile, modelfile, **parameters) def evaluate(self, args): if args.parameters: parameters = dict(( tuple(p.split('=')) for p in args.parameters)) else: parameters = {} if args.modules: modules = args.modules.split(',') else: modules = [] outputfiles = [] if os.path.isdir(args.outputfilename): outputdir = args.outputfilename else: outputdir = None outputfiles = [args.outputfilename] if os.path.isdir(args.referencefilename): refdir = 
args.referencefilename elif os.path.isfile(args.referencefilename): refdir = None else: raise Exception("Reference file not found", args.referencefilename) inputfiles = [] if args.inputfilename != '-': if os.path.isdir(args.inputfilename): for root, _, files in os.walk(args.inputfilename): for name in files: inputfiles.append(os.path.join(root,name)) if outputdir: outputfiles.append(os.path.join(outputdir,name)) elif os.path.isfile(args.inputfilename): inputfiles = [args.inputfilename] else: raise Exception("Input file not found", args.inputfilename) else: if os.path.isdir(args.outputfilename): for root, _, files in os.walk(args.outputfilename): for name in files: outputfiles.append(os.path.join(root,name)) elif os.path.isfile(args.outputfilename): outputfiles = [args.outputfilename] evaldata = gecco.helpers.evaluation.Evaldata() if inputfiles: for inputfilename, outputfilename in zip(inputfiles, outputfiles): self.run(inputfilename,modules,outputfilename, False,False,**parameters) if refdir: referencefilename = os.path.join(refdir, os.path.basename(outputfilename)) else: referencefilename = args.referencefilename gecco.helpers.evaluation.processfile(outputfilename, referencefilename, evaldata) else: if not outputfiles: raise Exception("No output files found and no input files specified") for outputfilename in outputfiles: if refdir: referencefilename = os.path.join(refdir, os.path.basename(outputfilename)) else: referencefilename = args.referencefilename gecco.helpers.evaluation.processfile(outputfilename, referencefilename, evaldata) evaldata.output() def test(self,module_ids=[], **parameters): #pylint: disable=dangerous-default-value for module in self: if not module_ids or module.id in module_ids: self.log("Testing module " + module.id + "...") module.test(**parameters) def tune(self,module_ids=[], **parameters): #pylint: disable=dangerous-default-value for module in self: if not module_ids or module.id in module_ids: self.log("Tuning module " + module.id + 
"...") module.tune(**parameters) def reset(self,module_ids=[]): #pylint: disable=dangerous-default-value for module in self: if not module_ids or module.id in module_ids: if module.sources and module.models: for sourcefile, modelfile in zip(module.sources, module.models): if sourcefile: if isinstance(modelfile, tuple): l = modelfile else: l = [modelfile] for modelfile in l: if os.path.exists(modelfile): self.log("Deleting model " + modelfile + "...") module.reset(modelfile, sourcefile) def startservers(self, module_ids=[], blocking=False): #pylint: disable=dangerous-default-value """Starts all servers on the current host""" processes = [] MYHOSTS = set( [socket.getfqdn() , socket.gethostname(), socket.gethostbyname(socket.gethostname()), '127.0.0.1'] ) self.log("Starting servers for " + "/".join(MYHOSTS) ) if not os.path.exists(self.root + "/run"): os.mkdir(self.root + "/run") host = socket.getfqdn() for module in self: if not module.local: if not module_ids or module.id in module_ids: portfound = False #port is tasty, let's find port! while not portfound: port = random.randint(10000,65000) #get a random port sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) result = sock.connect_ex(('127.0.0.1',port)) if result != 0: portfound = True #Start this server *in a separate subprocess* if self.configfile: cmd = "gecco " + self.configfile + " " else: cmd = sys.argv[0] + " " cmd += "startserver " + module.id + " " + host + " " + str(port) self.log("Starting server " + module.id + "@" + host + ":" + str(port) + " ...") process = subprocess.Popen(cmd.split(' '),close_fds=True) with open(self.root + "/run/" + module.id + "." + host + "." 
+ str(port) + ".pid",'w') as f: f.write(str(process.pid)) processes.append(process) else: print("Module " + module.id + " is local",file=sys.stderr) self.log(str(len(processes)) + " server(s) started.") if blocking: if processes: os.wait() #blocking self.log("All servers ended.") def stopservers(self, module_ids=[]): #pylint: disable=dangerous-default-value MYHOSTS = set( [socket.getfqdn() , socket.gethostname(), socket.gethostbyname(socket.gethostname()), '127.0.0.1'] ) self.log("Stopping servers for " + "/".join(MYHOSTS) ) runpath = self.root + "/run/" if not os.path.exists(runpath): os.mkdir(runpath) self.findservers() for module in self.modules.values(): for host,port,load in module.servers: #pylint: disable=unused-variable if not module.local and (not module_ids or module.id in module_ids) and host in MYHOSTS: self.log("Stopping server " + module.id + "@" + host + ":" + str(port) + " ...") with open(runpath + module.id + "." + host + "." + str(port) + ".pid",'r') as f: pid = int(f.read().strip()) try: os.kill(pid, 15) except ProcessLookupError: self.log("(process already dead)") os.unlink(runpath + module.id + "." + host + "." 
+ str(port) + ".pid") def findservers(self): """find all running servers and get the load, will be called by Corrector.load() once before a run""" #reset servers for modules for module in self.modules.values(): module.servers = [] servers = [] runpath = self.root + "/run/" if os.path.exists(runpath): for filename in glob(runpath + "/*.pid"): filename = os.path.basename(filename) fields = filename.split('.')[:-1] try: module = self.modules[fields[0]] except KeyError: #PID for non-existant module, skip continue host = ".".join(fields[1:-1]) port = int(fields[-1]) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(0.5) #module servers have to respond very quickly or we ignore them try: sock.connect( (host,port) ) sock.sendall(b"%GETLOAD%\n") load = float(sock.recv(1024)) module.servers.append( (host,port,load) ) if hasattr(module,'forcelocal') and module.forcelocal: module.local = True servers.append( (module.id, host,port,load) ) except socket.timeout: self.log("Connection to " + module.id + "@" +host+":" + str(port) + " timed out") continue except ConnectionRefusedError: self.log("Connection to " + module.id + "@" +host+":" + str(port) + " refused") continue except ValueError: self.log("Connection to " + module.id + "@" +host+":" + str(port) + " failed") continue return servers def startserver(self, module_id, host, port): """Start one particular module's server. 
This method will be launched by server() in different processes""" module = self.modules[module_id] self.log("Loading module") module.load() self.log("Running server " + module_id+"@"+host+":"+str(port) + " ...") try: module.runserver(host,port) #blocking except OSError: self.log("Server " + module_id+"@"+host+":"+str(port) + " failed, address already in use.") self.log("Server " + module_id+"@"+host+":"+str(port) + " ended.") def main(self): """Parse command line options and run the desired part of the system""" parser = argparse.ArgumentParser(description="Gecco is a generic, scalable and modular spelling correction framework", formatter_class=argparse.ArgumentDefaultsHelpFormatter) subparsers = parser.add_subparsers(dest='command',title='Commands') parser_run = subparsers.add_parser('run', help="Run the spelling corrector on the specified input file") parser_run.add_argument('-o',dest="outputfile", help="Output filename (if not specified, the input file will be edited in-place",required=False,default="") parser_run.add_argument('-O',dest="dumpxml", help="Print result document to stdout as FoLiA XML", required=False) parser_run.add_argument('--json',dest="dumpjson", help="Print result document to stdout as JSON", action='store_true',default=False, required=False) parser_run.add_argument('filename', help="The file to correct, can be either a FoLiA XML file or a plain-text file which will be automatically tokenised and converted on-the-fly. The XML file will also be the output file. The XML file is edited in place, it will also be the output file unless -o is specified") parser_run.add_argument('modules', help="Only run the modules with the specified IDs (comma-separated list) (if omitted, all modules are run)", nargs='?',default="") parser_run.add_argument('-p',dest='parameters', help="Custom parameters passed to the modules, specify as -p parameter=value. 
This option can be issued multiple times", required=False, action="append") parser_run.add_argument('-m',dest='metadata', help="Set extra metadata to be included in the resulting FoLiA document, specify as -m key=value. This options can be issued multiple times ", required=False, action="append") parser_run.add_argument('-s',dest='settings', help="Setting overrides, specify as -s setting=value. This option can be issues multiple times.", required=False, action="append") parser_run.add_argument('--local', help="Run all modules locally, ignore remote servers", required=False, action='store_true',default=False) parser_startservers = subparsers.add_parser('startservers', help="Starts all the module servers, or the modules explicitly specified, on the current host. Issue once for each host.") parser_startservers.add_argument('modules', help="Only start server for modules with the specified IDs (comma-separated list) (if omitted, all modules are run)", nargs='?',default="") parser_startservers.add_argument('-b',"--blocking", help="Start in blocking/foreground mode, remains running until all servers have ended", action="store_true") parser_stopservers = subparsers.add_parser('stopservers', help="Stops all the module servers, or the modules explicitly specified, on the current host. 
Issue once for each host.") parser_stopservers.add_argument('modules', help="Only stop server for modules with the specified IDs (comma-separated list) (if omitted, all modules are run)", nargs='?',default="") parser_listservers = subparsers.add_parser('listservers', help="Lists all the module servers on all hosts.") parser_startserver = subparsers.add_parser('startserver', help="Start one module's server on the specified port, use 'startservers' instead") parser_startserver.add_argument('module', help="Module ID") parser_startserver.add_argument('host', help="Host/IP to bind to") parser_startserver.add_argument('port', type=int, help="Port") parser_train = subparsers.add_parser('train', help="Train modules") parser_train.add_argument('modules', help="Only train for modules with the specified IDs (comma-separated list) (if omitted, all modules are trained)", nargs='?',default="") parser_train.add_argument('-p',dest='parameters', help="Custom parameters passed to the modules, specify as -p parameter=value. This option can be issued multiple times", required=False, action="append") parser_eval = subparsers.add_parser('evaluate', help="Runs the spelling corrector on input data and compares it to reference data, produces an evaluation report") parser_eval.add_argument('--local', help="Run all modules locally, ignore remote servers", required=False, action='store_true',default=False) parser_eval.add_argument('-s',dest='settings', help="Setting overrides, specify as -s setting=value. This option can be issues multiple times.", required=False, action="append") parser_eval.add_argument('-p',dest='parameters', help="Custom parameters passed to the modules, specify as -p parameter=value. This option can be issued multiple times", required=False, action="append") parser_eval.add_argument('inputfilename', help="File or directory containing the input (plain text or FoLiA XML). 
Set to - if the output is already produced and you merely want to evaluate.") parser_eval.add_argument('outputfilename', help="File or directory to store the output (FoLiA XML)") parser_eval.add_argument('referencefilename', help="File or directory that holds the reference data (FoLiA XML)") parser_eval.add_argument('modules', help="Only train for modules with the specified IDs (comma-separated list) (if omitted, all modules are tested)", nargs='?',default="") #parser_test = subparsers.add_parser('test', help="Test modules") #parser_test.add_argument('modules', help="Only train for modules with the specified IDs (comma-separated list) (if omitted, all modules are tested)", nargs='?',default="") #parser_test.add_argument('-p',dest='parameters', help="Custom parameters passed to the modules, specify as -p parameter=value. This option can be issued multiple times", required=False, action="append") #parser_tune = subparsers.add_parser('tune', help="Tune modules") #parser_tune.add_argument('modules', help="Only train for modules with the specified IDs (comma-separated list) (if omitted, all modules are tuned)", nargs='?',default="") #parser_tune.add_argument('-p',dest='parameters', help="Custom parameters passed to the modules, specify as -p parameter=value. This option can be issued multiple times", required=False, action="append") parser_reset = subparsers.add_parser('reset', help="Reset modules, deletes all trained models that have sources. Issue prior to train if you want to start anew.") parser_reset.add_argument('modules', help="Only reset for modules with the specified IDs (comma-separated list) (if omitted, all modules are reset)", nargs='?',default="") parser_wipe = subparsers.add_parser('wipe', help="Forcibly deletes all knowledge of running servers, use only when you are sure no module servers are running (stop them with stopservers), or they will be orphaned. 
Used to clean up after a crash.") args = parser.parse_args() try: if args.settings: for key, value in ( tuple(p.split('=')) for p in args.settings): if value.isnumeric(): self.settings[key] = int(value) else: self.settings[key] = value except AttributeError: pass parameters = {} modules = [] self.log("GECCO v" + VERSION + " using " + self.settings['id']) if args.command == 'run': for module in self.modules.values(): module.forcelocal = args.local if args.parameters: parameters = dict(( tuple(p.split('=')) for p in args.parameters)) if args.metadata: parameters['metadata'] = dict(( tuple(p.split('=')) for p in args.metadata)) parameters['exit'] = True #force exit from run(), prevent stale processes if args.modules: modules = args.modules.split(',') self.run(args.filename,modules,args.outputfile,args.dumpxml, args.dumpjson,**parameters) elif args.command == 'startservers': if args.modules: modules = args.modules.split(',') self.startservers(modules, args.blocking) elif args.command == 'stopservers': if args.modules: modules = args.modules.split(',') self.stopservers(modules) elif args.command == 'startserver': self.startserver(args.module, args.host, args.port) elif args.command == 'listservers' or args.command == 'ls': servers = self.findservers() if not servers: print("No servers are running (root=",self.root+")", file=sys.stderr) else: for module, host, port, load in servers: print(module + "@" + host + ":" + str(port) + " (load " + str(load) + ")") elif args.command == 'train': if args.parameters: parameters = dict(( tuple(p.split('=')) for p in args.parameters)) if args.modules: modules = args.modules.split(',') self.train(modules) elif args.command == 'evaluate': self.evaluate(args) elif args.command == 'test': if args.parameters: parameters = dict(( tuple(p.split('=')) for p in args.parameters)) if args.modules: modules = args.modules.split(',') self.test(modules) elif args.command == 'tune': if args.parameters: parameters = dict(( tuple(p.split('=')) for p in 
args.parameters)) if args.modules: modules = args.modules.split(',') self.tune(modules) elif args.command == 'reset': if args.modules: modules = args.modules.split(',') self.reset(modules) elif args.command == 'wipe': count = 0 runpath = self.root + "/run/" for filename in glob(runpath + "/*.pid"): count += 1 os.unlink(filename) print("Wiped " + str(count) + " servers from memory. If any were still running, they are now orphans!",file=sys.stderr) elif not args.command: parser.print_help() else: print("No such command: " + args.command,file=sys.stderr) sys.exit(2) sys.exit(0) class LineByLineClient: """Simple communication protocol between client and server, newline-delimited""" def __init__(self, host, port,timeout=120): self.host = host self.port = port self.timeout = timeout self.connected = False def connect(self): self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #pylind: disable=attribute-defined-outside-init self.socket.settimeout(self.timeout) self.socket.connect( (self.host,self.port) ) self.connected = True def communicate(self, msg): self.send(msg) answer = self.receive() #print("Output: [" + msg + "], Response: [" + answer + "]",file=sys.stderr) return answer def send(self, msg): if not self.connected: self.connect() if isinstance(msg, str): msg = msg.encode('utf-8') if msg[-1] != 10: msg += b"\n" self.socket.sendall(msg) def receive(self): if not self.connected: self.connect() buffer = b'' cont_recv = True while cont_recv: chunk = self.socket.recv(1024) if not chunk or chunk[-1] == 10: #newline cont_recv = False buffer += chunk return str(buffer,'utf-8').strip() def close(self): if self.connected: self.socket.close() self.connected = False class LineByLineServerHandler(socketserver.BaseRequestHandler): """ The generic RequestHandler class for our server. 
Instantiated once per connection to the server, invokes the module's run() """ def handle(self): while True: #We have to loop so the connection is not closed after one request # self.request is the TCP socket connected to the client, self.server is the server cont_recv = True buffer = b'' while cont_recv: chunk = self.request.recv(1024) if not chunk or chunk[-1] == 10: #newline cont_recv = False buffer += chunk if not chunk: #connection broken break msg = str(buffer,'utf-8').strip() if msg == "%GETLOAD%": response = str(self.server.module.server_load()) else: response = json.dumps(self.server.module.run(json.loads(msg))) #print("Input: [" + msg + "], Response: [" + response + "]",file=sys.stderr) if isinstance(response,str): response = response.encode('utf-8') if response[-1] != 10: response += b"\n" self.request.sendall(response) class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): def handle_error(self,request,client_address): print("An error occurred in the server for module " + self.module.id, file=sys.stderr) exc_type, exc_value, exc_traceback = sys.exc_info() print(exc_type, exc_value,file=sys.stderr) traceback.print_tb(exc_traceback, limit=50, file=sys.stderr) class Module: UNIT = folia.Document #Specifies on type of input tbe module gets. An entire FoLiA document is the default, any smaller structure element can be assigned, such as folia.Sentence or folia.Word . More fine-grained levels usually increase efficiency. 
UNITFILTER = None #Can be a function that takes a unit and return True if it has to be processed CLIENT = LineByLineClient SERVER = LineByLineServerHandler def __init__(self, parent,**settings): self.parent = parent self.settings = settings self.submodclients = {} #each module keeps a bunch of clients open to the servers of the various submodules so we don't have to reconnect constantly (= faster) self.servers = [] #only for the master process, will be populated by it later self.verifysettings() def getfilename(self, filename): if isinstance(filename, tuple): return tuple( ( self.getfilename(x) for x in filename ) ) elif filename[0] == '/': return filename else: return self.parent.root + filename def verifysettings(self): if 'id' not in self.settings: raise Exception("Module must have an ID!") self.id = self.settings['id'] for c in self.id: if c in ('.',' ','/'): raise ValueError("Invalid character in module ID (no spaces, period and slashes allowed): " + self.id) if 'source' in self.settings: if isinstance(self.settings['source'],str): self.sources = [ self.settings['source'] ] else: self.sources = self.settings['source'] elif 'sources' in self.settings: self.sources = self.settings['sources'] else: self.sources = [] self.sources = [ self.getfilename(f) for f in self.sources ] if 'model' in self.settings: if isinstance(self.settings['model'],str): self.models = [ self.settings['model'] ] else: self.models = self.settings['model'] elif 'models' in self.settings: self.models = self.settings['models'] else: self.models = [] self.models = [ self.getfilename(f) for f in self.models ] if self.sources and len(self.sources) != len(self.models): raise Exception("Number of specified sources and models for module " + self.id + " should be equal!") if 'logfunction' not in self.settings: self.settings['logfunction'] = lambda x: print(datetime.datetime.now().strftime("%H:%M:%S.%f") + " [" + self.id + "] " + x,file=sys.stderr) #will be rather messy when multithreaded self.log = 
self.settings['logfunction'] #Some defaults for FoLiA processing if 'set' not in self.settings: self.settings['set'] = "https://raw.githubusercontent.com/proycon/folia/master/setdefinitions/spellingcorrection.foliaset.xml" if 'class' not in self.settings: self.settings['class'] = "nonworderror" if 'annotator' not in self.settings: self.settings['annotator'] = self.id if 'depends' not in self.settings: self.settings['depends'] = [] if 'submodules' not in self.settings: self.submodules = {} else: try: self.submodules = { self.parent[x].id : self.parent[x] for x in self.settings['submodules'] } except KeyError: raise Exception("One or more submodules are not defined") for m in self.submodules.values(): if m.local: raise Exception("Module " + m.id + " is used as a submodule, but no servers are defined, submodules can not be local only") if m.UNIT != self.UNIT: raise Exception("Module " + m.id + " is used as a submodule of " + self.id + ", but they do not take the same unit") if 'submodule' not in self.settings: self.submodule = False else: self.submodule = bool(self.settings['submodule']) if 'local' not in self.settings: self.local = False else: self.local = bool(self.settings['local']) if self.submodule and self.local: raise Exception("Module " + self.id + " is a submodule, but no servers are defined, submodules can not be local only") def getserver(self, index): if not self.servers: raise IndexError("No servers") index = index % len(self.servers) return self.servers[index] def getsubmoduleclient(self, submodule): #submodule.prepare() #will block until all submod dependencies are done #for server,port in submodule.findserver(self.parent.loadbalancemaster): # if (server,port) not in self.submodclients: # self.submodclients[(server,port)] = submodule.CLIENT(server,port) # return self.submodclients[(server,port)] #raise Exception("Could not find server for submodule " + submodule.id) raise NotImplementedError #may be obsolete def prepare(self): """Executed prior to 
running the module, waits until all dependencies have completed""" waiting = True while waiting: waiting = False for dep in self.settings['depends']: if dep not in self.parent.done: waiting = True break if waiting: time.sleep(0.05) ####################### CALLBACKS ########################### ##### Default callbacks, almost never need to be overloaded: def init(self, foliadoc): """Initialises the module on the document. This method should set all the necessary declarations if they are not already present. It will be called sequentially and only once on the entire document.""" if 'set' in self.settings and self.settings['set']: if not foliadoc.declared(folia.Correction, self.settings['set']): foliadoc.declare(folia.Correction, self.settings['set']) return True def runserver(self, host, port): """Runs the server. Invoked by the Corrector on start. """ server = ThreadedTCPServer((host, port), self.SERVER) server.allow_reuse_address = True #pylint: disable=attribute-defined-outside-init server.module = self #pylint: disable=attribute-defined-outside-init # Start a thread with the server -- that thread will then fork for each request server_thread = Thread(target=server.serve_forever) # Exit the server thread when the main thread terminates server_thread.setDaemon(True) server_thread.start() server_thread.join() #block until done server.shutdown() def server_load(self): """Returns a float indicating the load of this server. 0 = idle, 1 = max load, >1 overloaded. 
Returns normalised system load by default, buy may be overriden for module-specific behaviour.""" return os.getloadavg()[0] / psutil.cpu_count() def runlocal(self, unit_id, inputdata, **parameters): """This method gets invoked by the Corrector when the module is run locally.""" return self.run(inputdata) def runclient(self, client, unit_id, inputdata, **parameters): """This method gets invoked by the Corrector when it should connect to a remote server, the client instance is passed and already available (will connect on first communication). """ return json.loads(client.communicate(json.dumps(inputdata))) ##### Optional callbacks invoked by the Corrector (defaults may suffice) def finish(self, foliadoc): """Finishes the module on the document. This method can do post-processing. It will be called sequentially.""" return False #Nothing to finish for this module def train(self, sourcefile, modelfile, **parameters): """This method gets invoked by the Corrector to train the model. Build modelfile out of sourcefile. Either may be a tuple if multiple files are required/requested. The function may be invoked multiple times with differences source and model files""" return False #Implies there is nothing to train for this module def test(self, **parameters): """This method gets invoked by the Corrector to test the model. Override it in your own model, use the input files in self.sources and for each entry create the corresponding file in self.models """ return False #Implies there is nothing to test for this module def tune(self, **parameters): """This method gets invoked by the Corrector to tune the model. 
Override it in your own model, use the input files in self.sources and for each entry create the corresponding file in self.models """ return False #Implies there is nothing to tune for this module def reset(self, modelfile, sourcefile): """Resets a module, should delete the specified modelfile (NOT THE SOURCEFILE!)""" filenames = (modelfile, modelfile.replace(".ibase",".wgt"), modelfile.replace(".ibase",".train")) for filename in filenames: if os.path.exists(filename): os.unlink(filename) ##### Main callbacks invoked by the Corrector that MUST ALWAYS be implemented: def prepareinput(self,unit,**parameters): """Converts a FoLiA unit to whatever lower-level input-representation the module needs. The representation must be passable over network in JSON. Will be executed serially. May return None to indicate the unit is not to be processed by the module.""" raise NotImplementedError def run(self, inputdata): """This methods gets called to turn inputdata into outputdata. It is the part that can be distributed over network and will be executed concurrently. Return value will be automatically serialised as JSON for remote modules. May return None if no output is produced.""" raise NotImplementedError def processoutput(self,outputdata,inputdata,unit_id,**parameters): """Processes low-level output data and returns a an FQL query (string) or list/tuple of FQL queries to perform on the data. Executed concurrently. May return None if no query is needed.""" raise NotImplementedError #### Callback invoked by the module itself, MUST be implemented if any loading is done: def load(self): """Load the requested modules from self.models, module-specific so doesn't do anything by default""" pass def clientload(self): """Load the requested modules from self.models, module-specific so doesn't do anything by default. 
This is a subset that may be loaded for clients, it should load as little as possible (preferably nothing at all!)""" pass ######################### FOLIA EDITING ############################## # # These methods are *NOT* available to module.run(), only to # module.processoutput() def processorquery(self): return "PROCESSOR id \"proc.gecco.{annotator}\" name \"{annotator}\" type \"auto\" IN PROCESSOR name \"gecco\" ".format(annotator=self.settings['annotator']) def addsuggestions(self, element_id, suggestions, **kwargs): self.log("Adding correction for " + element_id) if 'cls' in kwargs: cls = kwargs['cls'] else: cls = self.settings['class'] if isinstance(suggestions,str): suggestions = [suggestions] q = self.processorquery() q += "EDIT t (AS CORRECTION OF {foliaset} WITH class \"{cls}\" datetime now".format(foliaset=self.settings['set'],cls=cls) for suggestion in suggestions: if isinstance(suggestion, tuple) or isinstance(suggestion, list): suggestion, confidence = suggestion else: confidence = None q += " SUGGESTION text \"" + suggestion.replace('"','\\"') + "\"" if confidence is not None: q += " WITH confidence " + str(confidence) q += ") FOR ID \"" + element_id + "\" RETURN nothing" return q def adderrordetection(self, element_id): self.log("Adding correction for " + element_id ) #add the correction return self.proceessorquery() + "ADD errordetection OF " + self.settings['set'] + " WITH class \"" + self.settings['class'] + "\" annotator \"" + self.settings['annotator'] + "\" annotatortype \"auto\" datetime now FOR ID \"" + element_id + "\" RETURN nothing" def splitcorrection(self, word_id, suggestions): #split one word into multiple #suggestions is a list of ([word], confidence) tuples q = self.processorquery() q += "SUBSTITUTE (AS CORRECTION OF " + self.settings['set'] + " WITH class \"" + self.settings['class'] + "\" annotator \"" + self.settings['annotator'] + "\" annotatortype \"auto\" datetime now" for suggestion, confidence in suggestions: q += " 
SUGGESTION (" for i, newword in enumerate(suggestion): if i > 0: q += " " q += "SUBSTITUTE w WITH text \"" + newword.replace('"','\\"') + "\"" q += ") WITH confidence " + str(confidence) q += ") FOR SPAN ID \"" + word_id + "\"" q += " RETURN nothing" return q def mergecorrection(self, newword, originalwords): #merge multiple words into one q = self.processorquery() q += "SUBSTITUTE (AS CORRECTION OF " + self.settings['set'] + " WITH class \"" + self.settings['class'] + "\" annotator \"" + self.settings['annotator'] + "\" annotatortype \"auto\" datetime now" q += " SUGGESTION" q += " (SUBSTITUTE w WITH text \"" + newword.replace('"','\\"') + "\")" #q += " WITH confidence " + str(confidence) q += ") FOR SPAN" for i, ow in enumerate(originalwords): if i > 0: q += " &" q += " ID \"" + ow + "\"" q += " RETURN nothing" return q def suggestdeletion(self, word_id,merge=False, **kwargs): if 'cls' in kwargs: cls = kwargs['cls'] else: cls = self.settings['class'] q = self.processorquery() q += "SUBSTITUTE (AS CORRECTION OF " + self.settings['set'] + " WITH class \"" + cls + "\" annotator \"" + self.settings['annotator'] + "\" annotatortype \"auto\" datetime now" if merge: q += " SUGGESTION MERGE DELETION " else: q += " SUGGESTION DELETION " q += ") FOR SPAN ID \"" + word_id + "\"" q += " RETURN nothing" return q #----------- OLD (TODO: REMOVE) ----------- #parent = word.parent #index = parent.getindex(word,False) #if 'cls' in kwargs: # cls = kwargs['cls'] #else: # cls = self.settings['class'] #if index != -1: # self.log(" Suggesting deletion of " + str(word.id)) # sugkwargs = {} # if merge: # sugkwargs['merge'] = word.ancestor(folia.StructureElement).id # parent.data[index] = folia.Correction(word.doc, folia.Suggestion(word.doc, **sugkwargs), folia.Current(word.doc, word), set=self.settings['set'],cls=cls, annotator=self.settings['annotator'],annotatortype=folia.AnnotatorType.AUTO, datetime=datetime.datetime.now()) #else: # self.log(" ERROR: Unable to suggest deletion of " + 
str(word.id) + ", item index not found") def suggestinsertion(self,pivotword_id, text,split=False,mode='PREPEND'): q = self.processorquery() q += mode + " (AS CORRECTION OF " + self.settings['set'] + " WITH class \"" + self.settings['class'] + "\" annotator \"" + self.settings['annotator'] + "\" annotatortype \"auto\" datetime now" if split: q += " SUGGESTION SPLIT (ADD w WITH text \"" + text.replace('"','\\"') + "\") " else: q += " SUGGESTION (ADD w WITH text \"" + text.replace('"','\\"') + "\") " q += ") FOR ID \"" + pivotword_id + "\"" q += " RETURN nothing" return q #----------- OLD (TODO: REMOVE) ----------- #index = pivotword.parent.getindex(pivotword) #if index != -1: # self.log(" Suggesting insertion before " + str(pivotword.id)) # sugkwargs = {} # if split: # sugkwargs['split'] = pivotword.ancestor(folia.StructureElement).id # doc = pivotword.doc # pivotword.parent.insert(index,folia.Correction(doc, folia.Suggestion(doc, folia.Word(doc,text,generate_id_in=pivotword.parent)), folia.Current(doc), set=self.settings['set'],cls=self.settings['class'], annotator=self.settings['annotator'],annotatortype=folia.AnnotatorType.AUTO, datetime=datetime.datetime.now(), generate_id_in=pivotword.parent)) #else: # self.log(" ERROR: Unable to suggest insertion before " + str(pivotword.id) + ", item index not found") def helpmodules(): #Bit hacky, but it works print("Gecco Modules and Settings") print("=================================") print() import gecco.modules #pylint: disable=redefined-outer-name for modulefile in sorted(glob(gecco.modules.__path__[0] + "/*.py")): modulename = os.path.basename(modulefile).replace('.py','') importlib.import_module('gecco.modules.' + modulename) for C in dir(getattr(gecco.modules,modulename)): C = getattr(getattr(gecco.modules,modulename), C) if inspect.isclass(C) and issubclass(C, Module) and hasattr(C,'__doc__') and C.__doc__: print("gecco.modules." + modulename + "." 
+ C.__name__) print("----------------------------------------------------------------------") try: print(C.__doc__) except: #pylint: disable=bare-except pass print() from gecco.helpers.hapaxing import Hapaxer print("Hapaxing") print("=================================") print("The following settings can be added to any module that supports hapaxing:") print(Hapaxer.__doc__) def main(): try: configfile = sys.argv[1] if configfile in ("-h","--help"): raise IndexError elif configfile == "--helpmodules": helpmodules() sys.exit(0) sys.argv = [sys.argv[0]] + sys.argv[2:] except IndexError: print("Syntax: gecco [configfile.yml] (First specify a config file, for help then add -h)" ,file=sys.stderr) print("To see all available modules and parameters: gecco --helpmodules" ,file=sys.stderr) sys.exit(2) corrector = Corrector(config=configfile) corrector.main() if __name__ == '__main__': main()
PypiClean
/CherryPy-18.8.0.tar.gz/CherryPy-18.8.0/cherrypy/_helper.py
"""Helper functions for CherryPy applications (expose, popargs, url)."""

import urllib.parse

from cherrypy._cpcompat import text_or_bytes

import cherrypy


def expose(func=None, alias=None):
    """Expose the function or class.

    Optionally provide an alias or set of aliases.
    """
    def expose_(func):
        # Mark the callable as exposed and, when aliases were given,
        # publish each alias into the caller's namespace ("parents" is
        # bound below to the caller's frame locals).
        func.exposed = True
        if alias is not None:
            if isinstance(alias, text_or_bytes):
                parents[alias.replace('.', '_')] = func
            else:
                for a in alias:
                    parents[a.replace('.', '_')] = func
        return func

    import sys
    import types
    # Trailing comma makes this a tuple of the types we may decorate.
    decoratable_types = types.FunctionType, types.MethodType, type,
    if isinstance(func, decoratable_types):
        if alias is None:
            # @expose
            func.exposed = True
            return func
        else:
            # func = expose(func, alias)
            # NOTE: sys._getframe(1) grabs the *caller's* local namespace so
            # aliases can be installed there; the frame depth is why this
            # branch structure must not be refactored casually.
            parents = sys._getframe(1).f_locals
            return expose_(func)
    elif func is None:
        if alias is None:
            # @expose()
            parents = sys._getframe(1).f_locals
            return expose_
        else:
            # @expose(alias="alias") or
            # @expose(alias=["alias1", "alias2"])
            parents = sys._getframe(1).f_locals
            return expose_
    else:
        # @expose("alias") or
        # @expose(["alias1", "alias2"])
        # Here the positional argument is actually the alias, not the func.
        parents = sys._getframe(1).f_locals
        alias = func
        return expose_


def popargs(*args, **kwargs):
    """Decorate _cp_dispatch.

    (cherrypy.dispatch.Dispatcher.dispatch_method_name)

    Optional keyword argument: handler=(Object or Function)

    Provides a _cp_dispatch function that pops off path segments into
    cherrypy.request.params under the names specified.  The dispatch
    is then forwarded on to the next vpath element.

    Note that any existing (and exposed) member function of the class that
    popargs is applied to will override that value of the argument.  For
    instance, if you have a method named "list" on the class decorated with
    popargs, then accessing "/list" will call that function instead of popping
    it off as the requested parameter.  This restriction applies to all
    _cp_dispatch functions.  The only way around this restriction is to create
    a "blank class" whose only function is to provide _cp_dispatch.

    If there are path elements after the arguments, or more arguments are
    requested than are available in the vpath, then the 'handler' keyword
    argument specifies the next object to handle the parameterized request.
    If handler is not specified or is None, then self is used.  If handler is
    a function rather than an instance, then that function will be called
    with the args specified and the return value from that function used as
    the next object INSTEAD of adding the parameters to
    cherrypy.request.args.

    This decorator may be used in one of two ways:

    As a class decorator:

    .. code-block:: python

        @cherrypy.popargs('year', 'month', 'day')
        class Blog:
            def index(self, year=None, month=None, day=None):
                #Process the parameters here; any url like
                #/, /2009, /2009/12, or /2009/12/31
                #will fill in the appropriate parameters.

            def create(self):
                #This link will still be available at /create.
                #Defined functions take precedence over arguments.

    Or as a member of a class:

    .. code-block:: python

        class Blog:
            _cp_dispatch = cherrypy.popargs('year', 'month', 'day')
            #...

    The handler argument may be used to mix arguments with built in functions.
    For instance, the following setup allows different activities at the
    day, month, and year level:

    .. code-block:: python

        class DayHandler:
            def index(self, year, month, day):
                #Do something with this day; probably list entries

            def delete(self, year, month, day):
                #Delete all entries for this day

        @cherrypy.popargs('day', handler=DayHandler())
        class MonthHandler:
            def index(self, year, month):
                #Do something with this month; probably list entries

            def delete(self, year, month):
                #Delete all entries for this month

        @cherrypy.popargs('month', handler=MonthHandler())
        class YearHandler:
            def index(self, year):
                #Do something with this year

            #...

        @cherrypy.popargs('year', handler=YearHandler())
        class Root:
            def index(self):
                #...

    """
    # Since keyword arg comes after *args, we have to process it ourselves
    # for lower versions of python.

    handler = None
    handler_call = False
    for k, v in kwargs.items():
        if k == 'handler':
            handler = v
        else:
            tm = "cherrypy.popargs() got an unexpected keyword argument '{0}'"
            raise TypeError(tm.format(k))

    import inspect

    # A callable or class handler is *invoked* with the popped params;
    # any other handler object becomes the next dispatch target as-is.
    if handler is not None \
            and (hasattr(handler, '__call__') or inspect.isclass(handler)):
        handler_call = True

    def decorated(cls_or_self=None, vpath=None):
        if inspect.isclass(cls_or_self):
            # cherrypy.popargs is a class decorator
            cls = cls_or_self
            name = cherrypy.dispatch.Dispatcher.dispatch_method_name
            setattr(cls, name, decorated)
            return cls

        # We're in the actual function
        self = cls_or_self
        parms = {}
        for arg in args:
            if not vpath:
                break
            parms[arg] = vpath.pop(0)

        if handler is not None:
            if handler_call:
                return handler(**parms)
            else:
                cherrypy.request.params.update(parms)
                return handler

        cherrypy.request.params.update(parms)

        # If we are the ultimate handler, then to prevent our _cp_dispatch
        # from being called again, we will resolve remaining elements through
        # getattr() directly.
        if vpath:
            return getattr(self, vpath.pop(0), None)
        else:
            return self

    return decorated


def url(path='', qs='', script_name=None, base=None, relative=None):
    """Create an absolute URL for the given path.

    If 'path' starts with a slash ('/'), this will return
    (base + script_name + path + qs).
    If it does not start with a slash, this returns
    (base + script_name [+ request.path_info] + path + qs).

    If script_name is None, cherrypy.request will be used
    to find a script_name, if available.

    If base is None, cherrypy.request.base will be used (if available).
    Note that you can use cherrypy.tools.proxy to change this.

    Finally, note that this function can be used to obtain an absolute URL
    for the current request path (minus the querystring) by passing no args.
    If you call url(qs=cherrypy.request.query_string), you should get the
    original browser URL (assuming no internal redirections).

    If relative is None or not provided, request.app.relative_urls will
    be used (if available, else False). If False, the output will be an
    absolute URL (including the scheme, host, vhost, and script_name).
    If True, the output will instead be a URL that is relative to the
    current request path, perhaps including '..' atoms. If relative is
    the string 'server', the output will instead be a URL that is
    relative to the server root; i.e., it will start with a slash.
    """
    # A mapping/sequence querystring is encoded; a non-empty string gets '?'.
    if isinstance(qs, (tuple, list, dict)):
        qs = urllib.parse.urlencode(qs)
    if qs:
        qs = '?' + qs

    if cherrypy.request.app:
        if not path.startswith('/'):
            # Append/remove trailing slash from path_info as needed
            # (this is to support mistyped URL's without redirecting;
            #  if you want to redirect, use tools.trailing_slash).
            pi = cherrypy.request.path_info
            if cherrypy.request.is_index is True:
                if not pi.endswith('/'):
                    pi = pi + '/'
            elif cherrypy.request.is_index is False:
                if pi.endswith('/') and pi != '/':
                    pi = pi[:-1]

            if path == '':
                path = pi
            else:
                path = urllib.parse.urljoin(pi, path)

        if script_name is None:
            script_name = cherrypy.request.script_name
        if base is None:
            base = cherrypy.request.base

        newurl = base + script_name + normalize_path(path) + qs
    else:
        # No request.app (we're being called outside a request).
        # We'll have to guess the base from server.* attributes.
        # This will produce very different results from the above
        # if you're using vhosts or tools.proxy.
        if base is None:
            base = cherrypy.server.base()

        path = (script_name or '') + path
        newurl = base + normalize_path(path) + qs

    # At this point, we should have a fully-qualified absolute URL.

    if relative is None:
        relative = getattr(cherrypy.request.app, 'relative_urls', False)

    # See http://www.ietf.org/rfc/rfc2396.txt
    if relative == 'server':
        # "A relative reference beginning with a single slash character is
        # termed an absolute-path reference, as defined by <abs_path>..."
        # This is also sometimes called "server-relative".
        # split('/', 3) keeps scheme://host intact and drops it, leaving
        # the path portion prefixed with a single slash.
        newurl = '/' + '/'.join(newurl.split('/', 3)[3:])
    elif relative:
        # "A relative reference that does not begin with a scheme name
        # or a slash character is termed a relative-path reference."
        # Strip the shared prefix of the current URL and the target, then
        # climb with '..' atoms for whatever remains of the current path.
        old = url(relative=False).split('/')[:-1]
        new = newurl.split('/')
        while old and new:
            a, b = old[0], new[0]
            if a != b:
                break
            old.pop(0)
            new.pop(0)
        new = (['..'] * len(old)) + new
        newurl = '/'.join(new)

    return newurl


def normalize_path(path: str) -> str:
    """Resolve given path from relative into absolute form."""
    # Fast path: nothing to normalize unless './' or '../' atoms appear.
    if './' not in path:
        return path

    # Normalize the URL by removing ./ and ../
    atoms = []
    for atom in path.split('/'):
        if atom == '.':
            pass
        elif atom == '..':
            # Don't pop from empty list
            # (i.e. ignore redundant '..')
            if atoms:
                atoms.pop()
        elif atom:
            atoms.append(atom)

    newpath = '/'.join(atoms)
    # Preserve leading '/'
    if path.startswith('/'):
        newpath = '/' + newpath
    return newpath


####
# Inlined from jaraco.classes 1.4.3
# Ref #1673
class _ClassPropertyDescriptor(object):
    """Descript for read-only class-based property.

    Turns a classmethod-decorated func into a read-only property of that class
    type (means the value cannot be set).
    """

    def __init__(self, fget, fset=None):
        """Initialize a class property descriptor.

        Instantiated by ``_helper.classproperty``.
        """
        # fset is accepted for signature symmetry but never used:
        # the property is read-only.
        self.fget = fget
        self.fset = fset

    def __get__(self, obj, klass=None):
        """Return property value."""
        if klass is None:
            klass = type(obj)
        # fget is a classmethod; bind it to the class and call it.
        return self.fget.__get__(obj, klass)()


def classproperty(func):  # noqa: D401; irrelevant for properties
    """Decorator like classmethod to implement a static class property."""
    if not isinstance(func, (classmethod, staticmethod)):
        func = classmethod(func)
    return _ClassPropertyDescriptor(func)


####
PypiClean
/BlueWhale3-3.31.3.tar.gz/BlueWhale3-3.31.3/Orange/widgets/utils/slidergraph.py
import numpy as np
from pyqtgraph import PlotWidget, mkPen, InfiniteLine, PlotCurveItem, \
    TextItem, Point
from AnyQt.QtGui import QColor
from AnyQt.QtCore import Qt


class SliderGraph(PlotWidget):
    """
    A widget graph element that shows a line plot with more sequences.
    It also plots a vertical line that can be moved left and right by the
    user. When the line is moved, a callback function is called with the
    selected value (on the x axis).

    Attributes
    ----------
    x_axis_label : str
        A text label for x axis
    y_axis_label : str
        A text label for y axis
    callback : callable
        A function which is called when selection is changed.
    background : str, optional (default: "w")
        Plot background color
    """

    def __init__(self, x_axis_label, y_axis_label, callback):
        super().__init__(background="w")
        axis = self.getAxis("bottom")
        axis.setLabel(x_axis_label)
        axis = self.getAxis("left")
        axis.setLabel(y_axis_label)

        # Disable user interaction with the view: no context menu,
        # no panning/zooming with the mouse.
        self.getViewBox().setMenuEnabled(False)
        self.getViewBox().setMouseEnabled(False, False)
        self.showGrid(True, True, alpha=0.5)
        self.setRange(xRange=(0.0, 1.0), yRange=(0.0, 1.0))
        self.hideButtons()

        # lists to store horizontal (dashed) lines and their text labels
        self.plot_horlabel = []
        self.plot_horline = []
        self._line = None  # the movable vertical cut line (InfiniteLine)
        self.callback = callback

        # variables to store sequences
        self.sequences = None
        self.x = None
        self.selection_limit = None
        self.data_increasing = None  # true if data mainly increasing

    def update(self, x, y, colors, cutpoint_x=None, selection_limit=None,
               names=None):
        """
        Function replots a graph.

        Parameters
        ----------
        x : np.ndarray
            One-dimensional array with X coordinates of the points
        y : array-like
            List of np.ndarrays that contains an array of Y values for each
            sequence.
        colors : array-like
            List of Qt colors (eg. Qt.red) for each sequence.
        cutpoint_x : int, optional
            A starting cutpoint - the location of the vertical line.
        selection_limit : tuple
            The tuple of two values that limit the range for selection.
        names : array-like
            The name of each sequence that shows in the legend, if None
            legend is not shown.
        """
        # NOTE(review): this overrides QWidget.update with a different
        # signature; callers are expected to use it as a replot method.
        self.clear_plot()
        if names is None:
            names = [None] * len(y)

        self.sequences = y
        self.x = x
        self.selection_limit = selection_limit

        # A sequence counts as "increasing" when the sum of its successive
        # differences is positive (i.e. it mostly goes up).
        self.data_increasing = [np.sum(d[1:] - d[:-1]) > 0 for d in y]

        # plot each sequence, with an optional text label at its last point
        for s, c, n, inc in zip(y, colors, names, self.data_increasing):
            c = QColor(c)
            self.plot(x, s, pen=mkPen(c, width=2), antialias=True)

            if n is not None:
                label = TextItem(
                    text=n, anchor=(0, 1), color=QColor(0, 0, 0, 128))
                label.setPos(x[-1], s[-1])
                self._set_anchor(label, len(x) - 1, inc)
                self.addItem(label)

        self._plot_cutpoint(cutpoint_x)
        self.autoRange()

    def clear_plot(self):
        """
        This function clears the plot and removes data.
        """
        self.clear()
        self.setRange(xRange=(0.0, 1.0), yRange=(0.0, 1.0))
        # Drop references to items removed by clear() so they get recreated.
        self.plot_horlabel = []
        self.plot_horline = []
        self._line = None
        self.sequences = None

    def set_cut_point(self, x):
        """
        This function sets the cutpoint (selection line) at the specific
        location.

        Parameters
        ----------
        x : int
            Cutpoint location at the x axis.
        """
        self._plot_cutpoint(x)

    def _plot_cutpoint(self, x):
        """
        Function plots the cutpoint.

        Parameters
        ----------
        x : int
            Cutpoint location. If None, no cutpoint is shown.
        """
        if x is None:
            self._line = None
            return
        if self._line is None:
            # plot interactive vertical line; movement is bounded either by
            # selection_limit or by the data's x range
            self._line = InfiniteLine(
                angle=90, pos=x, movable=True,
                bounds=self.selection_limit if self.selection_limit is not None
                else (self.x.min(), self.x.max())
            )
            self._line.setCursor(Qt.SizeHorCursor)
            self._line.setPen(mkPen(QColor(Qt.black), width=2))
            # connect before adding so drags are tracked immediately
            self._line.sigPositionChanged.connect(self._on_cut_changed)
            self.addItem(self._line)
        else:
            self._line.setValue(x)
        self._update_horizontal_lines()

    def _plot_horizontal_lines(self):
        """
        Function plots the horizontal dashed lines that point to the selected
        sequence values at the y axis.
        """
        # one dashed line and one value label per sequence
        for _ in range(len(self.sequences)):
            self.plot_horline.append(PlotCurveItem(
                pen=mkPen(QColor(Qt.blue), style=Qt.DashLine)))
            self.plot_horlabel.append(TextItem(
                color=QColor(Qt.black), anchor=(0, 1)))
        for item in self.plot_horlabel + self.plot_horline:
            self.addItem(item)

    def _set_anchor(self, label, cutidx, inc):
        """
        This function sets the location of the text label around the selected
        point at the curve. It places the text such that it is not plotted
        over the line.

        Parameters
        ----------
        label : TextItem
            Text item that needs to have location set.
        cutidx : int
            The index of the selected element in the list. If the index is in
            the first half of the list we put the label on the right side,
            else on the left, such that it does not disappear at the graph
            edge.
        inc : bool
            This parameter tells whether the curve value is increasing or
            decreasing.
        """
        # Anchor quadrant depends on both the curve direction and which
        # half of the x range the cut is in, to keep the text off the curve.
        if inc:
            label.anchor = Point(0, 0) if cutidx < len(self.x) / 2 \
                else Point(1, 1)
        else:
            label.anchor = Point(0, 1) if cutidx < len(self.x) / 2 \
                else Point(1, 0)

    def _update_horizontal_lines(self):
        """
        This function updates the horizontal lines when the selection
        changes. If the lines are not present yet, it calls the function to
        init them.
        """
        if not self.plot_horline:  # init horizontal lines
            self._plot_horizontal_lines()

        # in every case set their position
        location = int(round(self._line.value()))
        # index of the cut location in the x array
        # NOTE(review): assumes self.x is sorted ascending (searchsorted
        # precondition) — confirm against callers.
        cutidx = np.searchsorted(self.x, location)
        minx = np.min(self.x)
        for s, curve, label, inc in zip(
                self.sequences, self.plot_horline, self.plot_horlabel,
                self.data_increasing):
            y = s[cutidx]
            # dashed guide from the left edge to the cut position
            curve.setData([minx, location], [y, y])
            self._set_anchor(label, cutidx, inc)
            label.setPos(location, y)
            label.setPlainText("{:.3f}".format(y))

    def _on_cut_changed(self, line):
        """
        This function is called when the selection changes. It extracts the
        selected value and calls the callback function.

        Parameters
        ----------
        line : InfiniteLine
            The cutpoint - selection line.
        """
        # cut changed by means of a cut line over the scree plot.
        value = int(round(line.value()))
        # vertical line can take only int positions; snapping triggers this
        # handler again with the same int value, which is a no-op
        self._line.setValue(value)
        self._update_horizontal_lines()
        self.callback(value)
PypiClean
/GeoNode-3.2.0-py3-none-any.whl/geonode/static/lib/js/plugins/codesample/plugin.min.js
!function(e){"use strict";var n,t,a,r=tinymce.util.Tools.resolve("tinymce.PluginManager"),i=function(e){return function(){return e}},s=i(!1),o=i(!0),l=function(){return u},u=(n=function(e){return e.isNone()},{fold:function(e,n){return e()},is:s,isSome:s,isNone:o,getOr:a=function(e){return e},getOrThunk:t=function(e){return e()},getOrDie:function(e){throw new Error(e||"error: getOrDie called on none.")},getOrNull:i(null),getOrUndefined:i(undefined),or:a,orThunk:t,map:l,each:function(){},bind:l,exists:s,forall:o,filter:l,equals:n,equals_:n,toArray:function(){return[]},toString:i("none()")}),c=function(t){var e=i(t),n=function(){return r},a=function(e){return e(t)},r={fold:function(e,n){return n(t)},is:function(e){return t===e},isSome:o,isNone:s,getOr:e,getOrThunk:e,getOrDie:e,getOrNull:e,getOrUndefined:e,or:n,orThunk:n,map:function(e){return c(e(t))},each:function(e){e(t)},bind:a,exists:a,forall:a,filter:function(e){return e(t)?r:u},toArray:function(){return[t]},toString:function(){return"some("+t+")"},equals:function(e){return e.is(t)},equals_:function(e,n){return e.fold(s,function(e){return n(t,e)})}};return r},d={some:c,none:l,from:function(e){return null===e||e===undefined?u:c(e)}},p=tinymce.util.Tools.resolve("tinymce.dom.DOMUtils");function g(e){return e&&"PRE"===e.nodeName&&-1!==e.className.indexOf("language-")}function m(t){return function(e,n){return t(n)}}var f="undefined"!=typeof e.window?e.window:Function("return this;")(),h={},b={exports:h},y={};!function(n,t,a,d){var e=window.Prism;window.Prism={manual:!0},function(e){if("object"==typeof t&&void 0!==a)a.exports=e();else if("function"==typeof n&&n.amd)n([],e);else{("undefined"!=typeof window?window:void 0!==y?y:"undefined"!=typeof self?self:this).EphoxContactWrapper=e()}}(function(){return function c(i,s,o){function l(n,e){if(!s[n]){if(!i[n]){var t="function"==typeof d&&d;if(!e&&t)return t(n,!0);if(u)return u(n,!0);var a=new Error("Cannot find module '"+n+"'");throw a.code="MODULE_NOT_FOUND",a}var 
r=s[n]={exports:{}};i[n][0].call(r.exports,function(e){return l(i[n][1][e]||e)},r,r.exports,c,i,s,o)}return s[n].exports}for(var u="function"==typeof d&&d,e=0;e<o.length;e++)l(o[e]);return l}({1:[function(e,n,t){Prism.languages.c=Prism.languages.extend("clike",{"class-name":{pattern:/(\b(?:enum|struct)\s+)\w+/,lookbehind:!0},keyword:/\b(?:_Alignas|_Alignof|_Atomic|_Bool|_Complex|_Generic|_Imaginary|_Noreturn|_Static_assert|_Thread_local|asm|typeof|inline|auto|break|case|char|const|continue|default|do|double|else|enum|extern|float|for|goto|if|int|long|register|return|short|signed|sizeof|static|struct|switch|typedef|union|unsigned|void|volatile|while)\b/,operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|[-+*/%&|^!=<>]=?/,number:/(?:\b0x(?:[\da-f]+\.?[\da-f]*|\.[\da-f]+)(?:p[+-]?\d+)?|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?)[ful]*/i}),Prism.languages.insertBefore("c","string",{macro:{pattern:/(^\s*)#\s*[a-z]+(?:[^\r\n\\]|\\(?:\r\n|[\s\S]))*/im,lookbehind:!0,alias:"property",inside:{string:{pattern:/(#\s*include\s*)(?:<.+?>|("|')(?:\\?.)+?\2)/,lookbehind:!0},directive:{pattern:/(#\s*)\b(?:define|defined|elif|else|endif|error|ifdef|ifndef|if|import|include|line|pragma|undef|using)\b/,lookbehind:!0,alias:"keyword"}}},constant:/\b(?:__FILE__|__LINE__|__DATE__|__TIME__|__TIMESTAMP__|__func__|EOF|NULL|SEEK_CUR|SEEK_END|SEEK_SET|stdin|stdout|stderr)\b/}),delete 
Prism.languages.c["boolean"]},{}],2:[function(e,n,t){Prism.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\s\S]*?(?:\*\/|$)/,lookbehind:!0},{pattern:/(^|[^\\:])\/\/.*/,lookbehind:!0,greedy:!0}],string:{pattern:/(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},"class-name":{pattern:/(\b(?:class|interface|extends|implements|trait|instanceof|new)\s+|\bcatch\s+\()[\w.\\]+/i,lookbehind:!0,inside:{punctuation:/[.\\]/}},keyword:/\b(?:if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/,"boolean":/\b(?:true|false)\b/,"function":/\w+(?=\()/,number:/\b0x[\da-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?/i,operator:/[<>]=?|[!=]=?=?|--?|\+\+?|&&?|\|\|?|[?*/~^%]/,punctuation:/[{}[\];(),.:]/}},{}],3:[function(e,t,n){(function(e){var n=function(u){var c=/\blang(?:uage)?-([\w-]+)\b/i,n=0,C={manual:u.Prism&&u.Prism.manual,disableWorkerMessageHandler:u.Prism&&u.Prism.disableWorkerMessageHandler,util:{encode:function(e){return e instanceof O?new O(e.type,C.util.encode(e.content),e.alias):Array.isArray(e)?e.map(C.util.encode):e.replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/\u00a0/g," ")},type:function(e){return Object.prototype.toString.call(e).slice(8,-1)},objId:function(e){return e.__id||Object.defineProperty(e,"__id",{value:++n}),e.__id},clone:function s(e,t){var a,n,r=C.util.type(e);switch(t=t||{},r){case"Object":if(n=C.util.objId(e),t[n])return t[n];for(var i in a={},t[n]=a,e)e.hasOwnProperty(i)&&(a[i]=s(e[i],t));return a;case"Array":return(n=C.util.objId(e),t[n])?t[n]:(a=[],t[n]=a,e.forEach(function(e,n){a[n]=s(e,t)}),a);default:return e}},getLanguage:function(e){for(;e&&!c.test(e.className);)e=e.parentElement;return e?(e.className.match(c)||[,"none"])[1].toLowerCase():"none"},currentScript:function(){if("undefined"==typeof document)return null;if("currentScript"in document)return document.currentScript;try{throw new Error}catch(a){var e=(/at [^(\r\n]*\((.*):.+:.+\)$/i.exec(a.stack)||[])[1];if(e){var 
n=document.getElementsByTagName("script");for(var t in n)if(n[t].src==e)return n[t]}return null}}},languages:{extend:function(e,n){var t=C.util.clone(C.languages[e]);for(var a in n)t[a]=n[a];return t},insertBefore:function(t,e,n,a){var r=(a=a||C.languages)[t],i={};for(var s in r)if(r.hasOwnProperty(s)){if(s==e)for(var o in n)n.hasOwnProperty(o)&&(i[o]=n[o]);n.hasOwnProperty(s)||(i[s]=r[s])}var l=a[t];return a[t]=i,C.languages.DFS(C.languages,function(e,n){n===l&&e!=t&&(this[e]=i)}),i},DFS:function l(e,n,t,a){a=a||{};var r=C.util.objId;for(var i in e)if(e.hasOwnProperty(i)){n.call(e,i,e[i],t||i);var s=e[i],o=C.util.type(s);"Object"!==o||a[r(s)]?"Array"!==o||a[r(s)]||(a[r(s)]=!0,l(s,n,i,a)):(a[r(s)]=!0,l(s,n,null,a))}}},plugins:{},highlightAll:function(e,n){C.highlightAllUnder(document,e,n)},highlightAllUnder:function(e,n,t){var a={callback:t,container:e,selector:'code[class*="language-"], [class*="language-"] code, code[class*="lang-"], [class*="lang-"] code'};C.hooks.run("before-highlightall",a),a.elements=Array.prototype.slice.apply(a.container.querySelectorAll(a.selector)),C.hooks.run("before-all-elements-highlight",a);for(var r,i=0;r=a.elements[i++];)C.highlightElement(r,!0===n,a.callback)},highlightElement:function(e,n,t){var a=C.util.getLanguage(e),r=C.languages[a];e.className=e.className.replace(c,"").replace(/\s+/g," ")+" language-"+a;var i=e.parentNode;i&&"pre"===i.nodeName.toLowerCase()&&(i.className=i.className.replace(c,"").replace(/\s+/g," ")+" language-"+a);var s={element:e,language:a,grammar:r,code:e.textContent};function o(e){s.highlightedCode=e,C.hooks.run("before-insert",s),s.element.innerHTML=s.highlightedCode,C.hooks.run("after-highlight",s),C.hooks.run("complete",s),t&&t.call(s.element)}if(C.hooks.run("before-sanity-check",s),!s.code)return C.hooks.run("complete",s),void(t&&t.call(s.element));if(C.hooks.run("before-highlight",s),s.grammar)if(n&&u.Worker){var l=new 
Worker(C.filename);l.onmessage=function(e){o(e.data)},l.postMessage(JSON.stringify({language:s.language,code:s.code,immediateClose:!0}))}else o(C.highlight(s.code,s.grammar,s.language));else o(C.util.encode(s.code))},highlight:function(e,n,t){var a={code:e,grammar:n,language:t};return C.hooks.run("before-tokenize",a),a.tokens=C.tokenize(a.code,a.grammar),C.hooks.run("after-tokenize",a),O.stringify(C.util.encode(a.tokens),a.language)},matchGrammar:function(e,n,t,a,r,i,s){for(var o in t)if(t.hasOwnProperty(o)&&t[o]){var l=t[o];l=Array.isArray(l)?l:[l];for(var u=0;u<l.length;++u){if(s&&s==o+","+u)return;var c=l[u],d=c.inside,p=!!c.lookbehind,g=!!c.greedy,m=0,f=c.alias;if(g&&!c.pattern.global){var h=c.pattern.toString().match(/[imsuy]*$/)[0];c.pattern=RegExp(c.pattern.source,h+"g")}c=c.pattern||c;for(var b=a,y=r;b<n.length;y+=n[b].length,++b){var w=n[b];if(n.length>e.length)return;if(!(w instanceof O)){if(g&&b!=n.length-1){if(c.lastIndex=y,!(P=c.exec(e)))break;for(var v=P.index+(p&&P[1]?P[1].length:0),k=P.index+P[0].length,x=b,_=y,F=n.length;x<F&&(_<k||!n[x].type&&!n[x-1].greedy);++x)(_+=n[x].length)<=v&&(++b,y=_);if(n[b]instanceof O)continue;A=x-b,w=e.slice(y,_),P.index-=y}else{c.lastIndex=0;var P=c.exec(w),A=1}if(P){p&&(m=P[1]?P[1].length:0);k=(v=P.index+m)+(P=P[0].slice(m)).length;var S=w.slice(0,v),j=w.slice(k),$=[b,A];S&&(++b,y+=S.length,$.push(S));var E=new O(o,d?C.tokenize(P,d):P,f,P,g);if($.push(E),j&&$.push(j),Array.prototype.splice.apply(n,$),1!=A&&C.matchGrammar(e,n,t,b,y,!0,o+","+u),i)break}else if(i)break}}}}},tokenize:function(e,n){var t=[e],a=n.rest;if(a){for(var r in a)n[r]=a[r];delete n.rest}return C.matchGrammar(e,t,n,0,0,!1),t},hooks:{all:{},add:function(e,n){var t=C.hooks.all;t[e]=t[e]||[],t[e].push(n)},run:function(e,n){var t=C.hooks.all[e];if(t&&t.length)for(var a,r=0;a=t[r++];)a(n)}},Token:O};function 
O(e,n,t,a,r){this.type=e,this.content=n,this.alias=t,this.length=0|(a||"").length,this.greedy=!!r}if(u.Prism=C,O.stringify=function(e,n){if("string"==typeof e)return e;if(Array.isArray(e))return e.map(function(e){return O.stringify(e,n)}).join("");var t={type:e.type,content:O.stringify(e.content,n),tag:"span",classes:["token",e.type],attributes:{},language:n};if(e.alias){var a=Array.isArray(e.alias)?e.alias:[e.alias];Array.prototype.push.apply(t.classes,a)}C.hooks.run("wrap",t);var r=Object.keys(t.attributes).map(function(e){return e+'="'+(t.attributes[e]||"").replace(/"/g,"&quot;")+'"'}).join(" ");return"<"+t.tag+' class="'+t.classes.join(" ")+'"'+(r?" "+r:"")+">"+t.content+"</"+t.tag+">"},!u.document)return u.addEventListener&&(C.disableWorkerMessageHandler||u.addEventListener("message",function(e){var n=JSON.parse(e.data),t=n.language,a=n.code,r=n.immediateClose;u.postMessage(C.highlight(a,C.languages[t],t)),r&&u.close()},!1)),C;var e=C.util.currentScript();if(e&&(C.filename=e.src,e.hasAttribute("data-manual")&&(C.manual=!0)),!C.manual){var t=function(){C.manual||C.highlightAll()},a=document.readyState;"loading"===a||"interactive"===a&&e&&e.defer?document.addEventListener("DOMContentLoaded",t):window.requestAnimationFrame?window.requestAnimationFrame(t):window.setTimeout(t,16)}return C}("undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{});void 0!==t&&t.exports&&(t.exports=n),void 0!==e&&(e.Prism=n)}).call(this,void 0!==y?y:"undefined"!=typeof self?self:"undefined"!=typeof 
window?window:{})},{}],4:[function(e,n,t){Prism.languages.cpp=Prism.languages.extend("c",{"class-name":{pattern:/(\b(?:class|enum|struct)\s+)\w+/,lookbehind:!0},keyword:/\b(?:alignas|alignof|asm|auto|bool|break|case|catch|char|char16_t|char32_t|class|compl|const|constexpr|const_cast|continue|decltype|default|delete|do|double|dynamic_cast|else|enum|explicit|export|extern|float|for|friend|goto|if|inline|int|int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|long|mutable|namespace|new|noexcept|nullptr|operator|private|protected|public|register|reinterpret_cast|return|short|signed|sizeof|static|static_assert|static_cast|struct|switch|template|this|thread_local|throw|try|typedef|typeid|typename|union|unsigned|using|virtual|void|volatile|wchar_t|while)\b/,number:{pattern:/(?:\b0b[01']+|\b0x(?:[\da-f']+\.?[\da-f']*|\.[\da-f']+)(?:p[+-]?[\d']+)?|(?:\b[\d']+\.?[\d']*|\B\.[\d']+)(?:e[+-]?[\d']+)?)[ful]*/i,greedy:!0},operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|[-+*/%&|^!=<>]=?|\b(?:and|and_eq|bitand|bitor|not|not_eq|or|or_eq|xor|xor_eq)\b/,"boolean":/\b(?:true|false)\b/}),Prism.languages.insertBefore("cpp","string",{"raw-string":{pattern:/R"([^()\\ 
]{0,16})\([\s\S]*?\)\1"/,alias:"string",greedy:!0}})},{}],5:[function(e,n,t){Prism.languages.csharp=Prism.languages.extend("clike",{keyword:/\b(?:abstract|add|alias|as|ascending|async|await|base|bool|break|byte|case|catch|char|checked|class|const|continue|decimal|default|delegate|descending|do|double|dynamic|else|enum|event|explicit|extern|false|finally|fixed|float|for|foreach|from|get|global|goto|group|if|implicit|in|int|interface|internal|into|is|join|let|lock|long|namespace|new|null|object|operator|orderby|out|override|params|partial|private|protected|public|readonly|ref|remove|return|sbyte|sealed|select|set|short|sizeof|stackalloc|static|string|struct|switch|this|throw|true|try|typeof|uint|ulong|unchecked|unsafe|ushort|using|value|var|virtual|void|volatile|where|while|yield)\b/,string:[{pattern:/@("|')(?:\1\1|\\[\s\S]|(?!\1)[^\\])*\1/,greedy:!0},{pattern:/("|')(?:\\.|(?!\1)[^\\\r\n])*?\1/,greedy:!0}],"class-name":[{pattern:/\b[A-Z]\w*(?:\.\w+)*\b(?=\s+\w+)/,inside:{punctuation:/\./}},{pattern:/(\[)[A-Z]\w*(?:\.\w+)*\b/,lookbehind:!0,inside:{punctuation:/\./}},{pattern:/(\b(?:class|interface)\s+[A-Z]\w*(?:\.\w+)*\s*:\s*)[A-Z]\w*(?:\.\w+)*\b/,lookbehind:!0,inside:{punctuation:/\./}},{pattern:/((?:\b(?:class|interface|new)\s+)|(?:catch\s+\())[A-Z]\w*(?:\.\w+)*\b/,lookbehind:!0,inside:{punctuation:/\./}}],number:/\b0x[\da-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)f?/i,operator:/>>=?|<<=?|[-=]>|([-+&|?])\1|~|[-+*/%&|^!=<>]=?/,punctuation:/\?\.?|::|[{}[\];(),.:]/}),Prism.languages.insertBefore("csharp","class-name",{"generic-method":{pattern:/\w+\s*<[^>\r\n]+?>\s*(?=\()/,inside:{"function":/^\w+/,"class-name":{pattern:/\b[A-Z]\w*(?:\.\w+)*\b/,inside:{punctuation:/\./}},keyword:Prism.languages.csharp.keyword,punctuation:/[<>(),.:]/}},preprocessor:{pattern:/(^\s*)#.*/m,lookbehind:!0,alias:"property",inside:{directive:{pattern:/(\s*#)\b(?:define|elif|else|endif|endregion|error|if|line|pragma|region|undef|warning)\b/,lookbehind:!0,alias:"keyword"}}}}),Prism.languages.dotnet=Prism.lang
uages.cs=Prism.languages.csharp},{}],6:[function(e,n,t){!function(e){var n=/("|')(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/;e.languages.css={comment:/\/\*[\s\S]*?\*\//,atrule:{pattern:/@[\w-]+[\s\S]*?(?:;|(?=\s*\{))/,inside:{rule:/@[\w-]+/}},url:{pattern:RegExp("url\\((?:"+n.source+"|[^\n\r()]*)\\)","i"),inside:{"function":/^url/i,punctuation:/^\(|\)$/}},selector:RegExp("[^{}\\s](?:[^{};\"']|"+n.source+")*?(?=\\s*\\{)"),string:{pattern:n,greedy:!0},property:/[-_a-z\xA0-\uFFFF][-\w\xA0-\uFFFF]*(?=\s*:)/i,important:/!important\b/i,"function":/[-a-z0-9]+(?=\()/i,punctuation:/[(){};:,]/},e.languages.css.atrule.inside.rest=e.languages.css;var t=e.languages.markup;t&&(t.tag.addInlined("style","css"),e.languages.insertBefore("inside","attr-value",{"style-attr":{pattern:/\s*style=("|')(?:\\[\s\S]|(?!\1)[^\\])*\1/i,inside:{"attr-name":{pattern:/^\s*style/i,inside:t.tag.inside},punctuation:/^\s*=\s*['"]|['"]\s*$/,"attr-value":{pattern:/.+/i,inside:e.languages.css}},alias:"language-css"}},t.tag))}(Prism)},{}],7:[function(e,n,t){var 
a,r,i;a=Prism,r=/\b(?:abstract|assert|boolean|break|byte|case|catch|char|class|const|continue|default|do|double|else|enum|exports|extends|final|finally|float|for|goto|if|implements|import|instanceof|int|interface|long|module|native|new|null|open|opens|package|private|protected|provides|public|requires|return|short|static|strictfp|super|switch|synchronized|this|throw|throws|to|transient|transitive|try|uses|var|void|volatile|while|with|yield)\b/,i=/\b[A-Z](?:\w*[a-z]\w*)?\b/,a.languages.java=a.languages.extend("clike",{"class-name":[i,/\b[A-Z]\w*(?=\s+\w+\s*[;,=())])/],keyword:r,"function":[a.languages.clike["function"],{pattern:/(\:\:)[a-z_]\w*/,lookbehind:!0}],number:/\b0b[01][01_]*L?\b|\b0x[\da-f_]*\.?[\da-f_p+-]+\b|(?:\b\d[\d_]*\.?[\d_]*|\B\.\d[\d_]*)(?:e[+-]?\d[\d_]*)?[dfl]?/i,operator:{pattern:/(^|[^.])(?:<<=?|>>>?=?|->|--|\+\+|&&|\|\||::|[?:~]|[-+*/%&|^!=<>]=?)/m,lookbehind:!0}}),a.languages.insertBefore("java","string",{"triple-quoted-string":{pattern:/"""[ \t]*[\r\n](?:(?:"|"")?(?:\\.|[^"\\]))*"""/,greedy:!0,alias:"string"}}),a.languages.insertBefore("java","class-name",{annotation:{alias:"punctuation",pattern:/(^|[^.])@\w+/,lookbehind:!0},namespace:{pattern:/(\b(?:exports|import(?:\s+static)?|module|open|opens|package|provides|requires|to|transitive|uses|with)\s+)[a-z]\w*(?:\.[a-z]\w*)+/,lookbehind:!0,inside:{punctuation:/\./}},generics:{pattern:/<(?:[\w\s,.&?]|<(?:[\w\s,.&?]|<(?:[\w\s,.&?]|<[\w\s,.&?]*>)*>)*>)*>/,inside:{"class-name":i,keyword:r,punctuation:/[<>(),.:]/,operator:/[?&|]/}}})},{}],8:[function(e,n,t){Prism.languages.javascript=Prism.languages.extend("clike",{"class-name":[Prism.languages.clike["class-name"],{pattern:/(^|[^$\w\xA0-\uFFFF])[_$A-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\.(?:prototype|constructor))/,lookbehind:!0}],keyword:[{pattern:/((?:^|})\s*)(?:catch|finally)\b/,lookbehind:!0},{pattern:/(^|[^.]|\.\.\.\s*)\b(?:as|async(?=\s*(?:function\b|\(|[$\w\xA0-\uFFFF]|$))|await|break|case|class|const|continue|debugger|default|delete|do|else|enum|e
xport|extends|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)\b/,lookbehind:!0}],number:/\b(?:(?:0[xX](?:[\dA-Fa-f](?:_[\dA-Fa-f])?)+|0[bB](?:[01](?:_[01])?)+|0[oO](?:[0-7](?:_[0-7])?)+)n?|(?:\d(?:_\d)?)+n|NaN|Infinity)\b|(?:\b(?:\d(?:_\d)?)+\.?(?:\d(?:_\d)?)*|\B\.(?:\d(?:_\d)?)+)(?:[Ee][+-]?(?:\d(?:_\d)?)+)?/,"function":/#?[_$a-zA-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\s*(?:\.\s*(?:apply|bind|call)\s*)?\()/,operator:/--|\+\+|\*\*=?|=>|&&|\|\||[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?[.?]?|[~:]/}),Prism.languages.javascript["class-name"][0].pattern=/(\b(?:class|interface|extends|implements|instanceof|new)\s+)[\w.\\]+/,Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:/((?:^|[^$\w\xA0-\uFFFF."'\])\s])\s*)\/(?:\[(?:[^\]\\\r\n]|\\.)*]|\\.|[^/\\\[\r\n])+\/[gimyus]{0,6}(?=(?:\s|\/\*[\s\S]*?\*\/)*(?:$|[\r\n,.;:})\]]|\/\/))/,lookbehind:!0,greedy:!0},"function-variable":{pattern:/#?[_$a-zA-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\s*[=:]\s*(?:async\s*)?(?:\bfunction\b|(?:\((?:[^()]|\([^()]*\))*\)|[_$a-zA-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*)\s*=>))/,alias:"function"},parameter:[{pattern:/(function(?:\s+[_$A-Za-z\xA0-\uFFFF][$\w\xA0-\uFFFF]*)?\s*\(\s*)(?!\s)(?:[^()]|\([^()]*\))+?(?=\s*\))/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/[_$a-z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\s*=>)/i,inside:Prism.languages.javascript},{pattern:/(\(\s*)(?!\s)(?:[^()]|\([^()]*\))+?(?=\s*\)\s*=>)/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/((?:\b|\s|^)(?!(?:as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)(?![$\w\xA0-\uFFFF]))(?:[_$A-Za-z\xA0-\uFFF
F][$\w\xA0-\uFFFF]*\s*)\(\s*)(?!\s)(?:[^()]|\([^()]*\))+?(?=\s*\)\s*\{)/,lookbehind:!0,inside:Prism.languages.javascript}],constant:/\b[A-Z](?:[A-Z_]|\dx?)*\b/}),Prism.languages.insertBefore("javascript","string",{"template-string":{pattern:/`(?:\\[\s\S]|\${(?:[^{}]|{(?:[^{}]|{[^}]*})*})+}|(?!\${)[^\\`])*`/,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\${(?:[^{}]|{(?:[^{}]|{[^}]*})*})+}/,lookbehind:!0,inside:{"interpolation-punctuation":{pattern:/^\${|}$/,alias:"punctuation"},rest:Prism.languages.javascript}},string:/[\s\S]+/}}}),Prism.languages.markup&&Prism.languages.markup.tag.addInlined("script","javascript"),Prism.languages.js=Prism.languages.javascript},{}],9:[function(e,n,t){function b(e,n){return"___"+e.toUpperCase()+n+"___"}var y;y=Prism,Object.defineProperties(y.languages["markup-templating"]={},{buildPlaceholders:{value:function(a,r,e,i){if(a.language===r){var s=a.tokenStack=[];a.code=a.code.replace(e,function(e){if("function"==typeof i&&!i(e))return e;for(var n,t=s.length;-1!==a.code.indexOf(n=b(r,t));)++t;return s[t]=e,n}),a.grammar=y.languages.markup}}},tokenizePlaceholders:{value:function(p,g){if(p.language===g&&p.tokenStack){p.grammar=y.languages[g];var m=0,f=Object.keys(p.tokenStack);!function h(e){for(var n=0;n<e.length&&!(m>=f.length);n++){var t=e[n];if("string"==typeof t||t.content&&"string"==typeof t.content){var a=f[m],r=p.tokenStack[a],i="string"==typeof t?t:t.content,s=b(g,a),o=i.indexOf(s);if(-1<o){++m;var l=i.substring(0,o),u=new y.Token(g,y.tokenize(r,p.grammar),"language-"+g,r),c=i.substring(o+s.length),d=[];l&&d.push.apply(d,h([l])),d.push(u),c&&d.push.apply(d,h([c])),"string"==typeof t?e.splice.apply(e,[n,1].concat(d)):t.content=d}}else t.content&&h(t.content)}return 
e}(p.tokens)}}}})},{}],10:[function(e,n,t){Prism.languages.markup={comment:/<!--[\s\S]*?-->/,prolog:/<\?[\s\S]+?\?>/,doctype:{pattern:/<!DOCTYPE(?:[^>"'[\]]|"[^"]*"|'[^']*')+(?:\[(?:(?!<!--)[^"'\]]|"[^"]*"|'[^']*'|<!--[\s\S]*?-->)*\]\s*)?>/i,greedy:!0},cdata:/<!\[CDATA\[[\s\S]*?]]>/i,tag:{pattern:/<\/?(?!\d)[^\s>\/=$<%]+(?:\s(?:\s*[^\s>\/=]+(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+(?=[\s>]))|(?=[\s/>])))+)?\s*\/?>/i,greedy:!0,inside:{tag:{pattern:/^<\/?[^\s>\/]+/i,inside:{punctuation:/^<\/?/,namespace:/^[^\s>\/:]+:/}},"attr-value":{pattern:/=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+)/i,inside:{punctuation:[/^=/,{pattern:/^(\s*)["']|["']$/,lookbehind:!0}]}},punctuation:/\/?>/,"attr-name":{pattern:/[^\s>\/]+/,inside:{namespace:/^[^\s>\/:]+:/}}}},entity:/&#?[\da-z]{1,8};/i},Prism.languages.markup.tag.inside["attr-value"].inside.entity=Prism.languages.markup.entity,Prism.hooks.add("wrap",function(e){"entity"===e.type&&(e.attributes.title=e.content.replace(/&amp;/,"&"))}),Object.defineProperty(Prism.languages.markup.tag,"addInlined",{value:function(e,n){var t={};t["language-"+n]={pattern:/(^<!\[CDATA\[)[\s\S]+?(?=\]\]>$)/i,lookbehind:!0,inside:Prism.languages[n]},t.cdata=/^<!\[CDATA\[|\]\]>$/i;var a={"included-cdata":{pattern:/<!\[CDATA\[[\s\S]*?\]\]>/i,inside:t}};a["language-"+n]={pattern:/[\s\S]+/,inside:Prism.languages[n]};var 
r={};r[e]={pattern:RegExp(/(<__[\s\S]*?>)(?:<!\[CDATA\[[\s\S]*?\]\]>\s*|[\s\S])*?(?=<\/__>)/.source.replace(/__/g,e),"i"),lookbehind:!0,greedy:!0,inside:a},Prism.languages.insertBefore("markup","cdata",r)}}),Prism.languages.xml=Prism.languages.extend("markup",{}),Prism.languages.html=Prism.languages.markup,Prism.languages.mathml=Prism.languages.markup,Prism.languages.svg=Prism.languages.markup},{}],11:[function(e,n,t){!function(n){n.languages.php=n.languages.extend("clike",{keyword:/\b(?:__halt_compiler|abstract|and|array|as|break|callable|case|catch|class|clone|const|continue|declare|default|die|do|echo|else|elseif|empty|enddeclare|endfor|endforeach|endif|endswitch|endwhile|eval|exit|extends|final|finally|for|foreach|function|global|goto|if|implements|include|include_once|instanceof|insteadof|interface|isset|list|namespace|new|or|parent|print|private|protected|public|require|require_once|return|static|switch|throw|trait|try|unset|use|var|while|xor|yield)\b/i,"boolean":{pattern:/\b(?:false|true)\b/i,alias:"constant"},constant:[/\b[A-Z_][A-Z0-9_]*\b/,/\b(?:null)\b/i],comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0}}),n.languages.insertBefore("php","string",{"shell-comment":{pattern:/(^|[^\\])#.*/,lookbehind:!0,alias:"comment"}}),n.languages.insertBefore("php","comment",{delimiter:{pattern:/\?>$|^<\?(?:php(?=\s)|=)?/i,alias:"important"}}),n.languages.insertBefore("php","keyword",{variable:/\$+(?:\w+\b|(?={))/i,"package":{pattern:/(\\|namespace\s+|use\s+)[\w\\]+/,lookbehind:!0,inside:{punctuation:/\\/}}}),n.languages.insertBefore("php","operator",{property:{pattern:/(->)[\w]+/,lookbehind:!0}});var 
e={pattern:/{\$(?:{(?:{[^{}]+}|[^{}]+)}|[^{}])+}|(^|[^\\{])\$+(?:\w+(?:\[.+?]|->\w+)*)/,lookbehind:!0,inside:n.languages.php};n.languages.insertBefore("php","string",{"nowdoc-string":{pattern:/<<<'([^']+)'(?:\r\n?|\n)(?:.*(?:\r\n?|\n))*?\1;/,greedy:!0,alias:"string",inside:{delimiter:{pattern:/^<<<'[^']+'|[a-z_]\w*;$/i,alias:"symbol",inside:{punctuation:/^<<<'?|[';]$/}}}},"heredoc-string":{pattern:/<<<(?:"([^"]+)"(?:\r\n?|\n)(?:.*(?:\r\n?|\n))*?\1;|([a-z_]\w*)(?:\r\n?|\n)(?:.*(?:\r\n?|\n))*?\2;)/i,greedy:!0,alias:"string",inside:{delimiter:{pattern:/^<<<(?:"[^"]+"|[a-z_]\w*)|[a-z_]\w*;$/i,alias:"symbol",inside:{punctuation:/^<<<"?|[";]$/}},interpolation:e}},"single-quoted-string":{pattern:/'(?:\\[\s\S]|[^\\'])*'/,greedy:!0,alias:"string"},"double-quoted-string":{pattern:/"(?:\\[\s\S]|[^\\"])*"/,greedy:!0,alias:"string",inside:{interpolation:e}}}),delete n.languages.php.string,n.hooks.add("before-tokenize",function(e){if(/<\?/.test(e.code)){n.languages["markup-templating"].buildPlaceholders(e,"php",/<\?(?:[^"'/#]|\/(?![*/])|("|')(?:\\[\s\S]|(?!\1)[^\\])*\1|(?:\/\/|#)(?:[^?\n\r]|\?(?!>))*(?=$|\?>|[\r\n])|\/\*[\s\S]*?(?:\*\/|$))*?(?:\?>|$)/gi)}}),n.hooks.add("after-tokenize",function(e){n.languages["markup-templating"].tokenizePlaceholders(e,"php")})}(Prism)},{}],12:[function(e,n,t){Prism.languages.python={comment:{pattern:/(^|[^\\])#.*/,lookbehind:!0},"string-interpolation":{pattern:/(?:f|rf|fr)(?:("""|''')[\s\S]+?\1|("|')(?:\\.|(?!\2)[^\\\r\n])*\2)/i,greedy:!0,inside:{interpolation:{pattern:/((?:^|[^{])(?:{{)*){(?!{)(?:[^{}]|{(?!{)(?:[^{}]|{(?!{)(?:[^{}])+})+})+}/,lookbehind:!0,inside:{"format-spec":{pattern:/(:)[^:(){}]+(?=}$)/,lookbehind:!0},"conversion-option":{pattern:/![sra](?=[:}]$)/,alias:"punctuation"},rest:null}},string:/[\s\S]+/}},"triple-quoted-string":{pattern:/(?:[rub]|rb|br)?("""|''')[\s\S]+?\1/i,greedy:!0,alias:"string"},string:{pattern:/(?:[rub]|rb|br)?("|')(?:\\.|(?!\1)[^\\\r\n])*\1/i,greedy:!0},"function":{pattern:/((?:^|\s)def[ 
\t]+)[a-zA-Z_]\w*(?=\s*\()/g,lookbehind:!0},"class-name":{pattern:/(\bclass\s+)\w+/i,lookbehind:!0},decorator:{pattern:/(^\s*)@\w+(?:\.\w+)*/im,lookbehind:!0,alias:["annotation","punctuation"],inside:{punctuation:/\./}},keyword:/\b(?:and|as|assert|async|await|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield)\b/,builtin:/\b(?:__import__|abs|all|any|apply|ascii|basestring|bin|bool|buffer|bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|complex|delattr|dict|dir|divmod|enumerate|eval|execfile|file|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|intern|isinstance|issubclass|iter|len|list|locals|long|map|max|memoryview|min|next|object|oct|open|ord|pow|property|range|raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|vars|xrange|zip)\b/,"boolean":/\b(?:True|False|None)\b/,number:/(?:\b(?=\d)|\B(?=\.))(?:0[bo])?(?:(?:\d|0x[\da-f])[\da-f]*\.?\d*|\.\d+)(?:e[+-]?\d+)?j?\b/i,operator:/[-+%=]=?|!=|\*\*?=?|\/\/?=?|<[<=>]?|>[=>]?|[&|^~]/,punctuation:/[{}[\];(),.:]/},Prism.languages.python["string-interpolation"].inside.interpolation.inside.rest=Prism.languages.python,Prism.languages.py=Prism.languages.python},{}],13:[function(e,n,t){!function(e){e.languages.ruby=e.languages.extend("clike",{comment:[/#.*/,{pattern:/^=begin\s[\s\S]*?^=end/m,greedy:!0}],"class-name":{pattern:/(\b(?:class)\s+|\bcatch\s+\()[\w.\\]+/i,lookbehind:!0,inside:{punctuation:/[.\\]/}},keyword:/\b(?:alias|and|BEGIN|begin|break|case|class|def|define_method|defined|do|each|else|elsif|END|end|ensure|extend|for|if|in|include|module|new|next|nil|not|or|prepend|protected|private|public|raise|redo|require|rescue|retry|return|self|super|then|throw|undef|unless|until|when|while|yield)\b/});var n={pattern:/#\{[^}]+\}/,inside:{delimiter:{pattern:/^#\{|\}$/,alias:"tag"},rest:e.languages.ruby}};delete 
e.languages.ruby["function"],e.languages.insertBefore("ruby","keyword",{regex:[{pattern:/%r([^a-zA-Z0-9\s{(\[<])(?:(?!\1)[^\\]|\\[\s\S])*\1[gim]{0,3}/,greedy:!0,inside:{interpolation:n}},{pattern:/%r\((?:[^()\\]|\\[\s\S])*\)[gim]{0,3}/,greedy:!0,inside:{interpolation:n}},{pattern:/%r\{(?:[^#{}\\]|#(?:\{[^}]+\})?|\\[\s\S])*\}[gim]{0,3}/,greedy:!0,inside:{interpolation:n}},{pattern:/%r\[(?:[^\[\]\\]|\\[\s\S])*\][gim]{0,3}/,greedy:!0,inside:{interpolation:n}},{pattern:/%r<(?:[^<>\\]|\\[\s\S])*>[gim]{0,3}/,greedy:!0,inside:{interpolation:n}},{pattern:/(^|[^/])\/(?!\/)(?:\[.+?]|\\.|[^/\\\r\n])+\/[gim]{0,3}(?=\s*(?:$|[\r\n,.;})]))/,lookbehind:!0,greedy:!0}],variable:/[@$]+[a-zA-Z_]\w*(?:[?!]|\b)/,symbol:{pattern:/(^|[^:]):[a-zA-Z_]\w*(?:[?!]|\b)/,lookbehind:!0},"method-definition":{pattern:/(\bdef\s+)[\w.]+/,lookbehind:!0,inside:{"function":/\w+$/,rest:e.languages.ruby}}}),e.languages.insertBefore("ruby","number",{builtin:/\b(?:Array|Bignum|Binding|Class|Continuation|Dir|Exception|FalseClass|File|Stat|Fixnum|Float|Hash|Integer|IO|MatchData|Method|Module|NilClass|Numeric|Object|Proc|Range|Regexp|String|Struct|TMS|Symbol|ThreadGroup|Thread|Time|TrueClass)\b/,constant:/\b[A-Z]\w*(?:[?!]|\b)/}),e.languages.ruby.string=[{pattern:/%[qQiIwWxs]?([^a-zA-Z0-9\s{(\[<])(?:(?!\1)[^\\]|\\[\s\S])*\1/,greedy:!0,inside:{interpolation:n}},{pattern:/%[qQiIwWxs]?\((?:[^()\\]|\\[\s\S])*\)/,greedy:!0,inside:{interpolation:n}},{pattern:/%[qQiIwWxs]?\{(?:[^#{}\\]|#(?:\{[^}]+\})?|\\[\s\S])*\}/,greedy:!0,inside:{interpolation:n}},{pattern:/%[qQiIwWxs]?\[(?:[^\[\]\\]|\\[\s\S])*\]/,greedy:!0,inside:{interpolation:n}},{pattern:/%[qQiIwWxs]?<(?:[^<>\\]|\\[\s\S])*>/,greedy:!0,inside:{interpolation:n}},{pattern:/("|')(?:#\{[^}]+\}|\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0,inside:{interpolation:n}}],e.languages.rb=e.languages.ruby}(Prism)},{}],14:[function(e,n,t){var 
a=e("prismjs/components/prism-core");e("prismjs/components/prism-clike"),e("prismjs/components/prism-markup-templating"),e("prismjs/components/prism-c"),e("prismjs/components/prism-cpp"),e("prismjs/components/prism-csharp"),e("prismjs/components/prism-css"),e("prismjs/components/prism-java"),e("prismjs/components/prism-javascript"),e("prismjs/components/prism-markup"),e("prismjs/components/prism-php"),e("prismjs/components/prism-python"),e("prismjs/components/prism-ruby"),n.exports={boltExport:a}},{"prismjs/components/prism-c":1,"prismjs/components/prism-clike":2,"prismjs/components/prism-core":3,"prismjs/components/prism-cpp":4,"prismjs/components/prism-csharp":5,"prismjs/components/prism-css":6,"prismjs/components/prism-java":7,"prismjs/components/prism-javascript":8,"prismjs/components/prism-markup":10,"prismjs/components/prism-markup-templating":9,"prismjs/components/prism-php":11,"prismjs/components/prism-python":12,"prismjs/components/prism-ruby":13}]},{},[14])(14)});var r=window.Prism;window.Prism=e}(undefined,h,b,undefined);var w=b.exports.boltExport,v=function(e){return f.Prism&&e.getParam("codesample_global_prismjs",!1,"boolean")?f.Prism:w},k=function(e){var n=e.selection?e.selection.getNode():null;return g(n)?d.some(n):d.none()},x=function(i){var e,t,n=i.getParam("codesample_languages")||[{text:"HTML/XML",value:"markup"},{text:"JavaScript",value:"javascript"},{text:"CSS",value:"css"},{text:"PHP",value:"php"},{text:"Ruby",value:"ruby"},{text:"Python",value:"python"},{text:"Java",value:"java"},{text:"C",value:"c"},{text:"C#",value:"csharp"},{text:"C++",value:"cpp"}],a=(0===(e=n).length?d.none():d.some(e[0])).fold(function(){return""},function(e){return e.value}),r=(t=a,k(i).fold(function(){return t},function(e){var n=e.className.match(/language-(\w+)/);return n?n[1]:t})),s=k(i).fold(function(){return""},function(e){return e.textContent});i.windowManager.open({title:"Insert/Edit Code 
Sample",size:"large",body:{type:"panel",items:[{type:"selectbox",name:"language",label:"Language",items:n},{type:"textarea",name:"code",label:"Code view"}]},buttons:[{type:"cancel",name:"cancel",text:"Cancel"},{type:"submit",name:"save",text:"Save",primary:!0}],initialData:{language:r,code:s},onSubmit:function(e){var n,t,a,r=e.getData();n=i,t=r.language,a=r.code,n.undoManager.transact(function(){var e=k(n);return a=p.DOM.encode(a),e.fold(function(){n.insertContent('<pre id="__new" class="language-'+t+'">'+a+"</pre>"),n.selection.select(n.$("#__new").removeAttr("id")[0])},function(e){n.dom.setAttrib(e,"class","language-"+t),e.innerHTML=a,v(n).highlightElement(e),n.selection.select(e)})}),e.close()}})},_=function(a){a.ui.registry.addToggleButton("codesample",{icon:"code-sample",tooltip:"Insert/edit code sample",onAction:function(){return x(a)},onSetup:function(t){var e=function(){var e,n;t.setActive((n=(e=a).selection.getStart(),e.dom.is(n,'pre[class*="language-"]')))};return a.on("NodeChange",e),function(){return a.off("NodeChange",e)}}}),a.ui.registry.addMenuItem("codesample",{text:"Code sample...",icon:"code-sample",onAction:function(){return x(a)}})};!function F(){r.add("codesample",function(n){var t,r,a;r=(t=n).$,t.on("PreProcess",function(e){r("pre[contenteditable=false]",e.node).filter(m(g)).each(function(e,n){var t=r(n),a=n.textContent;t.attr("class",r.trim(t.attr("class"))),t.removeAttr("contentEditable"),t.empty().append(r("<code></code>").each(function(){this.textContent=a}))})}),t.on("SetContent",function(){var e=r("pre").filter(m(g)).filter(function(e,n){return"false"!==n.contentEditable});e.length&&t.undoManager.transact(function(){e.each(function(e,n){r(n).find("br").each(function(e,n){n.parentNode.replaceChild(t.getDoc().createTextNode("\n"),n)}),n.contentEditable="false",n.innerHTML=t.dom.encode(n.textContent),v(t).highlightElement(n),n.className=r.trim(n.className)})})}),_(n),(a=n).addCommand("codesample",function(){var 
e=a.selection.getNode();a.selection.isCollapsed()||g(e)?x(a):a.formatter.toggle("code")}),n.on("dblclick",function(e){g(e.target)&&x(n)})})}()}(window);
PypiClean
/Mesa_Adapted-0.8.7.3-py3-none-any.whl/mesa/visualization/modules/HexGridVisualization.py
from collections import defaultdict

from mesa.visualization.ModularVisualization import VisualizationElement


class CanvasHexGrid(VisualizationElement):
    """Visualization element that draws a hexagonal grid on an HTML canvas.

    Works like ``CanvasGrid``: a portrayal method turns each object on the
    grid into a dictionary that HexDraw.js knows how to render.  Recognized
    portrayal keys:

    * ``"x"``, ``"y"`` -- cell coordinates (filled in by :meth:`render`).
    * ``"Shape"`` -- ``"hex"`` or ``"circle"``.
    * ``"r"`` -- radius as a fraction of cell size; ``r=1`` fills the cell.
    * ``"Color"`` -- any valid HTML color, e.g. ``"Red"`` or ``"#AA08F8"``.
    * ``"Filled"`` -- ``"true"`` or ``"false"``.
    * ``"Layer"`` -- layer number >= 0; higher layers draw on top.
    * ``"text"``, ``"text_color"`` -- optional inscribed text and its color.

    Attributes:
        portrayal_method: function mapping a grid object to a portrayal dict.
        grid_width, grid_height: grid size, in cells.
        canvas_width, canvas_height: client-side canvas size, in pixels.
    """

    package_includes = ["HexDraw.js", "CanvasHexModule.js", "InteractionHandler.js"]
    portrayal_method = None  # callable: grid object -> portrayal dict
    canvas_width = 500
    canvas_height = 500

    def __init__(
        self,
        portrayal_method,
        grid_width,
        grid_height,
        canvas_width=500,
        canvas_height=500,
    ):
        """Instantiate a new CanvasHexGrid.

        Args:
            portrayal_method: function converting one grid object into a
                portrayal dictionary (see class docstring).
            grid_width, grid_height: size of the grid, in cells.
            canvas_width, canvas_height: size of the client-side canvas, in
                pixels (default 500x500).
        """
        self.portrayal_method = portrayal_method
        self.grid_width = grid_width
        self.grid_height = grid_height
        self.canvas_width = canvas_width
        self.canvas_height = canvas_height

        # JS snippet that instantiates the client-side module for this element.
        new_element = "new CanvasHexModule({}, {}, {}, {})".format(
            self.canvas_width, self.canvas_height, self.grid_width, self.grid_height
        )
        self.js_code = "elements.push(" + new_element + ");"

    def render(self, model):
        """Return a layer-number -> list-of-portrayals mapping for *model*."""
        grid_state = defaultdict(list)
        width, height = model.grid.width, model.grid.height
        for col in range(width):
            for row in range(height):
                for agent in model.grid.get_cell_list_contents([(col, row)]):
                    portrayal = self.portrayal_method(agent)
                    if not portrayal:
                        # portrayal_method may return a falsy value to hide an agent
                        continue
                    portrayal["x"] = col
                    portrayal["y"] = row
                    grid_state[portrayal["Layer"]].append(portrayal)
        return grid_state
PypiClean
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/place_group_digest_benchmark_ranking_response_py3.py
from msrest.serialization import Model


class PlaceGroupDigestBenchmarkRankingResponse(Model):
    """Benchmark-ranking digest for a place group.

    Carries the group's high / low / average / median statistics for cost,
    use, and benchmark value, the per-child ranking ``results`` list, the
    group's identifying fields (id / code / info / display), the unit labels
    needed to render the benchmark figures (e.g. ``MMBTU/ft²`` or ``$/day``),
    and the ``updated`` timestamp.  Every field is an optional keyword
    argument; unset fields default to ``None``.
    """

    # Maps each Python attribute to its JSON wire name and msrest type tag;
    # consumed by Model's serialize()/deserialize() machinery.
    _attribute_map = {
        'high_cost': {'key': 'highCost', 'type': 'float'},
        'low_cost': {'key': 'lowCost', 'type': 'float'},
        'average_cost': {'key': 'averageCost', 'type': 'float'},
        'median_cost': {'key': 'medianCost', 'type': 'float'},
        'high_use': {'key': 'highUse', 'type': 'float'},
        'low_use': {'key': 'lowUse', 'type': 'float'},
        'average_use': {'key': 'averageUse', 'type': 'float'},
        'median_use': {'key': 'medianUse', 'type': 'float'},
        'high_value': {'key': 'highValue', 'type': 'float'},
        'low_value': {'key': 'lowValue', 'type': 'float'},
        'median_value': {'key': 'medianValue', 'type': 'float'},
        'results': {'key': 'results', 'type': '[PlaceGroupDigestBenchmarkRankingChild]'},
        'place_group_id': {'key': 'placeGroupId', 'type': 'int'},
        'place_group_code': {'key': 'placeGroupCode', 'type': 'str'},
        'place_group_info': {'key': 'placeGroupInfo', 'type': 'str'},
        'place_group_display': {'key': 'placeGroupDisplay', 'type': 'str'},
        'benchmark_unit': {'key': 'benchmarkUnit', 'type': 'str'},
        'benchmark_factor_unit': {'key': 'benchmarkFactorUnit', 'type': 'str'},
        'benchmark_value_unit': {'key': 'benchmarkValueUnit', 'type': 'str'},
        'use_unit': {'key': 'useUnit', 'type': 'UnitChild'},
        'cost_unit': {'key': 'costUnit', 'type': 'UnitChild'},
        'updated': {'key': 'updated', 'type': 'iso-8601'},
    }

    def __init__(self, *, high_cost: float=None, low_cost: float=None, average_cost: float=None, median_cost: float=None, high_use: float=None, low_use: float=None, average_use: float=None, median_use: float=None, high_value: float=None, low_value: float=None, median_value: float=None, results=None, place_group_id: int=None, place_group_code: str=None, place_group_info: str=None, place_group_display: str=None, benchmark_unit: str=None, benchmark_factor_unit: str=None, benchmark_value_unit: str=None, use_unit=None, cost_unit=None, updated=None, **kwargs) -> None:
        super(PlaceGroupDigestBenchmarkRankingResponse, self).__init__(**kwargs)
        # Copy every named keyword argument onto the instance verbatim.
        # locals() at this point holds exactly the bound parameters, so
        # iterating the field names replaces 22 repetitive assignments.
        bound = locals()
        for field_name in (
            'high_cost', 'low_cost', 'average_cost', 'median_cost',
            'high_use', 'low_use', 'average_use', 'median_use',
            'high_value', 'low_value', 'median_value', 'results',
            'place_group_id', 'place_group_code', 'place_group_info',
            'place_group_display', 'benchmark_unit', 'benchmark_factor_unit',
            'benchmark_value_unit', 'use_unit', 'cost_unit', 'updated',
        ):
            setattr(self, field_name, bound[field_name])
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/widget/DynamicTooltip.js.uncompressed.js
define("dojox/widget/DynamicTooltip", ["dijit","dojo","dojox","dojo/i18n!dijit/nls/loading","dojo/require!dijit/Tooltip"], function(dijit,dojo,dojox){
dojo.provide("dojox.widget.DynamicTooltip");
dojo.experimental("dojox.widget.DynamicTooltip");

dojo.require("dijit.Tooltip");
dojo.requireLocalization("dijit", "loading");

dojo.declare("dojox.widget.DynamicTooltip", dijit.Tooltip,
	{
		// summary:
		//		Extension of dijit.Tooltip providing content fetched lazily via
		//		XHR from the URL given in the href param.

		// hasLoaded: Boolean
		//		false if the contents are yet to be loaded from the HTTP request
		hasLoaded: false,

		// href: String
		//		location from where to fetch the contents
		href: "",

		// label: String
		//		contents to display in the tooltip. Initialized to a loading message.
		label: "",

		// preventCache: Boolean
		//		passed through to dojo.xhrGet; when true the request bypasses the
		//		browser cache so refresh() fetches fresh content
		preventCache: false,

		postMixInProperties: function(){
			// summary:
			//		Widget lifecycle hook; swaps the initial label for the
			//		localized "loading" message before the tooltip is shown.
			this.inherited(arguments);
			this._setLoadingLabel();
		},

		_setLoadingLabel: function(){
			// summary:
			//		Changes the tooltip label / contents to loading message, only if
			//		there's an href param, otherwise acts as normal tooltip
			if(this.href){
				this.label = dojo.i18n.getLocalization("dijit", "loading", this.lang).loadingState;
			}
		},

		// MOW: this is a new widget, do we really need a deprecated stub?
		//	setHref: function(/*String|Uri*/ href){
		//		// summary:
		//		//		Deprecated.  Use set('href', ...) instead.
		//		dojo.deprecated("dojox.widget.DynamicTooltip.setHref() is deprecated. Use set('href', ...) instead.", "", "2.0");
		//		return this.set("href", href);
		//	},

		_setHrefAttr: function(/*String|Uri*/ href){
			// summary:
			//		Hook so attr("href", ...) works.
			// description:
			//		resets hasLoaded so the next show loads the new href
			// href:
			//		url to the content you want to show, must be within the same domain as your mainpage
			this.href = href;
			this.hasLoaded = false;
		},

		loadContent: function(node){
			// summary:
			//		Download contents of href via XHR and display
			// description:
			//		1. checks if content already loaded
			//		2. if not, sends XHR to download new data
			// NOTE(review): hasLoaded is flipped to true *before* the request
			// completes, so a failed XHR leaves the "loading" label in place
			// until refresh() is called -- confirm this is the intended behavior.
			if(!this.hasLoaded && this.href){
				this._setLoadingLabel();
				this.hasLoaded = true;

				dojo.xhrGet({
					url: this.href,
					handleAs: "text",
					tooltipWidget: this,	// stashed on the XHR args so the callback can reach the widget
					load: function(response, ioArgs){
						// within this callback `this` is the XHR args object, not the
						// widget; replace the loading label with the fetched text and
						// re-open so the tooltip resizes around the new content
						this.tooltipWidget.label = response;
						this.tooltipWidget.close();
						this.tooltipWidget.open(node);
					},
					preventCache: this.preventCache
				});
			}
		},

		refresh: function(){
			// summary:
			//		Allows re-download of contents of href and display
			//		Useful with preventCache = true
			this.hasLoaded = false;
		},

		open: function(/*DomNode*/ target){
			// summary:
			//		Display the tooltip; usually not called directly.
			target = target || (this._connectNodes && this._connectNodes[0]);
			if(!target){ return; }
			this.loadContent(target);
			this.inherited(arguments);
		}
	}
);

});
PypiClean
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/packages/aiohttp/__init__.py
__version__ = '3.5.4' from typing import Tuple # noqa from . import hdrs from .client import ( BaseConnector, ClientConnectionError, ClientConnectorCertificateError, ClientConnectorError, ClientConnectorSSLError, ClientError, ClientHttpProxyError, ClientOSError, ClientPayloadError, ClientProxyConnectionError, ClientResponse, ClientRequest, ClientResponseError, ClientSSLError, ClientSession, ClientTimeout, ClientWebSocketResponse, ContentTypeError, Fingerprint, InvalidURL, RequestInfo, ServerConnectionError, ServerDisconnectedError, ServerFingerprintMismatch, ServerTimeoutError, TCPConnector, UnixConnector, WSServerHandshakeError, request ) from .cookiejar import CookieJar, DummyCookieJar from .formdata import FormData from .helpers import BasicAuth, ChainMapProxy from .http import ( HttpVersion, HttpVersion10, HttpVersion11, WSMsgType, WSCloseCode, WSMessage, WebSocketError ) from .multipart import ( BadContentDispositionHeader, BadContentDispositionParam, BodyPartReader, MultipartReader, MultipartWriter, content_disposition_filename, parse_content_disposition ) from .payload import ( AsyncIterablePayload, BufferedReaderPayload, BytesIOPayload, BytesPayload, IOBasePayload, JsonPayload, PAYLOAD_REGISTRY, Payload, StringIOPayload, StringPayload, TextIOPayload, get_payload, payload_type ) from .payload_streamer import streamer from .resolver import AsyncResolver, DefaultResolver, ThreadedResolver from .signals import Signal from .streams import ( DataQueue, EMPTY_PAYLOAD, EofStream, FlowControlDataQueue, StreamReader ) from .tracing import ( TraceConfig, TraceConnectionCreateEndParams, TraceConnectionCreateStartParams, TraceConnectionQueuedEndParams, TraceConnectionQueuedStartParams, TraceConnectionReuseconnParams, TraceDnsCacheHitParams, TraceDnsCacheMissParams, TraceDnsResolveHostEndParams, TraceDnsResolveHostStartParams, TraceRequestChunkSentParams, TraceRequestEndParams, TraceRequestExceptionParams, TraceRequestRedirectParams, TraceRequestStartParams, 
TraceResponseChunkReceivedParams ) __all__ = ( 'hdrs', # client 'BaseConnector', 'ClientConnectionError', 'ClientConnectorCertificateError', 'ClientConnectorError', 'ClientConnectorSSLError', 'ClientError', 'ClientHttpProxyError', 'ClientOSError', 'ClientPayloadError', 'ClientProxyConnectionError', 'ClientResponse', 'ClientRequest', 'ClientResponseError', 'ClientSSLError', 'ClientSession', 'ClientTimeout', 'ClientWebSocketResponse', 'ContentTypeError', 'Fingerprint', 'InvalidURL', 'RequestInfo', 'ServerConnectionError', 'ServerDisconnectedError', 'ServerFingerprintMismatch', 'ServerTimeoutError', 'TCPConnector', 'UnixConnector', 'WSServerHandshakeError', 'request', # cookiejar 'CookieJar', 'DummyCookieJar', # formdata 'FormData', # helpers 'BasicAuth', 'ChainMapProxy', # http 'HttpVersion', 'HttpVersion10', 'HttpVersion11', 'WSMsgType', 'WSCloseCode', 'WSMessage', 'WebSocketError', # multipart 'BadContentDispositionHeader', 'BadContentDispositionParam', 'BodyPartReader', 'MultipartReader', 'MultipartWriter', 'content_disposition_filename', 'parse_content_disposition', # payload 'AsyncIterablePayload', 'BufferedReaderPayload', 'BytesIOPayload', 'BytesPayload', 'IOBasePayload', 'JsonPayload', 'PAYLOAD_REGISTRY', 'Payload', 'StringIOPayload', 'StringPayload', 'TextIOPayload', 'get_payload', 'payload_type', # payload_streamer 'streamer', # resolver 'AsyncResolver', 'DefaultResolver', 'ThreadedResolver', # signals 'Signal', 'DataQueue', 'EMPTY_PAYLOAD', 'EofStream', 'FlowControlDataQueue', 'StreamReader', # tracing 'TraceConfig', 'TraceConnectionCreateEndParams', 'TraceConnectionCreateStartParams', 'TraceConnectionQueuedEndParams', 'TraceConnectionQueuedStartParams', 'TraceConnectionReuseconnParams', 'TraceDnsCacheHitParams', 'TraceDnsCacheMissParams', 'TraceDnsResolveHostEndParams', 'TraceDnsResolveHostStartParams', 'TraceRequestChunkSentParams', 'TraceRequestEndParams', 'TraceRequestExceptionParams', 'TraceRequestRedirectParams', 'TraceRequestStartParams', 
'TraceResponseChunkReceivedParams', ) # type: Tuple[str, ...] try: from .worker import GunicornWebWorker, GunicornUVLoopWebWorker # noqa __all__ += ('GunicornWebWorker', 'GunicornUVLoopWebWorker') except ImportError: # pragma: no cover pass
PypiClean
/Layback-0.0.7.tar.gz/Layback-0.0.7/layback/archive.py
import re, os
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import imageio

try:
    import urllib.request as rq
    from urllib.parse import urlparse
except ImportError:
    import urllib2 as rq
    from urlparse import urlparse


class Archive(object):
    """Fetch Wayback Machine snapshots of a URL, screenshot each snapshot
    with headless Chrome, and assemble the screenshots into an animated GIF.

    Args:
        url: the page to look up, with or without an http(s):// scheme.
        download_path: base directory; frames and ``movie.gif`` are written
            to ``download_path + <host/path> + '/'``.
    """

    def __init__(self, url, download_path):
        self.url = url
        # Prepend a scheme only when the caller omitted one.
        # (The previous expression `a | b == False` parsed as `a | (b == False)`
        # due to operator precedence, so URLs already starting with "http://"
        # got a *second* "http://" prepended in obtain_momentos().)
        self.prefix = "" if self.url.startswith(("http://", "https://")) else "http://"
        # Remove the scheme as a *prefix*.  The previous str.strip('http://')
        # removed any of the characters h/t/p/:/ from both ends and could eat
        # into the host itself (e.g. "http://python.org" -> "ython.org").
        bare = re.sub(r"^https?://", "", self.url).rstrip("/")
        self.download_path = download_path + bare + '/'

    def initialize(self):
        """Run the full pipeline: list mementos, screenshot each, build the GIF."""
        urls = self.obtain_momentos()
        self.save_screenshots(urls)
        self.save_as_gif()

    def obtain_momentos(self):
        """Return memento URLs from the Wayback timemap/link endpoint.

        The first two link entries are the original/timemap headers rather
        than mementos, so they are dropped.
        """
        r = rq.urlopen("https://web.archive.org/web/timemap/link/" + self.prefix + self.url)
        mementos = []
        for line in r:
            mementos.append(
                re.search(r"(?P<url>https?://[^\s]+)", str(line)).group("url").replace(">;", "")
            )
        return mementos[2:]

    def save_screenshots(self, urls):
        """Screenshot each memento URL to ``<n>.png`` in capture order."""
        options = Options()
        options.add_argument('--headless')
        options.add_argument('--no-sandbox')
        options.add_argument('--disable-gpu')
        # NOTE: chrome_options= is deprecated in Selenium 4 (use options=);
        # kept for compatibility with the Selenium 3 line this code targets.
        driver = webdriver.Chrome(chrome_options=options)
        driver.set_window_size(1024, 768)
        if not os.path.exists(self.download_path):
            os.makedirs(self.download_path)
        try:
            for num, url in enumerate(urls):
                driver.get(url)
                driver.save_screenshot(self.download_path + str(num) + '.png')
        finally:
            # Always shut the browser down, even if a page load raises --
            # otherwise each failed run leaks a headless Chrome process.
            driver.quit()

    def save_as_gif(self):
        """Combine the numbered PNG frames into ``movie.gif``.

        Frames are sorted by their numeric stem: os.walk returns names in
        arbitrary order, and plain lexicographic sorting would put "10.png"
        before "2.png", scrambling the animation.
        """
        image_files = next(os.walk(self.download_path))[2]
        frames = sorted(
            (f for f in image_files
             if os.path.splitext(f)[1] == ".png" and os.path.splitext(f)[0].isdigit()),
            key=lambda f: int(os.path.splitext(f)[0]),
        )
        image_data = []
        for image in frames:
            image_data.append(imageio.imread(self.download_path + image))
            os.remove(self.download_path + image)  # frames are consumed into the GIF
        imageio.mimsave(self.download_path + 'movie.gif', image_data)
        print("successfully saved gif to " + self.download_path + "movie.gif")
PypiClean
/LFake-18.9.0.tar.gz/LFake-18.9.0/lfake/providers/job/sk_SK/__init__.py
from .. import Provider as JobProvider


class Provider(JobProvider):
    """Slovak (sk_SK) job-title provider.

    Job titles translated from the superclass's English list; ``job()``
    draws uniformly at random from :attr:`jobs`.
    """

    # Localized job titles.  Note: a few entries are duplicated or left in
    # English in the upstream data; they are preserved as-is because the
    # tuple contents are runtime data (duplicates affect sampling weights).
    jobs = (
        "Administrátor, umenie", "Administrátor, štátna služba", "Advokát",
        "Advokát pre ochranné známky", "Akademický knihovník", "Akupunkturista",
        "Analytický chemik", "Analytik finančného rizika",
        "Angličtina ako lektorka cudzieho jazyka", "Angličtina ako učiteľka druhého jazyka",
        "Animátor", "Arborista", "Archeológ", "Architekt",
        "Architektonický technológ", "Archivár", "Arteterapeut", "Asistent politika",
        "Astronóm", "Audiologický vedec", "Automobilový inžinier",
        "Autorizovaný likvidátor strát", "Autorizovaný účtovník",
        "Autorizovaný účtovník v oblasti verejných financií", "Bankár",
        "Banský inžinier", "Barista", "Biochemik, klinický", "Biomedicínsky inžinier",
        "Biomedicínsky vedec", "Bylinkár", "Bytový manažér / referent",
        "Charitatívny úradník", "Chemický inžinier", "Chemik, analytický",
        "Chiropraktik", "Chirurg", "Copywriter, reklama", "Cytogenetik",
        "Daňový poradca", "Detská sestra", "Detský psychoterapeut",
        "Diagnostický rádiograf", "Dietológ", "Dizajnér, fúkané sklo / vitráž",
        "Dizajnér, grafik", "Dizajnér, interiér / priestor",
        "Dizajnér, keramika / hrnčiarstvo", "Dizajnér, multimédiá",
        "Dizajnér, móda / oblečenie", "Dizajnér, nábytok",
        "Dizajnér, priemyselný / produkt", "Dizajnér, televízia / film",
        "Dizajnér, textil", "Dizajnér, výstava / výstava", "Dizajnér, šperky",
        "Docent", "Dodávateľ", "Dospelý poradenský pracovník", "Dozorca",
        "Dramatický terapeut", "Dôstojník obchodného námorníctva",
        "Dôstojník pre ochranu prírody",
        "Dôstojník pre výcvik a vzdelávanie ozbrojených síl",
        "Editor funkcie časopisu", "Ekológ", "Ekonóm", "Elektroinžinier",
        "Embryológ, klinický", "Energetický inžinier", "Energetický manažér",
        "Environmentálny manažér", "Ergonóm", "Farebný technológ",
        "Farmaceut Spoločenstva", "Farmakológ", "Filmový / video editor",
        "Financny kontrolor", "Finančný manažér", "Finančný obchodník",
        "Finančný plánovač", "Finančný poradca", "Finančný riaditeľ",
        "Firemná sekretárka", "Fotograf", "Fytoterapeut", "Fyzik zdravia",
        "Fyzik, lekár", "Fyziologický vedec", "Fyziológ cvičenia", "Fyzioterapeut",
        "Fúkač skla / dizajnér", "Genetik, molekulárny", "Geochemik",
        "Geodet minerálov", "Geodet poistného rizika",
        "Geofyzik / terénny seizmológ", "Geológ, strojárstvo", "Geológ",
        "Geovedec", "Grafický dizajnér", "Grafik", "Hasič", "Hematológ", "Herec",
        "Herpetológ", "Hlavný marketingový riaditeľ", "Homeopat",
        "Hotelový manažér", "Hudobník", "Hudobný lektor", "Hudobný terapeut",
        "Hutník", "Hydrogeológ", "Hydrografický geodet", "Hydrológ",
        "Hygienik práce", "IT konzultant", "Ilustrátor", "Imunológ",
        "Informačný úradník", "Investičný analytik", "Investičný bankár, funkčný",
        "Investičný bankár, podnikový", "Inšpektor / hodnotiteľ reklamácií",
        "Inšpektor historických budov / referent pamiatkovej starostlivosti",
        "Inšpektor plánovania a rozvoja", "Inšpektor zdravia a bezpečnosti",
        "Inžinier budov", "Inžinier elektroniky",
        "Inžinier kontroly a prístrojového vybavenia", "Inžinier poľnohospodárstva",
        "Inžinier pre automobilový priemysel", "Inžinier výrobných systémov",
        "Inžinier, baníctvo", "Inžinier, biomedicínsky", "Inžinier, chemický",
        "Inžinier, elektronika", "Inžinier, elektrotechnik", "Inžinier, energia",
        "Inžinier, komunikácia", "Inžinier, letecký", "Inžinier, materiály",
        "Inžinier, pozemok", "Inžinier, poľnohospodár",
        "Inžinier, riadenie a prístrojové vybavenie", "Inžinier, ropa",
        "Inžinier, statik", "Inžinier, stavebné služby",
        "Inžinier, stavebný (zmluvný)", "Inžinier, stavebný inžinier (poradenstvo)",
        "Inžinier, technický predaj", "Inžinier, voda",
        "Inžinier, vysielanie (prevádzka)", "Inžinier, výroba", "Inžinier, výroba",
        "Inžinier, výrobné systémy", "Inžinier, vŕtanie", "Inžinier, web",
        "Inžinier, údržba", "Inžinier, údržba (IT)", "Inžiniersky geológ",
        "Kameraman", "Kariérny informačný úradník", "Kariérny poradca",
        "Kariérny poradca pre vysokoškolské vzdelávanie", "Kartograf",
        "Klinický biochemik", "Klinický cytogenetik", "Klinický embryológ",
        "Klinický molekulárny genetik", "Klinický psychológ",
        "Klinický vedec, histokompatibilita a imunogenetika", "Knihovník",
        "Knihovník, verejný", "Kníhkupec", "Komerčný / rezidenčný geodet",
        "Komerčný záhradník", "Komunikačný inžinier",
        "Komunitný umelecký pracovník", "Konateľ spoločnosti", "Kontrolór",
        "Konzervátor / reštaurátor nábytku", "Konzervátor múzea / galérie",
        "Konzervátor, múzeum / galéria", "Konzervátor, nábytok",
        "Konzultant pre dôchodky", "Konzultácia so stavebným inžinierom",
        "Koordinátor dobrovoľníctva", "Kupujúci, maloobchod", "Kurátor",
        "Kurátor múzea / galérie", "Kópia", "Lektor ďalšieho vzdelávania",
        "Lektor, vysokoškolské vzdelanie", "Lektor, ďalšie vzdelávanie",
        "Lekár všeobecného lekára", "Lekár, nemocnica", "Lekár, všeobecná prax",
        "Lekárnik, komunita", "Lekárnik, nemocnica", "Lekársky fyzik",
        "Lekársky ilustrátor", "Lekársky obchodný zástupca", "Lekársky sekretár",
        "Lekársky technický pracovník", "Letecký dispečer", "Letecký inžinier",
        "Letecký sprostredkovateľ", "Lexikograf", "Licencovaný dopravca",
        "Lobista", "Logistika / podpora / administratívny dôstojník ozbrojených síl",
        "Manažér call centra", "Manažér cestovnej kancelárie",
        "Manažér divadelnej scény", "Manažér farmy", "Manažér fitnescentra",
        "Manažér informačných systémov", "Manažér komerčnej umeleckej galérie",
        "Manažér logistiky a distribúcie", "Manažér stravovania",
        "Manažér umeleckej galérie", "Manažér zariadení", "Manažér zábavného parku",
        "Manžérsky konzultant", "Marketingový manažér", "Materiálový inžinier",
        "Mediálny plánovač", "Meteorológ", "Mikrobiológ", "Moderátor, vysielanie",
        "Morský vedec", "Multimediálne programy", "Módny návrhár",
        "Najlepší chlapec", "Nemocničný lekár", "Nemocničný lekárnik",
        "Neurochirurg", "Novinár novín", "Novinár časopisu", "Novinár, noviny",
        "Novinár, vysielanie", "Novinár, časopis", "Nákupca médií",
        "Nákupca, priemyselný", "Námorný architekt",
        "Návrhár interiérov a priestorov", "Návrhár nábytku", "Návrhár výstavy",
        "Návrhár šperkov", "Návrhárka keramiky", "Obchodník s akciami",
        "Obchodník s dlhopismi", "Obchodník s futures", "Oceánograf",
        "Ochranár, historické budovy", "Odborník na životné prostredie",
        "Odevný / textilný technológ", "Odevný / textilný technológ", "Onkológ",
        "Operatívny výskumník", "Operačný dôstojník diplomatických služieb",
        "Operačný dôstojník ozbrojených síl", "Optik, výdaj", "Optometristu",
        "Organizácia podujatia", "Ortoptista", "Osobný asistent", "Osteopat",
        "Očný lekár", "Palubní sprievodcovia", "Patent attorney", "Patológ",
        "Pedagogický psychológ", "Pedikér", "Personalista",
        "Pilot leteckej spoločnosti", "Plánovač dopravy",
        "Plánovač reklamného účtu", "Plánovač tlače",
        "Podnikový investičný bankár", "Podnikový pokladník", "Poistný matematik",
        "Poisťovací maklér", "Poisťovateľ", "Police officer",
        "Poradca pre zdravie a bezpečnosť", "Poradca pre životné prostredie",
        "Poradenská pracovníčka", "Poradenský psychológ", "Posádka",
        "Potravinársky technológ", "Poľnohospodársky konzultant",
        "Pracovník medzinárodnej pomoci / rozvoja", "Pracovník pomoci",
        "Pracovník rozvoja komunity", "Pracovník s mládežou",
        "Pracovný psychológ", "Pracovný terapeut", "Predajca", "Prekladateľ",
        "Prevádzkovateľ televíznej kamery", "Prevádzkový geológ",
        "Prevádzkový investičný bankár", "Prevádzkový riaditeľ",
        "Priemyselný / produktový dizajnér", "Priemyselný kupujúci",
        "Prieskumník trhu", "Prieskumový pracovník", "Probačný úradník",
        "Producent, rádio", "Producent, televízia / film / video",
        "Production assistant, radio", "Production assistant, television",
        "Production designer, theatre/television/film", "Production engineer",
        "Production manager", "Produktový dizajnér", "Produktový manažér",
        "Professor Emeritus", "Programme researcher, broadcasting/film/video",
        "Programmer, applications", "Programmer, multimedia",
        "Programmer, systems", "Proofreader", "Právnik", "Právny tajomník",
        "Prázdninový zástupca", "Psychiatric nurse", "Psychiatrist",
        "Psychologist, clinical", "Psychologist, counselling",
        "Psychologist, educational", "Psychologist, forensic",
        "Psychologist, occupational",
        "Psychologist, prison and probation services",
        "Psychologist, sport and exercise", "Psychoterapeut tanečného pohybu",
        "Psychoterapeut", "Pôda", "Pôrodná asistentka", "Manažér kvality",
        "Radca", "Realitný maklér", "Redaktor, uvedenie do prevádzky",
        "Redakčný asistent", "Referent cestovného ruchu",
        "Referent environmentálnej výchovy",
        "Referent geografických informačných systémov",
        "Referent komunitného vzdelávania", "Referent múzejného vzdelávania",
        "Referent obchodných noriem", "Referent ochrany prírody",
        "Referent odbornej prípravy a rozvoja", "Referent odborového výskumu",
        "Referent poľných pokusov",
        "Referent pre núdzové plánovanie / riadenie",
        "Referent pre rovnosť a rozmanitosť",
        "Referent pre výstavy v múzeách / galériách", "Referent rozvoja umenia",
        "Referent technickej podpory IT", "Referent výstavy, múzeum / galéria",
        "Referent ľudských zdrojov", "Referent školstva pre životné prostredie",
        "Referent školstva, komunita", "Referent školstva, múzeum",
        "Regulátor strát, objednaný", "Reklamný textár",
        "Reklamný umelecký riaditeľ", "Riaditeľ pre stratégiu", "Ropný inžinier",
        "Rozvojový pracovník, komunita",
        "Rozvojový pracovník, medzinárodná pomoc",
        "Rýchly streamer pre štátnu službu", "Sanitka", "Sestra pre dospelých",
        "Sestra pre duševné zdravie", "Sestra s poruchami učenia",
        "Sestra, detská", "Sestra, dospelý", "Sestra, porucha učenia",
        "Sieťový inžinier", "Spisovateľ", "Spolupracovník pre klinický výskum",
        "Spracovateľ geofyzikálnych údajov", "Spravodajský analytik", "Správca",
        "Správca databázy", "Správca dedičstva",
        "Správca dôchodkového systému", "Správca lesov a lesov",
        "Správca nehnuteľnosti / pozemkový agent", "Správca poistného účtu",
        "Správca polohy", "Správca spracovania údajov", "Správca umenia",
        "Správca zákazníckeho centra", "Správca školstva",
        "Správca štátnej služby",
        "Správca, charitatívne / dobrovoľnícke organizácie",
        "Správca, miestna samospráva", "Správca, vzdelávanie", "Správca, šport",
        "Stavebný geodet", "Stavebný geodet", "Stavebný inžinier, poradenstvo",
        "Stavebný inžinier, uzatváranie zmlúv", "Strihač, film / video",
        "Strojný inžinier", "Strážca / strážca",
        "Svetelný technik, vysielanie / film / video", "Súdny psychológ",
        "Súdny vedec", "Súkromný učiteľ hudby", "Tanečnica",
        "Technický dôstojník ozbrojených síl", "Technik údržby",
        "Technológ pre zvieratá", "Technológ varenia piva", "Terapeut, dráma",
        "Terapeut, hudba", "Terapeut, záhradnícky", "Terapeut, šport",
        "Terénny seizmológ", "Tlačový fotograf", "Tlmočník", "Toxikológ",
        "Umelec", "Urobiť", "Uvádzací redaktor", "Učiaci sa mentor",
        "Učiteľ v ranom detstve", "Učiteľ, angličtina ako cudzí jazyk",
        "Učiteľ, hudba", "Učiteľ, prvé roky / pred",
        "Učiteľ, vzdelávanie dospelých", "Učiteľ, základná škola",
        "Učiteľka na základnej škole", "Vedec dát", "Vedec pre kvalitu vody",
        "Vedec vývoja produktov / procesov",
        "Vedecký pracovník lekárskeho laboratória", "Vedúci kancelárie",
        "Vedúci konferenčného centra", "Vedúci osobnej dopravy",
        "Vedúci outdoorových aktivít / vzdelávania", "Vedúci reklamného účtu",
        "Vedúci reštaurácie rýchleho občerstvenia", "Vedúci rybej farmy",
        "Vedúci skladu", "Vedúci strediska voľného času",
        "Vedúci turistického informačného centra", "Vedúci ubytovania",
        "Vedúci zdravotníckej služby", "Vedúci úseku", "Veterinárny chirurg",
        "Video editor", "Vizuálny obchodník",
        "Vládny úradník pre sociálny výskum", "Vodný inžinier", "Vrtný inžinier",
        "Vybavenie záhradník", "Vybavovač poistných udalostí",
        "Vysielaný novinár", "Vysokoškolský lektor", "Výdajný optik",
        "Výkonný riaditeľ", "Výkonný technický riaditeľ", "Výrobný inžinier",
        "Výtvarný umelec", "Vývojár aplikácií", "Vývojár hier",
        "Vývojár počítačových hier", "Vývojár systémov",
        "Výživový poradca pre zvieratá", "Výživový terapeut", "Web dizajnér",
        "Wellsite geológ", "Zamestnanec imigračného úradu",
        "Zdravotná sestra, duševné zdravie", "Zdravotný návštevník",
        "Zememerač / geomatik", "Zmluvný stavebný inžinier", "Zubár",
        "Záchranár", "Záhradnícky konzultant", "Záhradnícky terapeut",
        "Záhradník, komerčný", "Záhradník, vybavenosť", "Záhradný architekt",
        "Záznamník bahna", "Úradník miestnej samosprávy",
        "Úradník pre rybolov", "Účtovník, autorizované verejné financie",
        "Účtovník, autorizovaný", "Účtovník, autorizovaný certifikovaný",
        "Účtovník, autorizovaný manažment", "Účtovný technik",
        "Špecialista na multimédiá", "Špecialista na podporu zdravia",
        "Špeditér", "Šľachtiteľ rastlín / genetik",
    )

    def job(self) -> str:
        """Return a random Slovak job title drawn from :attr:`jobs`."""
        return self.random_element(self.jobs)
PypiClean
/AutoDiff_jnrw-0.0.2-py3-none-any.whl/autodiff/vector/sym.py
from typing import List, Union, Callable

import numpy as np
import itertools

from ..forward.sym import Symbol, symbol

# Scalar types accepted on either side of an elementwise operation.
NumberTypes = Union[int, float, complex]


def symbolic_vec(op: Callable[[Symbol, Union[None, NumberTypes, Symbol]], Symbol]) -> Callable[[Union['SymVec', Symbol], Union[Symbol, None]], Union['SymVec', Symbol]]:
    """
    Decorator that performs the necessary type checking and lifts a function
    of a symbol (and optionally a symbol or number) to a function on symbol
    vectors, applying `op` componentwise.

    :param op: function that takes input of symbol and symbol or number, outputting symbol
    :type op: Callable[[Symbol, Union[None, NumberTypes, Symbol]], Symbol]
    :return: wrapped function
    :rtype: Callable[[Union['SymVec', Symbol], Union[Symbol, None]], Union['SymVec', Symbol]]
    """
    def sym_vec_f(left: Union['SymVec', Symbol], right=None) -> Union['SymVec', Symbol]:
        # single element operation, apply op to each symbol within SymVec
        if right is None:
            if isinstance(left, SymVec):
                return SymVec([op(left.symbols[i]) for i in range(left.shape)], names=left.names)
            else:
                # bare Symbol: apply the op directly
                return op(left)
        # double element operation, apply op based on different cases
        if isinstance(right, Symbol):
            # broadcast a single Symbol across every component
            # NOTE(review): assumes Symbol exposes an iterable `.names`
            # attribute — confirm against forward.sym
            return SymVec([op(s, right) for s in left.symbols], names=left.names + list(right.names))
        elif isinstance(right, SymVec):
            if right.shape != left.shape:
                raise IndexError('Dimension incorrect')
            else:
                # componentwise combination of two equally sized vectors
                return SymVec([op(left.symbols[i], right.symbols[i]) for i in range(left.shape)], names=left.names + right.names)
        elif isinstance(right, np.ndarray):
            # only a 1-D array of matching length is supported
            if right.ndim == 1 and right.shape[0] == left.shape:
                return SymVec([op(left.symbols[i], right[i]) for i in range(left.shape)], names=left.names)
            else:
                raise IndexError('Dimension incorrect')
        else:
            # if right is a float or int
            return SymVec([op(s, right) for s in left.symbols], names=left.names)
    return sym_vec_f


@symbolic_vec
def _add(left, right):
    # elementwise addition
    return left + right


@symbolic_vec
def _multiply(left, right):
    # elementwise multiplication
    return left * right


@symbolic_vec
def _neg(left):
    # elementwise (unary) negation
    return -left


@symbolic_vec
def _div(left, right):
    # elementwise true division
    return left / right
@symbolic_vec
def _rdiv(left, right):
    # reflected true division: right / left
    return right / left


@symbolic_vec
def _pow(left, right):
    # elementwise power
    return left ** right


@symbolic_vec
def _rpow(left, right):
    # reflected power: right ** left
    return right ** left


def concat(symbols: List[Symbol]) -> 'SymVec':
    """Concatenate a list of Symbols into a vector.

    :param symbols: list of Symbol objects
    :type symbols: List[Symbol]
    :return: Symbol vector
    :rtype: SymVec
    """
    return SymVec(symbols)


class vec_gen:
    """Factory producing dummy symbol vectors with unique auto-assigned names."""

    def __init__(self):
        # Monotonically increasing counter shared by every vector this
        # generator produces, so names never collide.
        self.counter = itertools.count()

    def generate(self, dim: int) -> 'SymVec':
        """Generate a dummy Symbol vector with automatic name assignment.

        Names are zero-padded to five digits, so one generator can produce
        up to 99999 symbols in total.

        :param dim: number of symbols in the vector; must be at least 1
        :type dim: int
        :return: Symbol vector
        :rtype: SymVec
        :raises ValueError: if dim < 1
        """
        if dim < 1:
            raise ValueError('dimension must be at least 1')
        return SymVec(
            [Symbol(name='dum_' + str(next(self.counter)).zfill(5)) for i in range(dim)])


def getnames(s: Union[Symbol, 'SymVec']) -> List[str]:
    """Helper method to extract the names from a Symbol or SymVec object.

    :param s: entity from which to extract the names
    :type s: Union[Symbol, 'SymVec']
    :return: list of names
    :rtype: List[str]
    """
    # A SymVec has no `.name` attribute at all, and an unnamed/composite
    # Symbol has name None; both carry their constituent names in `.names`.
    # (The original accessed s.name unconditionally, which raised
    # AttributeError for the SymVec case the signature advertises.)
    if getattr(s, 'name', None) is None:
        return list(s.names)
    return [s.name]


class SymVec:
    """A vector of Symbols supporting elementwise arithmetic, evaluation,
    and differentiation (Jacobian) with respect to all relevant symbol names.
    """

    # This places our class at a higher priority than np.ndarray, so that our
    # __radd__ / __rmul__ (etc.) are used instead of numpy broadcasting.
    __array_priority__ = 1

    def __init__(self, symbols: List[Symbol], names=None):
        """Initiate a symbol vector by passing in a list of symbols and the
        names of all symbols relevant.

        Note, only advanced users with very specific applications in mind
        should construct a SymVec object directly. In the vast majority of
        cases, SymVecs should be constructed through the factory method
        `vec_gen()` or `concat()`.

        :param symbols: a list of Symbols
        :type symbols: List[Symbol]
        :param names: names of all relevant symbols, kept (sorted, unique) to
            map positional arrays in quickeval/quickderiv; collected from
            `symbols` when omitted
        """
        self.symbols = symbols
        if names is None:
            all_name = []
            for s in self.symbols:
                # get the names of all relevant symbols if names is not passed in
                all_name = all_name + getnames(s)
            self.names = sorted(set(all_name))
        else:
            self.names = sorted(set(names))
        # number of distinct input parameters vs. number of output components
        self.in_shape = len(self.names)
        self.shape = len(symbols)

    def eval(self, val: dict) -> np.ndarray:
        """Evaluate the value of the symbol vector based on input values.

        :param val: dictionary of the value for all the symbols,
            {symbol_name: value}
        :return: each individual symbol evaluation result
        :rtype: np.ndarray
        """
        return np.array([s.eval(val) for s in self.symbols])

    def quickeval(self, array_: np.ndarray) -> np.ndarray:
        """A quick evaluation of the symbol vector: map the input array onto
        the sorted list of symbol names, then evaluate.

        :param array_: a 1-D np array representing the values of the
            individual parameters, in the order of `self.names`
        :type array_: np.ndarray
        :return: each individual symbol evaluation result
        :rtype: np.ndarray
        """
        # (The original annotated/documented the return as SymVec, but it has
        # always returned an ndarray; the docs and annotation now match.)
        if array_.ndim != 1:
            # we only support vectors for now
            raise TypeError('incorrect dimension')
        val_v = dict(zip(self.names, array_))
        return np.array([s.eval(val_v) for s in self.symbols])

    def deriv(self, val: dict):
        """Calculate the derivative of the symbol vector.

        :param val: dictionary of the value for all the symbols
        :return: Jacobian matrix, one row per component symbol
        :rtype: np.ndarray

        NOTE(review): the column order of each row relies on the ordering of
        the dict returned by Symbol.deriv — confirm it matches `self.names`.
        """
        return np.array([list(s.deriv(val).values()) for s in self.symbols])

    def quickderiv(self, val: np.ndarray):
        """Calculate the derivative of the symbol vector from a positional
        value array.

        :param val: a 1-D np array representing the values of the individual
            parameters, in the order of `self.names`
        :return: Jacobian matrix
        :rtype: np.ndarray
        """
        val_v = dict(zip(self.names, val))
        return np.array([list(s.deriv(val_v).values()) for s in self.symbols])

    def sum(self):
        """Sum the elements within a SymVec.

        :return: sum of Symbols
        :rtype: Symbol
        """
        result = 0
        for s in self.symbols:
            result = result + s
        return result

    def prod(self):
        """Calculate the product of elements within a SymVec.

        :return: product of Symbols
        :rtype: Symbol
        """
        result = 1
        for s in self.symbols:
            result = result * s
        return result

    # Arithmetic operators delegate to the module-level @symbolic_vec wrappers.
    def __add__(self, other):
        return _add(self, other)

    def __sub__(self, other):
        return _add(self, -other)

    def __mul__(self, other):
        return _multiply(self, other)

    def __radd__(self, other):
        return self.__add__(other)

    def __rmul__(self, other):
        return self.__mul__(other)

    def __rsub__(self, other):
        return _add(-self, other)

    def __neg__(self):
        return _neg(self)

    def __truediv__(self, other):
        return _div(self, other)

    def __rtruediv__(self, other):
        return _rdiv(self, other)

    def __pow__(self, other):
        return _pow(self, other)

    def __rpow__(self, other):
        return _rpow(self, other)

    def __len__(self):
        return self.shape


def dot(left: Union[SymVec, np.ndarray], right: Union[SymVec, np.ndarray]) -> Union[Symbol, SymVec]:
    """Dot product between symbol vector and symbol vector, or symbol vector
    and np.ndarray.

    A 1-D operand pairing yields a single Symbol; a 2-D ndarray `left` is
    treated as a matrix and yields a SymVec with one Symbol per row.

    :param left: left operand
    :type left: Union[SymVec, np.ndarray]
    :param right: right operand
    :type right: Union[SymVec, np.ndarray]
    :return: Symbol vector or symbol
    :rtype: Union[SymVec, Symbol]
    :raises IndexError: on mismatched dimensions
    :raises TypeError: if neither operand is a SymVec
    :raises NotImplementedError: for ndarrays with ndim > 2
    """
    result = 0
    # if dot product of SymVec with SymVec, same as mul then sum
    if isinstance(left, SymVec) and isinstance(right, SymVec):
        if left.shape == right.shape:
            return (left * right).sum()
        else:
            raise IndexError('Dimension incorrect')
    elif isinstance(left, np.ndarray):
        if left.ndim == 1:
            # if left is a np vector, it should return a Symbol
            for i in range(left.shape[0]):
                result = result + left[i] * right.symbols[i]
        elif left.ndim == 2:
            # if left is a np matrix, it should return a SymVec: one row-dot
            # per matrix row. (The original wrapped this comprehension in a
            # redundant `for i in range(left.shape[1])` loop that rebuilt the
            # identical SymVec shape[1] times; the loop is removed.)
            result = SymVec([dot(left[i], right) for i in range(left.shape[0])],
                            names=right.names)
        else:
            raise NotImplementedError
    # if right is np array, it should only be vector
    elif isinstance(left, SymVec) and isinstance(right, np.ndarray):
        if left.shape == right.shape[0] and right.ndim == 1:
            for i in range(left.shape):
                result = result + left.symbols[i] * right[i]
        else:
            raise IndexError('Dimension incorrect')
    else:
        raise TypeError('Illegal Type')
    return result
PypiClean
/NNBuilder-0.3.7.tar.gz/NNBuilder-0.3.7/nnbuilder/kernel/basic.py
def defaultreturn():
    # Shared stub body for every abstract backend operation below: a concrete
    # kernel backend is expected to replace these methods.
    # NOTE(review): AssertionError (rather than NotImplementedError) is
    # unconventional here, but callers may already catch this exact type.
    raise AssertionError('Not Implemented')


class operator(object):
    # Abstract namespace describing the symbolic operations a concrete
    # computation backend must provide. Each inner class groups one family of
    # ops; instances and bound-method shortcuts are exposed at the bottom so
    # callers can write e.g. `operator.dot(a, b)`.

    class Utils:
        # Miscellaneous helpers.
        def cast(self, t, dtype): defaultreturn()

    class Matrix:
        # Matrix multiplication and tensor-layout manipulation.
        def dot(self, l, r): defaultreturn()
        def transpose(self, t): defaultreturn()
        def dimshuffle(self, t, order): defaultreturn()
        def tile(self, t, n): defaultreturn()
        def repeat(self, t, n): defaultreturn()

    class Elemwise:
        # Elementwise arithmetic, activation functions and logic ops.
        def add(self, l, r): defaultreturn()
        def sub(self, l, r): defaultreturn()
        def mul(self, l, r): defaultreturn()
        def div(self, l, r): defaultreturn()
        def floordiv(self, l, r): defaultreturn()
        def mod(self, l, r): defaultreturn()
        def divmod(self, l, r): defaultreturn()
        def pow(self, l, r): defaultreturn()
        def neg(self, t): defaultreturn()
        def abs(self, t): defaultreturn()
        def tanh(self, t): defaultreturn()
        def sigmoid(self, t): defaultreturn()
        def softmax(self, t, keepdims): defaultreturn()
        def relu(self, t): defaultreturn()
        def log(self, t): defaultreturn()
        def exp(self, t): defaultreturn()
        def sqr(self, t): defaultreturn()
        def sqrt(self, t): defaultreturn()
        def round(self, t): defaultreturn()
        def clip(self, t, min, max): defaultreturn()
        def eq(self, l, r): defaultreturn()
        def neq(self, l, r): defaultreturn()
        def lt(self, l, r): defaultreturn()
        def gt(self, l, r): defaultreturn()
        def ge(self, l, r): defaultreturn()
        def le(self, l, r): defaultreturn()
        def and_(self, l, r): defaultreturn()
        def or_(self, l, r): defaultreturn()
        def invert(self, t): defaultreturn()
        def xor(self, l, r): defaultreturn()
        def switch(self, condition, t, f): defaultreturn()

    class Reduction:
        # Axis reductions.
        def sum(self, t, axis, keepdims): defaultreturn()
        def mean(self, t, axis, keepdims): defaultreturn()
        def var(self, t, axis, keepdims): defaultreturn()
        def std(self, t, axis, keepdims): defaultreturn()
        def max(self, t, axis, keepdims): defaultreturn()
        def argmax(self, t, axis, keepdims): defaultreturn()
        def nonzero(self, t, keepdims): defaultreturn()

    class Slicing:
        # Indexed reads and writes.
        def getitem(self, t, key): defaultreturn()
        def setitem(self, t, key, tnew): defaultreturn()

    class Grouping:
        # Reshaping and joining tensors.
        def flatten(self, t): defaultreturn()
        def reshape(self, t, shape): defaultreturn()
        def concatenate(self, tlist, axis): defaultreturn()
        def stack(self, tlist): defaultreturn()

    class Alloc:
        # Tensor construction.
        def arange(self, start, stop, step, dtype): defaultreturn()
        def constant(self, x, name, ndim, dtype): defaultreturn()
        def ones(self, shape, dtype): defaultreturn()
        def oneslike(self, t, dtype): defaultreturn()
        def zeros(self, shape, dtype): defaultreturn()
        def zeroslike(self, t, dtype): defaultreturn()
        def alloc(self, value, shape, dtype): defaultreturn()

    class Nnet:
        # Neural-network specific primitives.
        def conv(self, input, filters, input_shape, filter_shape, mode, pad, stride, dilation): defaultreturn()

        # NOTE(review): unlike the other stubs, im2col/col2im silently return
        # None instead of raising — confirm this asymmetry is intentional.
        def im2col(self, tensor, shape, step=None, mode='normal'): return None
        def col2im(self, tensor, shape, original_shape=None, mode='normal'): return None
        def pool(self, input, window, mode, stride, pad, autopad): defaultreturn()
        def binary_crossentropy(self, y, y_true): defaultreturn()
        def categorical_crossentropy(self, y, y_true): defaultreturn()

    ''' ----------- '''
    ### ShortCuts ###
    ''' ----------- '''
    # Bound-method shortcuts exposed as class attributes, so the ops can be
    # called as `operator.<name>(...)` without touching the inner classes.
    ### Utils ###
    utils = Utils()
    cast = utils.cast
    ### Matrix ###
    matrix = Matrix()
    dot = matrix.dot
    transpose = matrix.transpose
    dimshuffle = matrix.dimshuffle
    tile = matrix.tile
    repeat = matrix.repeat
    ### Elemwise ###
    # operator #
    elemwise = Elemwise()
    add = elemwise.add
    sub = elemwise.sub
    mul = elemwise.mul
    div = elemwise.div
    floordiv = elemwise.floordiv
    mod = elemwise.mod
    divmod = elemwise.divmod
    pow = elemwise.pow
    neg = elemwise.neg
    abs = elemwise.abs
    log = elemwise.log
    exp = elemwise.exp
    sqr = elemwise.sqr
    sqrt = elemwise.sqrt
    round = elemwise.round
    clip = elemwise.clip
    # function #
    tanh = elemwise.tanh
    sigmoid = elemwise.sigmoid
    softmax = elemwise.softmax
    relu = elemwise.relu
    # logic #
    eq = elemwise.eq
    neq = elemwise.neq
    lt = elemwise.lt
    le = elemwise.le
    gt = elemwise.gt
    ge = elemwise.ge
    and_ = elemwise.and_
    or_ = elemwise.or_
    not_ = elemwise.invert
    xor = elemwise.xor
    switch = elemwise.switch
    ### Reduction ###
    reduction = Reduction()
    sum = reduction.sum
    mean = reduction.mean
    var = reduction.var
    std = reduction.std
    max = reduction.max
    argmax = reduction.argmax
    nonzero = reduction.nonzero
    ### Grouping ###
    grouping = Grouping()
    concatenate = grouping.concatenate
    stack = grouping.stack
    reshape = grouping.reshape
    flatten = grouping.flatten
    ### Slicing ###
    slicing = Slicing()
    getitem = slicing.getitem
    setitem = slicing.setitem
    ### alloc ###
    alloc = Alloc()
    constant = alloc.constant
    arange = alloc.arange
    ones = alloc.ones
    zeros = alloc.zeros
    oneslike = alloc.oneslike
    zeroslike = alloc.zeroslike
    ### nnet ###
    nnet = Nnet()
    conv = nnet.conv
    pool = nnet.pool
    im2col = nnet.im2col
    col2im = nnet.col2im
    categorical_crossentropy = nnet.categorical_crossentropy
    binary_crossentropy = nnet.binary_crossentropy


class Randomgraph(object):
    # Abstract random-sampling graph builders a backend must provide.
    def binomial(self, shape, n, p, ndim, dtype): defaultreturn()
    def uniform(self, shape, low, high, ndim, dtype): defaultreturn()
    def normal(self, shape, avg, std, ndim, dtype): defaultreturn()
    def random_integers(self, shape, low, high, ndim, dtype): defaultreturn()
    def choice(self, shape, a, p, ndim, dtype): defaultreturn()
    def poisson(self, shape, lam, ndim, dtype): defaultreturn()
    def permutation(self, shape, n, ndim, dtype): defaultreturn()
    def shuffle_row_elements(self, input): defaultreturn()

    # NOTE(review): "multinormial" looks like a typo for "multinomial", but
    # the name is part of the public interface and is kept as-is.
    def multinormial(self, shape, n, p, ndim, dtype): defaultreturn()


# Module-level singleton through which random ops are accessed.
randomgraph = Randomgraph()


class Kernel(object):
    # Abstract compilation/backend entry points (seeding, graph printing,
    # differentiation, compilation and loops).
    def change_random_seed(self, seed): defaultreturn()
    def change_randomgraph_seed(self, seed): defaultreturn()
    def printing(self, graph, outfile): defaultreturn()
    def grad(self, y, w): defaultreturn()
    def compile(self, inputs, outputs, updates, strict): defaultreturn()
    def scan(self, tensor, fn, sequences=None, outputs_info=None, non_sequences=None, n_steps=None, go_backwards=False): defaultreturn()


# Module-level singleton through which kernel services are accessed.
kernel = Kernel()
PypiClean
/Astropysics-1.0.tar.gz/Astropysics-1.0/distribute_setup.py
# Vendored Distribute bootstrap module (distribute_setup.py). This file is
# third-party boilerplate shipped with the package to install/upgrade
# setuptools at build time; its logic is intentionally left untouched.
import os
import sys
import time
import fnmatch
import tempfile
import tarfile
from distutils import log

try:
    from site import USER_SITE
except ImportError:
    USER_SITE = None

try:
    import subprocess

    def _python_cmd(*args):
        # Run the current interpreter with *args*; True on exit status 0.
        args = (sys.executable,) + args
        return subprocess.call(args) == 0

except ImportError:
    # will be used for python 2.3
    def _python_cmd(*args):
        args = (sys.executable,) + args
        # quoting arguments if windows
        if sys.platform == 'win32':
            def quote(arg):
                if ' ' in arg:
                    return '"%s"' % arg
                return arg
            args = [quote(arg) for arg in args]
        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0

DEFAULT_VERSION = "0.6.10"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
SETUPTOOLS_FAKED_VERSION = "0.6c11"

# PKG-INFO content written when faking a setuptools installation.
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION


def _install(tarball):
    # Unpack the distribute tarball and run its `setup.py install`.
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # installing
        log.warn('Installing Distribute')
        if not _python_cmd('setup.py', 'install'):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
    finally:
        os.chdir(old_wd)


def _build_egg(egg, tarball, to_dir):
    # Unpack the tarball and build a distribute egg into *to_dir*.
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # building an egg
        log.warn('Building a Distribute egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    finally:
        os.chdir(old_wd)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')


def _do_download(version, download_base, to_dir, download_delay):
    # Download (if needed), build the egg, and make it importable.
    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        tarball = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, tarball, to_dir)
    sys.path.insert(0, egg)
    import setuptools
    setuptools.bootstrap_install_from = egg


def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15, no_fake=True):
    # Public entry point: ensure distribute >= *version* is importable,
    # downloading and bootstrapping it if necessary.
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    was_imported = 'pkg_resources' in sys.modules or \
        'setuptools' in sys.modules
    try:
        try:
            import pkg_resources
            # plain setuptools (no _distribute marker): fake it and re-download
            if not hasattr(pkg_resources, '_distribute'):
                if not no_fake:
                    _fake_setuptools()
                raise ImportError
        except ImportError:
            return _do_download(version, download_base, to_dir,
                                download_delay)
        try:
            pkg_resources.require("distribute>="+version)
            return
        except pkg_resources.VersionConflict:
            e = sys.exc_info()[1]
            if was_imported:
                # too late to swap implementations in-process; tell the user
                sys.stderr.write(
                "The required version of distribute (>=%s) is not available,\n"
                "and can't be installed while this script is running. Please\n"
                "install a more recent version first, using\n"
                "'easy_install -U distribute'."
                "\n\n(Currently using %r)\n" % (version, e.args[0]))
                sys.exit(2)
            else:
                del pkg_resources, sys.modules['pkg_resources']  # reload ok
                return _do_download(version, download_base, to_dir,
                                    download_delay)
        except pkg_resources.DistributionNotFound:
            return _do_download(version, download_base, to_dir,
                                download_delay)
    finally:
        if not no_fake:
            _create_fake_setuptools_pkg_info(to_dir)


def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    """Download distribute from a specified location and return its filename

    `version` should be a valid distribute version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    # urlopen import works on both Python 3 and Python 2
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    tgz_name = "distribute-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            log.warn("Downloading %s", url)
            src = urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = src.read()
            dst = open(saveto, "wb")
            dst.write(data)
        finally:
            if src:
                src.close()
            if dst:
                dst.close()
    return os.path.realpath(saveto)


def _patch_file(path, content):
    """Will backup the file then patch it"""
    existing_content = open(path).read()
    if existing_content == content:
        # already patched
        log.warn('Already patched.')
        return False
    log.warn('Patching...')
    _rename_path(path)
    f = open(path, 'w')
    try:
        f.write(content)
    finally:
        f.close()
    return True


def _same_content(path, content):
    # True if the file at *path* already contains exactly *content*.
    return open(path).read() == content


def _no_sandbox(function):
    # Decorator: temporarily disable the setuptools DirectorySandbox
    # violation check while *function* runs, restoring it afterwards.
    def __no_sandbox(*args, **kw):
        try:
            from setuptools.sandbox import DirectorySandbox

            def violation(*args):
                pass
            DirectorySandbox._old = DirectorySandbox._violation
            DirectorySandbox._violation = violation
            patched = True
        except ImportError:
            patched = False
        try:
            return function(*args, **kw)
        finally:
            if patched:
                DirectorySandbox._violation = DirectorySandbox._old
                del DirectorySandbox._old
    return __no_sandbox


@_no_sandbox
def _rename_path(path):
    # Move *path* aside with a timestamped .OLD suffix; returns new name.
    new_name = path + '.OLD.%s' % time.time()
    log.warn('Renaming %s into %s', path, new_name)
    os.rename(path, new_name)
    return new_name


def _remove_flat_installation(placeholder):
    # Neutralize a non-egg ("flat") setuptools install living in *placeholder*.
    if not os.path.isdir(placeholder):
        log.warn('Unkown installation at %s', placeholder)
        return False
    found = False
    for file in os.listdir(placeholder):
        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
            found = True
            break
    if not found:
        log.warn('Could not locate setuptools*.egg-info')
        return
    log.warn('Removing elements out of the way...')
    pkg_info = os.path.join(placeholder, file)
    if os.path.isdir(pkg_info):
        patched = _patch_egg_dir(pkg_info)
    else:
        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
    if not patched:
        log.warn('%s already patched.', pkg_info)
        return False
    # now let's move the files out of the way
    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
        element = os.path.join(placeholder, element)
        if os.path.exists(element):
            _rename_path(element)
        else:
            log.warn('Could not find the %s element of the '
                     'Setuptools distribution', element)
    return True


def _after_install(dist):
    # Post-install hook: write the fake setuptools metadata next to the
    # freshly installed distribution.
    log.warn('After install bootstrap.')
    placeholder = dist.get_command_obj('install').install_purelib
    _create_fake_setuptools_pkg_info(placeholder)


@_no_sandbox
def _create_fake_setuptools_pkg_info(placeholder):
    # Write a fake setuptools egg-info plus a .pth file so pkg_resources
    # believes setuptools (the faked version) is installed.
    if not placeholder or not os.path.exists(placeholder):
        log.warn('Could not find the install location')
        return
    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
            (SETUPTOOLS_FAKED_VERSION, pyver)
    pkg_info = os.path.join(placeholder, setuptools_file)
    if os.path.exists(pkg_info):
        log.warn('%s already exists', pkg_info)
        return
    log.warn('Creating %s', pkg_info)
    f = open(pkg_info, 'w')
    try:
        f.write(SETUPTOOLS_PKG_INFO)
    finally:
        f.close()
    pth_file = os.path.join(placeholder, 'setuptools.pth')
    log.warn('Creating %s', pth_file)
    f = open(pth_file, 'w')
    try:
        f.write(os.path.join(os.curdir, setuptools_file))
    finally:
        f.close()


def _patch_egg_dir(path):
    # Replace an egg directory with a stub containing only fake PKG-INFO.
    # let's check if it's already patched
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    if os.path.exists(pkg_info):
        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
            log.warn('%s already patched.', pkg_info)
            return False
    _rename_path(path)
    os.mkdir(path)
    os.mkdir(os.path.join(path, 'EGG-INFO'))
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    f = open(pkg_info, 'w')
    try:
        f.write(SETUPTOOLS_PKG_INFO)
    finally:
        f.close()
    return True


def _before_install():
    # Pre-install hook: fake out any existing setuptools installation.
    log.warn('Before install bootstrap.')
    _fake_setuptools()


def _under_prefix(location):
    # True when *location* falls under the --root/--prefix/--user target of
    # the current `install` command (or when no install is in progress).
    if 'install' not in sys.argv:
        return True
    args = sys.argv[sys.argv.index('install')+1:]
    for index, arg in enumerate(args):
        for option in ('--root', '--prefix'):
            if arg.startswith('%s=' % option):
                # NOTE(review): splitting on 'root=' even for --prefix= looks
                # like an upstream quirk — '--prefix=X' yields 'X' only
                # because 'root=' is absent; confirm before relying on it.
                top_dir = arg.split('root=')[-1]
                return location.startswith(top_dir)
            elif arg == option:
                if len(args) > index:
                    top_dir = args[index+1]
                    return location.startswith(top_dir)
            elif option == '--user' and USER_SITE is not None:
                # NOTE(review): `option` only ever takes the values '--root'
                # and '--prefix' here, so this branch appears unreachable;
                # kept as-is (vendored code).
                return location.startswith(USER_SITE)
    return True


def _fake_setuptools():
    # Locate an installed setuptools distribution and replace it with fake
    # metadata so distribute can take its place, then relaunch.
    log.warn('Scanning installed packages')
    try:
        import pkg_resources
    except ImportError:
        # we're cool
        log.warn('Setuptools or Distribute does not seem to be installed.')
        return
    ws = pkg_resources.working_set
    try:
        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
                                  replacement=False))
    except TypeError:
        # old distribute API
        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))

    if setuptools_dist is None:
        log.warn('No setuptools distribution found')
        return
    # detecting if it was already faked
    setuptools_location = setuptools_dist.location
    log.warn('Setuptools installation detected at %s', setuptools_location)

    # if --root or --prefix was provided, and if
    # setuptools is not located in them, we don't patch it
    if not _under_prefix(setuptools_location):
        log.warn('Not patching, --root or --prefix is installing Distribute'
                 ' in another location')
        return

    # let's see if its an egg
    if not setuptools_location.endswith('.egg'):
        log.warn('Non-egg installation')
        res = _remove_flat_installation(setuptools_location)
        if not res:
            return
    else:
        log.warn('Egg installation')
        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
        if (os.path.exists(pkg_info) and
            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
            log.warn('Already patched.')
            return
        log.warn('Patching...')
        # let's create a fake egg replacing setuptools one
        res = _patch_egg_dir(setuptools_location)
        if not res:
            return
    log.warn('Patched done.')
    _relaunch()


def _relaunch():
    log.warn('Relaunching...')
    # we have to relaunch the process
    args = [sys.executable] + sys.argv
    sys.exit(subprocess.call(args))


def _extractall(self, path=".", members=None):
    """Extract all members from the archive to the current working
       directory and set owner, modification time and permissions on
       directories afterwards. `path' specifies a different directory
       to extract to. `members' is optional and must be a subset of the
       list returned by getmembers().
    """
    # Backport of TarFile.extractall for old Python; *self* is a TarFile.
    import copy
    import operator
    from tarfile import ExtractError
    directories = []
    if members is None:
        members = self
    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 448  # decimal for oct 0700
        self.extract(tarinfo, path)

    # Reverse sort directories.
    if sys.version_info < (2, 4):
        def sorter(dir1, dir2):
            return cmp(dir1.name, dir2.name)
        directories.sort(sorter)
        directories.reverse()
    else:
        directories.sort(key=operator.attrgetter('name'), reverse=True)

    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError:
            e = sys.exc_info()[1]
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)


def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    tarball = download_setuptools()
    _install(tarball)


if __name__ == '__main__':
    main(sys.argv[1:])
PypiClean
/Flask-RESTy-4.0.2.tar.gz/Flask-RESTy-4.0.2/flask_resty/authentication.py
import flask

from . import context
from .exceptions import ApiError

# -----------------------------------------------------------------------------


def get_request_credentials():
    """Fetch the credentials previously stored for the current request."""
    return context.get("request_credentials")


def set_request_credentials(credentials):
    """Record *credentials* for the current request on the app context."""
    context.set("request_credentials", credentials)


# -----------------------------------------------------------------------------


class AuthenticationBase:
    """Abstract base for API authentication components.

    An authentication component is responsible for extracting the incoming
    request's credentials, if any. Invalid credentials should trigger a 401,
    while unauthenticated users should simply yield `None`. Flask-RESTy ships
    an implementation based on `JSON Web Tokens`_, but any scheme can be
    plugged in by subclassing :py:class:`AuthenticationBase` and implementing
    :py:meth:`get_request_credentials`.

    .. _JSON Web Tokens: https://jwt.io/
    """

    def authenticate_request(self):
        """Resolve this request's credentials and store them on the
        :py:class:`flask.ctx.AppContext`.

        .. warning::

            Flask-RESTy performs no validation of its own; the implementor's
            :py:meth:`get_request_credentials` must reject bad requests.
        """
        credentials = self.get_request_credentials()
        set_request_credentials(credentials)

    def get_request_credentials(self):
        """Extract the credentials carried by the current request, typically
        by inspecting :py:data:`flask.request`.

        .. warning::

            Implementing classes **must** raise an exception on
            authentication failure. A 401 Unauthorized
            :py:class:`ApiError` is recommended.

        :return: The credentials for the current request.
        """
        raise NotImplementedError()


# -----------------------------------------------------------------------------


class NoOpAuthentication:
    """An authentication component that never provides credentials."""

    def authenticate_request(self):
        pass


# -----------------------------------------------------------------------------


class HeaderAuthenticationBase(AuthenticationBase):
    """Base class for components that read the ``Authorization`` header.

    The header is expected to have the shape::

        Authorization: <scheme> <token>

    A query-parameter fallback is also supported, for API clients that
    cannot set request headers.
    """

    #: The expected <scheme> portion of the Authorization request header.
    header_scheme = "Bearer"

    #: Optional fallback query parameter; its value is used as the
    #: credentials when the Authorization header is absent.
    credentials_arg = None

    def get_request_credentials(self):
        token = self.get_request_token()
        return None if token is None else self.get_credentials_from_token(token)

    def get_request_token(self):
        # Prefer the header; fall back to the configured query parameter.
        header_value = flask.request.headers.get("Authorization")
        if header_value is not None:
            return self.get_token_from_authorization(header_value)
        if self.credentials_arg is None:
            return None
        return flask.request.args.get(self.credentials_arg)

    def get_token_from_authorization(self, authorization):
        # A well-formed header splits into exactly two parts; anything else
        # (including a non-string value) is a malformed Authorization header.
        try:
            scheme, token = authorization.split()
        except (AttributeError, ValueError) as e:
            raise ApiError(401, {"code": "invalid_authorization"}) from e
        if scheme.lower() == self.header_scheme.lower():
            return token
        raise ApiError(401, {"code": "invalid_authorization.scheme"})

    def get_credentials_from_token(self, token):
        """Turn the raw token into credentials.

        :param str token: The token from the request headers or query.
        :return: The credentials from the token.
        """
        raise NotImplementedError()


class HeaderAuthentication(HeaderAuthenticationBase):
    """Header authentication where the token itself is the credential.

    Suitable for simple applications in which a fixed secret, shared between
    client and server, both authenticates and identifies the client.
    """

    def get_credentials_from_token(self, token):
        return token
PypiClean
/Editra-0.7.20.tar.gz/Editra-0.7.20/src/syntax/_javascript.py
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: _javascript.py 70228 2011-12-31 20:39:16Z CJP $"
__revision__ = "$Revision: 70228 $"

#-----------------------------------------------------------------------------#
# Imports
import wx.stc as stc

# Local Imports
import synglob
import syndata
import _cpp

#-----------------------------------------------------------------------------#

#---- Keyword Specifications ----#

# JavaScript Keywords
# first tuple element is the keyword-set index; set to 1 for embedded
JS_KEYWORDS = (0, "abstract break boolean byte case const continue catch "
                  "class char debugger default delete do double default "
                  "export false else enum export extend final finally "
                  "float for function goto if implements import in "
                  "instanceof int interface long native new null "
                  "package private protected public return short static "
                  "synchronized switch super this throw throws transient "
                  "try true typeof var void volatile with while")

#---- Syntax Style Spec ----#
# Maps HTML-embedded JavaScript lexer states to Editra style tags.
SYNTAX_ITEMS = [ (stc.STC_HJ_COMMENT, 'comment_style'),
                 (stc.STC_HJ_COMMENTDOC, 'dockey_style'),
                 (stc.STC_HJ_COMMENTLINE, 'comment_style'),
                 (stc.STC_HJ_DEFAULT, 'default_style'),
                 (stc.STC_HJ_DOUBLESTRING, 'string_style'),
                 (stc.STC_HJ_KEYWORD, 'keyword_style'),
                 (stc.STC_HJ_NUMBER, 'number_style'),
                 (stc.STC_HJ_REGEX, 'scalar_style'), # STYLE ME
                 (stc.STC_HJ_SINGLESTRING, 'string_style'),
                 (stc.STC_HJ_START, 'scalar_style'),
                 (stc.STC_HJ_STRINGEOL, 'stringeol_style'),
                 (stc.STC_HJ_SYMBOLS, 'array_style'),
                 (stc.STC_HJ_WORD, 'class_style'),
                 (stc.STC_HJA_COMMENT, 'comment_style'),
                 (stc.STC_HJA_COMMENTDOC, 'dockey_style'),
                 (stc.STC_HJA_COMMENTLINE, 'comment_style'),
                 (stc.STC_HJA_DEFAULT, 'default_style'),
                 (stc.STC_HJA_DOUBLESTRING, 'string_style'),
                 (stc.STC_HJA_KEYWORD, 'keyword_style'),
                 (stc.STC_HJA_NUMBER, 'number_style'),
                 (stc.STC_HJA_REGEX, 'scalar_style'), # STYLE ME
                 (stc.STC_HJA_SINGLESTRING, 'string_style'),
                 (stc.STC_HJA_START, 'scalar_style'),
                 (stc.STC_HJA_STRINGEOL, 'stringeol_style'),
                 (stc.STC_HJA_SYMBOLS, 'array_style'),
                 (stc.STC_HJA_WORD, 'class_style') ]

#-----------------------------------------------------------------------------#

class SyntaxData(syndata.SyntaxDataBase):
    """SyntaxData object for JavaScript"""
    def __init__(self, langid):
        super(SyntaxData, self).__init__(langid)

        # Setup: JavaScript is highlighted with the C++ lexer and reuses the
        # C++ auto-indenter.
        self.SetLexer(stc.STC_LEX_CPP)
        self.RegisterFeature(synglob.FEATURE_AUTOINDENT, _cpp.AutoIndenter)

    def GetKeywords(self):
        """Returns Specified Keywords List """
        return [JS_KEYWORDS,]

    def GetSyntaxSpec(self):
        """Syntax Specifications

        The HJ* styles above apply only to JavaScript embedded in HTML;
        standalone JavaScript uses the shared C++ style items.
        """
        if self.LangId == synglob.ID_LANG_HTML:
            return SYNTAX_ITEMS
        else:
            return _cpp.SYNTAX_ITEMS

    def GetProperties(self):
        """Returns a list of Extra Properties to set """
        return [("fold", "1")]

    def GetCommentPattern(self):
        """Returns a list of characters used to comment a block of code """
        return [u'//']

#---- Syntax Modules Internal Functions ----#

def KeywordString(option=0):
    """Returns the specified Keyword String
    @keyword option: specific subset of keywords to get

    """
    # NOTE(review): `option` is currently ignored — only the single JS
    # keyword string exists, so the full set is always returned.
    return JS_KEYWORDS[1]

#---- End Syntax Modules Internal Functions ----#
PypiClean
/Graphine-0.0.tar.gz/Graphine-0.0/graph/base.py
# Copyright (C) 2009 Geremy Condra
#
# This file is part of Graphine.
#
# Graphine is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Graphine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Graphine. If not, see <http://www.gnu.org/licenses/>.

from collections import deque, namedtuple, defaultdict
import heapq
import copy
from itertools import chain


class GraphElement:
    """Base class for Nodes and Edges.

    A GraphElement.data property is provided to give easier
    access to all of the element's non-structural member
    variables. It returns a dictionary.

    In addition, a GraphElement.name property is provided
    to allow property access to the object's name.

    It also provides an __repr__ member function to make
    easier work of analyzing your graphs.

    Graph elements compare based on names.
    """

    def __repr__(self):
        """Pretty prints this element."""
        classname = type(self).__name__
        name = "name=%s, " % str(self.name)
        attrs = name + ''.join(("%s=%s, " % (k, v) for k, v in self.data.items()))
        # drop the trailing ", " left by the join above
        attrs = attrs[:-2]
        return "%s(%s)" % (classname, attrs)

    def __lt__(self, other):
        """Name-based comparison for sorting."""
        return self.name < other.name

    def __hash__(self):
        """Returns the hash of this object's name."""
        # hash and equality are both name-based, keeping them consistent
        return hash(self._name)

    def __eq__(self, other):
        """Compares the two elements based on name."""
        # strict type check: a Node and an Edge with the same name differ
        if type(self) != type(other):
            return False
        return self.name == other.name

    def __ne__(self, other):
        """Compares the two elements based on name."""
        return not self == other

    @property
    def name(self):
        """Returns this object's name."""
        return self._name

    @property
    def data(self):
        """Returns a dictionary representing the data values of this element.

        Note that elements which are marked private- ie, start
        with a single underscore- will not appear in this
        dictionary.
        """
        return {k:v for k, v in self.__dict__.items() if not k.startswith("_")}


class Node(GraphElement):
    """Base node representation.

    Nodes have seven properties:

    - incoming, which is a list of all edges coming into this node
    - outgoing, which is a list of all edges going away from this node
    - bidirectional, which is a list of all bidirectional edges incident to this node
    - edges, which is a list of all edges with this node as an endpoint
    - degree, which is the number of edges incident to this node
    - data, which is a dictionary of all non-private (ie, user-defined) attributes of this node
    - and name, which is a unique, non-None value optionally passed in at instantiation time, and used for hashing comparisons.

    In the event that a name is not passed in, the object's id will be used.
    """

    def __init__(self, name=None, **kwargs):
        """Initializes the Node object.

        Accepts an optional name argument and a variable
        number of kwargs. If a name is provided, it can be
        accessed through the .name property. If a name is not
        provided, one will be automatically generated based
        on the object's id.

        The kwargs are mapped into attributes.

        Usage:
            >>> n = Node("bob", weight=5)
            >>> n.name
            bob
            >>> n.weight
            5
        """
        if name is not None:
            self._name = name
        else:
            # fall back to the CPython object id as a unique name
            self._name = id(self)
        # adjacency tracking lists, maintained by Graph.add_edge/remove_edge
        self._incoming = []
        self._outgoing = []
        self._bidirectional = []
        for k, v in kwargs.items():
            setattr(self, k, v)

    def get_adjacent(self, outgoing=True, incoming=False):
        """Returns a list of all adjacent nodes.

        The optional arguments outgoing and incoming indicate
        whether to include those edge sets in the search field.
        Their defaults are True and False, accordingly.

        If provided, outgoing and incoming should be booleans.
        """
        adjacent = []
        # `seen` keeps the result list free of duplicates while preserving
        # discovery order
        seen = set()
        if outgoing:
            for edge in self._outgoing:
                if edge.end not in seen:
                    adjacent.append(edge.end)
                    seen.add(edge.end)
        if incoming:
            for edge in self._incoming:
                if edge.start not in seen:
                    adjacent.append(edge.start)
                    seen.add(edge.start)
        # bidirectional edges count as adjacency in either direction
        if outgoing or incoming:
            for edge in self._bidirectional:
                if edge.other_end(self) not in seen:
                    adjacent.append(edge.other_end(self))
                    seen.add(edge.other_end(self))
        return adjacent

    @property
    def incoming(self):
        """Returns a list of all the incoming edges for this node.

        Note that the list returned is a copy, so modifying it
        doesn't impact the structure of the graph.
        """
        return copy.copy(self._incoming + self._bidirectional)

    @property
    def outgoing(self):
        """Returns a list of all the outgoing edges for this node.

        Note that the list returned is a copy, so modifying it
        doesn't impact the structure of the graph.
        """
        return copy.copy(self._outgoing + self._bidirectional)

    @property
    def bidirectional(self):
        """Returns a list of all bidirectional edges for this node.

        Note that the list returned is a copy, so modifying it
        doesn't impact the structure of the graph.
        """
        return copy.copy(self._bidirectional)

    @property
    def edges(self):
        """Returns a list of all edges for this node.

        Note that the list returned is a copy, so modifying it
        doesn't impact the structure of the graph.
        """
        # we have to ensure that all these elements are unique, since loops can be
        # both incoming and outgoing.
        return copy.copy(list(set(self._incoming + self._outgoing + self._bidirectional)))

    @property
    def degree(self):
        """Returns the degree of this Node, ie, the number of edges."""
        return len(self.edges)


class Edge(GraphElement):
    """Base edge representation.

    Edges have five properties.

    - start, which is the starting node
    - end, which is the end node
    - is_directed, which indicates if the edge is directed or undirected
    - name, which is a unique, non-None value optionally passed in at instantiation time, and used for hashing comparisons
    - data, which is a dictionary of all non-private (ie, user-defined) attributes of this node
    """

    def __init__(self, start, end, name=None, is_directed=True, **kwargs):
        """Initializes the Edge.

        Accepts start and end arguments, which should be Node
        objects, an optional name (one will be autogenerated
        based on object id if not provided), the is_directed
        flag, which controls whether this is a directed or
        undirected edge, and a variable number of kwargs, which
        will be mapped into attributes.

        Usage:
            >>> e = Edge(Node("A"), Node("B"), "AB", is_directed=False, weight=5)
            >>> e.name
            "AB"
            >>> e.start
            Node(name=A)
            >>> e.end
            Node(name=B)
            >>> e.weight
            5
        """
        if name is not None:
            self._name = name
        else:
            # fall back to the CPython object id as a unique name
            self._name = id(self)
        self._start = start
        self._end = end
        self._directed = is_directed
        for k, v in kwargs.items():
            setattr(self, k, v)

    def other_end(self, starting_point):
        """Returns the other end of the edge from the given point.

        If the point given is not an endpoint on this edge or
        the endpoint on a directed edge, this raises
        AttributeError.
        """
        # accepts either a Node or a node name as the starting point
        if starting_point is self.start or starting_point is self.start.name:
            return self.end
        elif not self.is_directed:
            # undirected edges can be traversed from either endpoint
            if starting_point is self.end or starting_point is self.end.name:
                return self.start
        raise AttributeError("%s has no endpoint opposite to %s" % (self, starting_point))

    @property
    def start(self):
        """Returns the starting point for this edge."""
        return self._start

    @property
    def end(self):
        """Returns the ending point for this edge."""
        return self._end

    @property
    def is_directed(self):
        """Returns whether this is a directed edge or not."""
        return self._directed


class Graph:
    """A basic graph class, and base for all Graph mixins.

    In graph theoretic terms, this represents a bridge multigraph.
    This means that it supports both directed and undirected edges,
    loops, and parallel edges.

    Note that element names must be unique within this graph;
    non unique element names between graphs are not only allowable,
    but encouraged if you wish for those elements to compare
    equally between graphs.

    Because of its generality, it is suitable as a general-purpose
    Graph representation.
    """

    # element factories; mixins may override these with richer subclasses
    Node = Node
    Edge = Edge

    def __init__(self):
        """Base initializer for Graphs.

        Usage:
            >>> g = Graph()
        """
        # name -> element backing stores
        self._nodes = {}
        self._edges = {}

    #################################################################
    #                          Operators                            #
    #################################################################

    def __contains__(self, element):
        """Returns True if the element is a member of the graph.

        Usage:
            >>> g = Graph()
            >>> n = g.add_node()
            >>> n in g
            True
        """
        # if its a node
        if isinstance(element, self.Node):
            return element.name in self._nodes
        # if its an edge
        elif isinstance(element, self.Edge):
            return element.name in self._edges
        # if its a name
        else:
            return element in self._nodes or element in self._edges

    def __getitem__(self, name):
        """Returns the element corresponding to the given name
        or the given element's name.

        Raises KeyError if it is not found.
        """
        name = self.get_name(name)
        # get the element if it exists
        element = self._nodes.get(name, False)
        element = element or self._edges.get(name, False)
        if not element:
            raise KeyError("%s not in %s" % (name, self))
        return element

    def __and__(self, other):
        """Maps the & operator to the intersection operation."""
        return self.intersection(other)

    def __or__(self, other):
        """Maps the | operator to the union operation."""
        return self.union(other)

    def __sub__(self, other):
        """Maps the - operator to the difference operation."""
        return self.difference(other)

    def __eq__(self, other):
        """Compares based on node and edge names."""
        if set(self._nodes.keys()) == set(other._nodes.keys()):
            if set(self._edges.keys()) == set(other._edges.keys()):
                return True
        return False

    def __lt__(self, other):
        """Compares based on containment.

        This returns True if this graph is contained in other.
        """
        return other.contains(self) and not self.contains(other)

    def __gt__(self, other):
        """Compares based on containment.

        This returns True if it contains other, False otherwise.
        """
        return self.contains(other) and not other.contains(self)

    #################################################################
    #                         Properties                            #
    #################################################################

    @property
    def nodes(self):
        """Returns an iterator over all the nodes in the graph."""
        return self._nodes.values()

    @property
    def edges(self):
        """Returns an iterator over all the edges in the graph."""
        return self._edges.values()

    #################################################################
    #                    Convenience Functions                      #
    #################################################################

    def get_element(self, item):
        """Takes an element or a name and returns an element.

        If no element corresponds to the given name, raises
        KeyError.
        """
        if isinstance(item, GraphElement):
            # resolve by the element's name so that equal-named elements
            # from other graphs map onto this graph's own objects
            element = self._edges.get(item.name, False)
            element = element or self._nodes.get(item.name, False)
            if not element:
                raise KeyError("%s not in %s" % (item, self))
            return element
        else:
            element = self._edges.get(item, False)
            element = element or self._nodes.get(item, False)
            if not element:
                raise KeyError("%s not in %s" % (item, self))
            return element

    def get_name(self, item):
        """Takes an element or a name and returns a name.

        If no element corresponds to the given name, raises
        KeyError
        """
        if isinstance(item, GraphElement):
            item = item.name
        else:
            item = item
        if item in self._nodes:
            return item
        if item in self._edges:
            return item
        raise KeyError("%s not in %s" % (item, self))

    #################################################################
    #                   Graph Construction Tools                    #
    #################################################################

    def add_node(self, name=None, **kwargs):
        """Adds a node with no edges to the current graph.

        The name argument, if given, should be hashable and
        unique in this graph.

        Usage:
            >>> g = Graph()
            >>> g.add_node("bob", weight=5)
            Node(name=bob, weight=5)
        """
        # create the new node
        node = self.Node(name, **kwargs)
        # remove any otherwise identical nodes
        # NOTE(review): re-adding a name silently replaces the old node
        try:
            self.remove_node(node)
        except:
            pass
        # add the node to the backing data store
        self._nodes[node._name] = node
        return node

    def add_edge(self, start, end, name=None, is_directed=True, **kwargs):
        """Adds an edge to the current graph.

        The start and end arguments can be either nodes or
        node names.

        The name argument, if given, should be hashable and
        unique in this graph.

        The optional argument "is_directed" specifies whether
        the given edge should be directed or undirected.
        Usage:
            >>> g = Graph()
            >>> n1, n2 = g.add_node(), g.add_node()
            >>> g.add_edge(n1, n2, weight=5)
            Edge(weight=5)
        """
        # get the start and end points
        start = self.get_element(start)
        end = self.get_element(end)
        # build the edge
        edge = self.Edge(start, end, name, is_directed=is_directed, **kwargs)
        # remove any otherwise identical edges
        # NOTE(review): re-adding a name silently replaces the old edge
        try:
            self.remove_edge(edge)
        except:
            pass
        # and add the edge to the backing data store
        self._edges[edge.name] = edge
        # now take care of adjacency tracking
        if is_directed:
            start._outgoing.append(edge)
            end._incoming.append(edge)
        else:
            start._bidirectional.append(edge)
            # stops the edge from being added twice if it is an undirected
            # loop
            if start is not end:
                end._bidirectional.append(edge)
        return edge

    def remove_node(self, node):
        """Removes a node from the graph.

        Usage:
            >>> g = Graph()
            >>> n = g.add_node()
            >>> g.remove_node(n)
            >>> n in g
            False
        """
        # get the actual node if a name is passed in
        node = self.get_element(node)
        # remove it from adjacency tracking
        # removing a node also removes every edge incident to it
        for edge in node.edges:
            self.remove_edge(edge)
        # remove it from storage
        n = self._nodes.pop(node.name)
        return n

    def remove_edge(self, edge):
        """Removes an edge from the graph.

        Usage:
            >>> g = Graph()
            >>> n1, n2 = g.add_node(), g.add_node()
            >>> e = g.add_edge(n1, n2)
            >>> g.remove_edge(e)
            >>> e in g
            False
        """
        # get the actual edge if a name is passed
        edge = self.get_element(edge)
        # remove it from adjacency tracking
        start = edge.start
        end = edge.end
        if edge.is_directed:
            start._outgoing.remove(edge)
            end._incoming.remove(edge)
        else:
            start._bidirectional.remove(edge)
            # fix the undirected loop problem: a loop appears only once in
            # the single endpoint's bidirectional list
            if start is not end:
                end._bidirectional.remove(edge)
        # remove it from storage
        e = self._edges.pop(edge.name)
        return e

    #########################################################################
    #                       Graph Inspection Tools                          #
    #########################################################################

    def search_nodes(self, **kwargs):
        """Convenience function to get nodes based on some properties.

        Usage:
            >>> g = Graph()
            >>> n1 = g.add_node("bob")
            >>> n2 = g.add_node("bill")
            >>> for node in g.search_nodes(name="bob"):
            ...     print(node)
            Node(name="bob")
        """
        # a node matches when its (name +) data is a superset of the query
        desired_properties = set(kwargs.items())
        for node in self.nodes:
            properties = set(node.data.items())
            if "name" in kwargs:
                properties.add(("name", node.name))
            if properties.issuperset(desired_properties):
                yield node

    def search_edges(self, **kwargs):
        """Convenience function to get edges based on some properties.

        Usage:
            >>> g = Graph()
            >>> n1, n2 = g.add_node(), g.add_node()
            >>> e1 = g.add_edge(n1, n2, weight=4)
            >>> e2 = g.add_edge(n1, n2, "n1->n2", weight=5)
            >>> for edge in g.search_edges(weight=5):
            ...     print(edge)
            Edge(name=n1->n2, weight=5)
        """
        # translate start/end queries from names into Node objects first
        if "start" in kwargs:
            kwargs["start"] = self.get_element(kwargs["start"])
        if "end" in kwargs:
            kwargs["end"] = self.get_element(kwargs["end"])
        desired_properties = set(kwargs.items())
        for edge in self.edges:
            attrs = set(edge.data.items())
            # structural attributes participate only when queried
            if "name" in kwargs:
                attrs.add(("name", edge.name))
            if "start" in kwargs:
                attrs.add(("start", edge.start))
            if "end" in kwargs:
                attrs.add(("end", edge.end))
            if "is_directed" in kwargs:
                attrs.add(("is_directed", edge.is_directed))
            if attrs.issuperset(desired_properties):
                yield edge

    def get_common_edges(self, n1, n2):
        """Gets the common edges between the two nodes.

        Usage:
            >>> g = Graph()
            >>> n1 = g.add_node()
            >>> n2 = g.add_node()
            >>> e = g.add_edge(n1, n2, "fluffy")
            >>> g.get_common_edges(n1, n2)
            {Edge(name="fluffy")}
        """
        # get the actual nodes if names are passed in
        n1 = self.get_element(n1)
        n2 = self.get_element(n2)
        n1_edges = set(n1.edges)
        n2_edges = set(n2.edges)
        return n1_edges & n2_edges

    def walk_nodes(self, start, reverse=False):
        """Provides a generator for application-defined walks.

        The start argument can be either a name or a label.

        The optional reverse argument can be used to do a
        reverse walk, ie, only walking down incoming edges.

        Usage:
            >>> g = Graph()
            >>> n1 = g.add_node()
            >>> n2 = g.add_node()
            >>> e1 = g.add_edge(n1, n2)
            >>> w = g.walk_nodes()
            >>> for adjacent_nodes in w:
            >>>     next_node = adjacent_nodes.pop()
            >>>     w.send(next_node)
        """
        # make sure we have a real node
        start = self.get_element(start)
        # the actual generator function, wrapped for prettitude
        def walker():
            next = start
            while next:
                if not reverse:
                    adjacent = next.get_adjacent()
                else:
                    adjacent = next.get_adjacent(outgoing=False, incoming=True)
                # the caller's .send() supplies the next node to visit
                next = yield(adjacent)
        # the wrapper
        w = walker()
        candidates = next(w)
        while 1:
            selection = (yield candidates)
            candidates = w.send(selection)
            # extra yield pairs each send() with the for-loop's next()
            yield

    def walk_edges(self, start):
        """Provides a generator for application-defined walks.

        Usage is identical to walk_nodes, excepting only that
        it accepts, and yields, Edges in the place of Nodes.
        """
        # make sure we have a real edge
        start = self.get_element(start)
        # the actual generator function
        def walker():
            next = start
            while next:
                incident = list(next.other_end(next.start).outgoing)
                next = yield(incident)
        # convenience wrapper
        w = walker()
        candidates = next(w)
        while 1:
            selection = (yield candidates)
            candidates = w.send(selection)
            # extra yield pairs each send() with the for-loop's next()
            yield

    def heuristic_walk(self, start, selector, reverse=False):
        """Traverses the graph using selector as a selection
        filter on the adjacent nodes.

        The optional reverse argument allows you to do a
        reverse walk, ie, only finding adjacencies according
        to incoming edges rather than outgoing edges.

        Usage:
            >>> g = Graph()
            >>> g.add_node("A")
            >>> g.add_node("B")
            >>> g.add_edge("A", "B", "AB")
            >>> def selector(adjacent_nodes):
            ...     return adjacent_nodes.pop()
            ...
            >>> for node in g.heuristic_walk("A", selector):
            ...     print(node.name)
            B
        """
        w = self.walk_nodes(start, reverse=reverse)
        for candidates in w:
            selection = selector(candidates)
            w.send(selection)
            yield selection

    def heuristic_traversal(self, root, selector):
        """Traverses the graph using selector as a selection
        filter on the unvisited nodes.

        Usage:
            >>> g = Graph()
            >>> n1, n2 = g.add_node("A"), g.add_node("B")
            >>> e = g.add_edge(n1, n2)
            >>> for node in g.a_star_traversal(n1, lambda s: s.pop()):
            >>>     print(node)
            Node(name="A")
            Node(name="B")
        """
        # handle the its-a-name case
        root = self.get_element(root)
        # stores nodes that are known to the algorithm but not yet visited
        discovered = []
        visited = set()
        discovered.append(root)
        # while there are unprocessed nodes
        while discovered:
            # select the next one
            next = selector(discovered)
            yield next
            # visit it
            visited.add(next)
            # get the adjacent nodes
            adjacent = set(next.get_adjacent())
            # filter it against those we've already visited
            not_yet_visited = adjacent - visited
            # make sure we're not double-adding
            for node in not_yet_visited:
                if node not in discovered:
                    discovered.append(node)

    def depth_first_traversal(self, root):
        """Traverses the graph by visiting a node, then a child
        of that node, and so on.

        Usage:
            >>> g = Graph()
            >>> a, b = g.add_node("A"), g.add_node("B")
            >>> c, d = g.add_node("C"), g.add_node("D")
            >>> e1, e2 = g.add_edge(a, b), g.add_edge(a, c)
            >>> e3, e4 = g.add_edge(b, d), g.add_edge(c, d)
            >>> for node in g.depth_first_traversal(a):
            >>>     print(node)
            Node(name="A")
            Node(name="B")
            Node(name="D")
            Node(name="C")
        """
        # popping from the tail of the discovered list gives LIFO = DFS
        for node in self.heuristic_traversal(root, lambda s: s.pop()):
            yield node

    def breadth_first_traversal(self, root):
        """Traverses the graph by visiting a node, then each
        of its children, then their children.

        Usage:
            >>> g = Graph()
            >>> a, b = g.add_node("A"), g.add_node("B")
            >>> c, d = g.add_node("C"), g.add_node("D")
            >>> e1, e2 = g.add_edge(a, b), g.add_edge(a, c)
            >>> e3, e4 = g.add_edge(b, d), g.add_edge(c, d)
            >>> for node in g.breadth_first_traversal(a):
            >>>     print(node)
            Node(name="A")
            Node(name="B")
            Node(name="C")
            Node(name="D")
        """
        # popping from the head of the discovered list gives FIFO = BFS
        for node in self.heuristic_traversal(root, lambda s: s.pop(0)):
            yield node

    def get_connected_components(self):
        """Gets all the connected components from the graph.

        Returns a list of sets of vertices.

        Usage:
            >>> g = Graph()
            >>> n1 = g.add_node(group=1)
            >>> n2 = g.add_node(group=1)
            >>> n3 = g.add_node(group=2)
            >>> e1 = g.add_edge(n1, n2)
            >>> g.get_connected_components()
            [{Node(group=1), Node(group=1)}, {Node(group=2)}]
        """
        # set of all connected components
        connected = []
        # iterate over the nodes
        for node in self.nodes:
            # get all the nodes that are reachable from this node
            discovered = set(self.depth_first_traversal(node))
            add_this = True
            for component in connected:
                # if the newly discovered component is part of
                # an existing component
                if discovered.issubset(component):
                    # don't add it
                    add_this = False
                    break
                # if the existing component is a part of the
                # newly discovered component
                elif discovered.issuperset(component):
                    # don't add it
                    add_this = False
                    # but replace the old component with
                    # the new one
                    connected.remove(component)
                    connected.append(discovered)
                    continue
            # if this component was not a part of an existing
            # component or vice versa...
            if add_this:
                # add it to the components list
                connected.append(discovered)
        return connected

    def get_strongly_connected(self):
        """Returns a list of all strongly connected components.

        Each SCC is expressed as a set of vertices.

        Usage is identical to get_connected_components.
        """
        # list of all SCCs
        strongly_connected_components = []
        # iterate over all connected components
        for c in self.get_connected_components():
            # get an arbitrary node
            arbitrary = c.pop()
            # get all the nodes visitable from there
            visited = [node for node in self.depth_first_traversal(arbitrary)]
            # reverse the direction of the edges in the graph
            self.transpose()
            # while there are still elements which aren't reachable
            while visited:
                current_component = set()
                # DFS in the transposed graph: nodes reachable both ways
                # form one strongly connected component
                for node in self.depth_first_traversal(visited.pop(0)):
                    current_component.add(node)
                    try:
                        visited.remove(node)
                    except:
                        pass
                strongly_connected_components.append(current_component)
            # restore the original edge directions
            self.transpose()
        return strongly_connected_components

    def get_shortest_paths(self, source, get_weight=lambda e: 1):
        """Finds the shortest path to all connected nodes from source.

        The optional get_weight argument should be a callable
        that accepts an edge and returns its weight.

        Returns a dictionary of node -> (path_length, [edges_traversed])
        mappings.

        Usage:
            >>> g = Graph()
            >>> n1 = g.add_node("A")
            >>> n2 = g.add_node("B")
            >>> n3 = g.add_node("C")
            >>> n4 = g.add_node("D")
            >>> e1 = g.add_edge(n1, n2, weight=10)
            >>> e2 = g.add_edge(n1, n4, weight=1)
            >>> e3 = g.add_edge(n2, n3, weight=1)
            >>> e4 = g.add_edge(n3, n4, weight=1)
            >>> d = g.get_shortest_paths(n1, get_weight=lambda e: e.weight)
            >>> d[n1]
            (0, [])
            >>> d[n2]
            (10, [Edge(weight=10)])
            >>> d[n3]
            (11, [Edge(weight=10), Edge(weight=1)])
            >>> d[n4]
            (1, [Edge(weight=1)])
        """
        # Dijkstra's algorithm over a heapq-based priority queue
        # handle the its-a-name case
        source = self.get_element(source)
        # create the paths table
        paths = defaultdict(lambda: (float("inf"), []))
        paths[source] = (0, [])
        # create the minimum distance heap
        unoptomized = [(0, source)]
        # main loop
        while unoptomized:
            # pop the minimum distanced node
            distance, current = heapq.heappop(unoptomized)
            # iterate over its outgoing edges
            for edge in current.outgoing:
                # get the old path to the endpoint
                old_weight, old_path = paths[edge.other_end(current)]
                # get the weight of this path to the edge's
end weight, path = paths[current] weight += get_weight(edge) # if the new path is better than the old path if weight < old_weight: # relax it paths[edge.other_end(current)] = (weight, path + [edge]) # and put it on the heap heapq.heappush(unoptomized, (weight, edge.other_end(current))) return paths def size(self): """Reports the number of edges in the graph. Usage: >>> g = Graph() >>> n1, n2 = g.add_node(), g.add_node() >>> g.size() 0 >>> e = g.add_edge(n1, n2) >>> g.size() 1 """ return len(self.edges) def order(self): """Reports the number of nodes in the graph. Usage: >>> g = Graph() >>> g.order() 0 >>> n = g.add_node() >>> g.order() 1 """ return len(self.nodes) ######################################################################### # Graph Rewriting Tools # ######################################################################### def move_edge(self, edge, start=None, end=None): """Moves the edge, leaving its data intact. Does not change a directed edge into an undirected edge. """ # get the edge if its a name edge = self.get_element(edge) if edge.is_directed: edge.start._outgoing.remove(edge) edge.end._incoming.remove(edge) else: try: # to fix the problem with undirected loops edge.start._bidirectional.remove(edge) edge.end._bidirectional.remove(edge) except: pass edge._start = start or edge.start edge._end = end or edge.end if edge.is_directed: edge.start._outgoing.append(edge) edge.end._incoming.append(edge) else: edge.start._bidirectional.append(edge) # fix the problem with undirected loops if start is not end: edge.end._bidirectional.append(edge) return edge def contract_edge(self, edge, node_data): """Contracts the given edge, merging its endpoints. node_data should be a callable that returns a dictionary. That dictionary will be used to initialize the new node. It returns the node so created. 
There are two caveats about using this: 1) The name passed back by node_data, if present, must still be unique- and the old nodes can't be deleted until after the new one is added. 2) Note that if multiple edges exist between the two nodes, this will still contract them! """ # get the edge if its a name edge = self.get_element(edge) # check to make sure that the given edge is the only edge between # it endpoints start = edge.start end = edge.end new_node = self.add_node(**node_data(start, end)) # delete the given edge self.remove_edge(edge) # move all incoming edges for edge in start.incoming + end.incoming: self.move_edge(edge, end=new_node) # move all outgoing edges for edge in start.outgoing + end.outgoing: self.move_edge(edge, start=new_node) # delete the existing endpoints # remember, this may be a loop, so you may # only be able to remove one. try: self.remove_node(start) self.remove_node(end) except KeyError: pass return new_node def transpose(self): """Reverses the directions on all edges in the current graph""" for e in self.edges: self.move_edge(e, start=e.end, end=e.start) def induce_subgraph(self, *nodes): """Returns a new graph composed of only the specified nodes and their mutual edges. 
Usage: Set up your graph: >>> enterprise = Graph() >>> kirk = enterprise.add_node("kirk") >>> spock = enterprise.add_node("spock") >>> bones = enterprise.add_node("mccoy") >>> enterprise.add_edge(kirk, spock) >>> enterprise.add_edge(kirk, bones) As you can see, it has 3 nodes and two edges: >>> enterprise.order() 3 >>> enterprise.size() 2 Now we induce a subgraph that includes spock and bones but not the captain: >>> new_mission = enterprise.induce_subgraph(spock, bones) And can see that it has two nodes- spock and bones- but no edges: >>> new_mission.order() 2 >>> new_mission.size() 0 """ g = type(self)() for node in nodes: node = self.get_element(node) name = node.name data = node.data n = g.add_node(name, **data) for edge in self.edges: if edge.start in g: if edge.end in g: name = edge.name start = edge.start.name end = edge.end.name is_directed = edge.is_directed data = edge.data g.add_edge(start, end, name, **data) return g def edge_induce_subgraph(self, *edges): """Similar to induce_subgraph but accepting edges rather than nodes.""" # create the new graph g = type(self)() for edge in edges: edge = self.get_element(edge) # and add them if they don't already exist if edge.start not in g: g.add_node(edge.start.name, **edge.start.data) if edge.end not in g: g.add_node(edge.end.name, **edge.end.data) # iterate over the provided edges for edge in edges: # and add them, translating nodes as we go g.add_edge(edge.start.name, edge.end.name, edge.name, **edge.data) return g ######################################################################### # Graph Comparison Tools # ######################################################################### def union(self, other): """Returns a new graph with all nodes and edges in either of its parents. 
Usage: >>> g1 = Graph() >>> g2 = Graph() >>> a = g1.add_node(1) >>> b = g1.add_node(3) >>> c = g1.add_node(5) >>> ab = g1.add_edge(a, b, 2) >>> bc = g1.add_edge(b, c, 4) >>> d = g2.add_node(3) >>> e = g2.add_node(5) >>> f = g2.add_node(7) >>> de = g2.add_edge(d, e, 4) >>> ef = g2.add_edge(e, f, 6) >>> g3 = g1 | g2 >>> [node.name for node in g3.nodes] [1, 3, 5, 7] >>> [edge.name for edge in g3.edges] [2, 4, 6] """ # create the graph g = type(self)() # add our nodes for node in chain(self.nodes, other.nodes): g.add_node(node.name, **node.data) # and for edges for edge in chain(self.edges, other.edges): g.add_edge(edge.start.name, edge.end.name, edge.name, edge.is_directed, **node.data) return g def intersection(self, other): """Returns a graph containing only the nodes and edges in both of its parents. Note that both endpoints must exist in the new graph for an edge to exist. Usage: >>> g1 = Graph() >>> g2 = Graph() >>> a = g1.add_node(1) >>> b = g1.add_node(3) >>> c = g1.add_node(5) >>> ab = g1.add_edge(a, b, 2) >>> bc = g1.add_edge(b, c, 4) >>> d = g2.add_node(3) >>> e = g2.add_node(5) >>> f = g2.add_node(7) >>> de = g2.add_edge(d, e, 4) >>> ef = g2.add_edge(e, f, 6) >>> g3 = g1 & g2 >>> [node.name for node in g3.nodes] [3, 5] >>> [edge.name for edge in g3.edges] [4] """ # create the graph g = type(self)() # iterate through our nodes for node in self.nodes: if node in other: name = node.name data = node.data g.add_node(name, **data) # and theirs for node in other.nodes: if node in self: name = node.name data = node.data g.add_node(name, **data) # ...and our edges... 
for edge in self.edges: if edge in other: name = edge.name start = edge.start end = edge.end if start in g and end in g: is_directed = edge.is_directed data = edge.data g.add_edge(start.name, end.name, name, is_directed, **data) # ...and theirs for edge in other.edges: if edge in self: name = edge.name start = edge.start end = edge.end if start in g and end in g: is_directed = edge.is_directed data = edge.data g.add_edge(start.name, end.name, name, is_directed, **data) return g def difference(self, other): """Return a graph composed of the nodes and edges not in the other. Usage: >>> g1 = Graph() >>> g2 = Graph() >>> a = g1.add_node(1) >>> b = g1.add_node(3) >>> c = g1.add_node(5) >>> ab = g1.add_edge(a, b, 2) >>> bc = g1.add_edge(b, c, 4) >>> d = g2.add_node(3) >>> e = g2.add_node(5) >>> f = g2.add_node(7) >>> de = g2.add_edge(d, e, 4) >>> ef = g2.add_edge(e, f, 6) >>> g3 = g1 & g2 >>> [node.name for node in g3.nodes] [1] >>> [edge.name for edge in g3.edges] [] """ # create the graph g = type(self)() # create all the equivalent nodes for node in self.nodes: if node not in other: g.add_node(node.name, **node.data) # create all the equivalent edges for edge in self.edges: if edge not in other: if edge.start in g and edge.end in g: g.add_edge(edge.start, edge.end, edge.name, **edge.data) return g def contains(self, other): """Tests to see if other is a subgraph of this graph. Comparison is based on names, and compares both nodes and edges. """ if set(self.nodes).issuperset(other.nodes): if set(self.edges).issuperset(other.edges): return True return False
PypiClean
/MetaCSV-0.1.1.tar.gz/MetaCSV-0.1.1/CONTRIBUTING.rst
============ Contributing ============ Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. You can contribute in many ways: Types of Contributions ---------------------- Report Bugs ~~~~~~~~~~~ Report bugs at https://github.com/delgadom/metacsv/issues. If you are reporting a bug, please include: * Your operating system name and version. * Any details about your local setup that might be helpful in troubleshooting. * Detailed steps to reproduce the bug. Fix Bugs ~~~~~~~~ Look through the GitHub issues for bugs. Anything tagged with "bug" is open to whoever wants to implement it. Implement Features ~~~~~~~~~~~~~~~~~~ Look through the GitHub issues for features. Anything tagged with "feature" is open to whoever wants to implement it. Write Documentation ~~~~~~~~~~~~~~~~~~~ MetaCSV could always use more documentation, whether as part of the official MetaCSV docs, in docstrings, or even on the web in blog posts, articles, and such. Submit Feedback ~~~~~~~~~~~~~~~ The best way to send feedback is to file an issue at https://github.com/delgadom/metacsv/issues. If you are proposing a feature: * Explain in detail how it would work. * Keep the scope as narrow as possible, to make it easier to implement. * Remember that this is a volunteer-driven project, and that contributions are welcome :) Get Started! ------------ Ready to contribute? Here's how to set up `metacsv` for local development. 1. Fork the `metacsv` repo on GitHub. 2. Clone your fork locally:: $ git clone git@github.com:your_name_here/metacsv.git 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: $ mkvirtualenv metacsv $ cd metacsv/ $ python setup.py develop 4. Create a branch for local development:: $ git checkout -b name-of-your-bugfix-or-feature Now you can make your changes locally. 5. 
When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: $ flake8 metacsv tests $ python setup.py test $ tox To get flake8 and tox, just pip install them into your virtualenv. 6. Commit your changes and push your branch to GitHub:: $ git add . $ git commit -m "Your detailed description of your changes." $ git push origin name-of-your-bugfix-or-feature 7. Submit a pull request through the GitHub website. Pull Request Guidelines ----------------------- Before you submit a pull request, check that it meets these guidelines: 1. The pull request should include tests. 2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and add the feature to the list in README.rst. 3. The pull request should work for Python 2.6, 2.7, and 3.3, and for PyPy. Check https://travis-ci.org/delgadom/metacsv/pull_requests and make sure that the tests pass for all supported Python versions. Tips ---- To run a subset of tests:: $ python -m unittest tests.test_metacsv
PypiClean
/FEADRE_AI-1.0.7.tar.gz/FEADRE_AI-1.0.7/FEADRE_AI/ai/match_loss/fmatch.py
import torch
from FEADRE_AI.GLOBAL_LOG import flog
from FEADRE_AI.ai.fits.olf.floss import f_bce_loss
from FEADRE_AI.ai.object_detection.boxes.f_boxes import bbox_iou_v3, ltrb2xywh, xywh2ltrb
from FEADRE_AI.ai.calc.f_calc_adv import f_cre_grid_cells
from FEADRE_AI.ai.picture.f_show import f_show_od_ts4plt_v3
import numpy as np
import torch.nn.functional as F


@torch.no_grad()
def match4yolox(gltrb_b, glabels_b, num_classes, pcls_b,
                p_xywh_input_b, p_ltrb_input_b, pconf_b,
                strides_match_input,
                img_ts=None, is_visual=False,
                center_radius=2.5):
    '''
    SimOTA-style label assignment (YOLOX). The computation here is only used
    to pick which predictions are positives for each GT box.

    :param gltrb_b: [ngt, 4] GT boxes, ltrb, input-image scale
    :param glabels_b: [ngt] GT labels (1-based; 0 is background)
    :param num_classes: number of foreground classes
    :param pcls_b: [dim, num_classes] raw class logits
    :param p_xywh_input_b: [dim, 4] decoded predictions, xywh, input scale
    :param p_ltrb_input_b: [dim, 4] decoded predictions, ltrb, input scale
    :param pconf_b: [dim] raw objectness logits
    :param strides_match_input: [dim] per-location stride, input scale
    :param img_ts: image tensor, only for visual debugging
    :param is_visual: show matched positives when True
    :param center_radius: radius (in strides) of the center-prior region
    :return: (mask_pos_b [dim] bool, gcls_b_res [dim, num_classes] IoU-weighted
             one-hot targets, gltrb_b_res [dim, 4] matched GT boxes)
    '''
    ngt = len(glabels_b)
    dim = len(strides_match_input)
    # Broadcast predictions against every GT: [ngt, dim, 4]
    p_xywh_input_3d = p_xywh_input_b.unsqueeze(0).repeat(ngt, 1, 1)
    # Per-location center radius in input pixels: [ngt, dim]
    radius_ts_input_3d = (strides_match_input * center_radius).unsqueeze(0).repeat(ngt, 1)
    gltrb_i_b_3d = gltrb_b.unsqueeze(1).repeat(1, dim, 1)
    l = gltrb_i_b_3d[..., 0]
    t = gltrb_i_b_3d[..., 1]
    r = gltrb_i_b_3d[..., 2]
    b = gltrb_i_b_3d[..., 3]

    # --- condition 1: prediction center falls inside some GT box --- [ngt, dim] -> [dim]
    mask_col_lr = torch.logical_and(p_xywh_input_3d[..., 0] >= l, p_xywh_input_3d[..., 0] <= r)
    mask_row_tb = torch.logical_and(p_xywh_input_3d[..., 1] >= t, p_xywh_input_3d[..., 1] <= b)
    mask_in_gtboxes_1d = torch.logical_and(mask_col_lr, mask_row_tb).any(0)

    # --- condition 2: prediction center within `center_radius` strides of a GT center ---
    gxywh_b_3d = ltrb2xywh(gltrb_i_b_3d)
    mask_radius_1d = torch.logical_and(
        torch.abs(p_xywh_input_3d[..., 0] - gxywh_b_3d[..., 0]) < radius_ts_input_3d,
        torch.abs(p_xywh_input_3d[..., 1] - gxywh_b_3d[..., 1]) < radius_ts_input_3d).any(0)

    # OR of the two conditions: loose candidate pool (helps small objects get matched)
    mask_in_radius_or = torch.logical_or(mask_in_gtboxes_1d, mask_radius_1d)
    # AND of the two conditions: strong candidates, almost always selected
    mask_in_radius_and = torch.logical_and(mask_in_gtboxes_1d, mask_radius_1d)

    # IoU between every GT and every candidate: [ngt, dim]
    iou = bbox_iou_v3(gltrb_b, p_ltrb_input_b, mode='iou', is_aligned=False)
    c_iou = -torch.log(iou + 1e-8)

    # Classification cost: BCE between (cls * obj) scores and one-hot GT labels.
    # Labels are 1-based, hence the `- 1` before one_hot.
    gcls4cost = F.one_hot(glabels_b.to(torch.int64) - 1, num_classes).float() \
        .unsqueeze(1).repeat(1, dim, 1)
    _pcls_conf_sigmoid = pcls_b.sigmoid() * pconf_b.sigmoid().unsqueeze(-1)
    pcls_conf_sigmoid_ngt = _pcls_conf_sigmoid.unsqueeze(0).repeat(ngt, 1, 1)
    # sqrt_ smooths the joint score before BCE; cost summed over classes
    c_cls = f_bce_loss(pcls_conf_sigmoid_ngt.sqrt_(), gcls4cost, reduction='none').sum(-1)
    del pcls_conf_sigmoid_ngt
    del _pcls_conf_sigmoid

    # Total cost [ngt, dim]; huge penalties push selection toward candidates that
    # satisfy the AND condition first, then at least the OR condition.
    cost = c_cls + 3.0 * c_iou \
           + 100000.0 * torch.logical_not(mask_in_radius_and).unsqueeze(0).repeat(ngt, 1) \
           + 100000.0 * torch.logical_not(mask_in_radius_or).unsqueeze(0).repeat(ngt, 1)

    # Dynamic-k: per-GT positive count = floor(sum of its top-10 IoUs), at least 1
    iou_top_val, _ = torch.topk(iou, 10, dim=-1)
    dynamic_ks = torch.clamp(iou_top_val.sum(1).int(), min=1)

    mask_pos_b = torch.zeros_like(mask_in_radius_and, dtype=torch.bool)
    gcls_b_res = torch.zeros_like(pcls_b, dtype=torch.float)
    gltrb_b_res = torch.zeros_like(p_ltrb_input_b)
    # NOTE(review): a prediction may still be claimed by several GTs here
    # (later GTs overwrite earlier ones) — duplicates are not resolved.
    for i in range(ngt):
        # smallest-cost dynamic_ks[i] candidates for GT i
        _, index = torch.topk(cost[i], dynamic_ks[i], dim=-1, largest=False)
        mask_pos_b[index] = True
        # IoU-weighted soft one-hot class target
        gcls_b_res[index, glabels_b[i] - 1] = 1. * iou[i, index]
        gltrb_b_res[index] = gltrb_b[i]

    del c_cls, c_iou, cost, iou
    if is_visual:
        flog.debug('dynamic_ks %s', dynamic_ks.sum())
        flog.debug('num_pos %s\n', mask_pos_b.sum())
        p_ltrb_input_pos = p_ltrb_input_b[mask_pos_b]
        f_show_od_ts4plt_v3(img_ts, g_ltrb=gltrb_b.cpu(),
                            g_texts=glabels_b.cpu().tolist(),
                            p_ltrb=p_ltrb_input_pos.cpu(),
                            is_normal=True,
                            )
    return mask_pos_b, gcls_b_res, gltrb_b_res


def decode_yolox(pt_xywh_b, grids_t):
    '''
    Decode raw YOLOX regression outputs into feature-map coordinates.

    :param pt_xywh_b: [..., 4] raw offsets; channel must be the last dim
    :return: [..., 4] xywh in feature-map units (xy = offset + grid cell, wh = exp)
    '''
    p_xy_t = pt_xywh_b[..., :2] + grids_t
    p_wh_t = pt_xywh_b[..., 2:].exp()
    p_xywh_t = torch.cat([p_xy_t, p_wh_t], -1)
    return p_xywh_t


def match4atss(gltrb_i_b, anc_ltrb_i, nums_dim_t_list, num_atss_topk=9,
               glabels_b=None, img_ts=None, is_visual=False):
    '''
    ATSS label assignment. All coordinates are input-image scale; called per batch item.

    Core ideas:
      1. keep the positive count comparable across object sizes, so small
         objects also get several anchors;
      2. threshold = mean + std of the candidates' IoUs, used for the first cut;
      3. require the anchor center to lie inside the GT box.

    :param gltrb_i_b: [ngt, 4] GT boxes, ltrb, input scale
    :param anc_ltrb_i: [dim, 4] anchors, ltrb, input scale
    :param nums_dim_t_list: per-level location counts, e.g. [1600, 400, 100];
                            used for level-wise indexing (could be dropped for 1 level)
    :param num_atss_topk: candidates per level per GT (multiply by anchors/location)
    :param glabels_b: unused; only for visual debugging
    :param img_ts: only for visual debugging
    :param is_visual: show candidates when True
    :return: (msak_pos_1d [dim] positive mask,
              anc_max_iou [dim] best IoU per anchor,
              g_index [dim] index of the matched GT per anchor)
    '''

    def _force_set(device, ious_ag, mask_pos4all, mask_pos4distances, num_atss_topk):
        # For every GT with no positive, force-match its num_atss_topk
        # highest-IoU anchors among its distance candidates.
        # [dim, ngt] -> [ngt] -> tuple of unmatched GT indices
        indexes = torch.where(mask_pos4all.sum(0) == 0)[0]
        _mask = torch.zeros_like(ious_ag, dtype=torch.bool, device=device)
        for ii in indexes:
            # distance-candidate mask of this GT: [dim]
            _mask_dis = mask_pos4distances[:, ii].squeeze(-1)
            _iou_s = ious_ag[_mask_dis, ii]
            max_index = _iou_s.topk(num_atss_topk)[1]
            _m = torch.zeros_like(_iou_s, dtype=torch.bool)
            _m[max_index] = True
            _mask[_mask_dis, ii] = _m
        return _mask

    device = gltrb_i_b.device  # FIX: was `.DEVICE`; torch tensors expose `.device`
    anc_xywh_i = ltrb2xywh(anc_ltrb_i)
    # anchor-vs-GT IoU matrix: [dim, ngt]
    ious_ag = bbox_iou_v3(anc_ltrb_i, gltrb_i_b)
    num_gt = gltrb_i_b.shape[0]

    # center-to-center distance of every anchor to every GT: [dim, ngt]
    gxywh_i_b = ltrb2xywh(gltrb_i_b)
    distances = (anc_xywh_i[:, None, :2] - gxywh_i_b[None, :, :2]).pow(2).sum(-1).sqrt()

    idxs_candidate = []  # per-level indices of the closest anchors (topk per GT)
    index_start = 0  # running anchor offset of the current level
    for i, num_dim_feature in enumerate(nums_dim_t_list):
        # pick topk closest anchors of this level for every GT
        index_end = index_start + num_dim_feature
        distances_per_level = distances[index_start:index_end, :]
        # topk cannot exceed the level's anchor count
        topk = min(num_atss_topk, num_dim_feature)
        _, topk_idxs_per_level = distances_per_level.topk(topk, dim=0, largest=False)
        idxs_candidate.append(topk_idxs_per_level + index_start)
        index_start = index_end
    # candidate index matrix used for the IoU mean/std: [levels*topk, ngt]
    idxs_candidate = torch.cat(idxs_candidate, dim=0)

    '''--- IoUs of the distance-selected candidates ---'''
    # advanced indexing: per column (GT), gather that GT's candidate IoUs
    ious_candidate = ious_ag[idxs_candidate, torch.arange(num_gt)]
    mask_pos4distances = torch.zeros_like(distances, device=device, dtype=torch.bool)
    mask_pos4distances[idxs_candidate, torch.arange(idxs_candidate.shape[1])] = True

    '''--- adaptive IoU threshold = mean + std per GT ---'''
    _iou_mean_per_gt = ious_candidate.mean(dim=0)
    _iou_std_per_gt = ious_candidate.std(dim=0)
    _iou_thresh_per_gt = _iou_mean_per_gt + _iou_std_per_gt

    '''--- first cut by IoU threshold ---'''
    mask_pos4iou = ious_ag >= _iou_thresh_per_gt.unsqueeze(0)

    '''--- anchor center must lie strictly inside the GT box ---'''
    dlt = anc_xywh_i[:, None, :2] - gltrb_i_b[None, :, :2]
    drb = gltrb_i_b[None, :, 2:] - anc_xywh_i[:, None, :2]
    mask_pos4in_gt = torch.all(torch.cat([dlt, drb], dim=-1) > 0.01, dim=-1)

    mask_pos4all = torch.logical_and(torch.logical_and(mask_pos4distances, mask_pos4iou),
                                     mask_pos4in_gt)

    '''--- final positive mask: [dim, ngt] -> [dim] ---'''
    msak_pos_1d = mask_pos4all.any(1)

    '''--- if several GTs claim one anchor, it goes to the highest-IoU GT ---'''
    anc_max_iou, g_index = ious_ag.max(dim=1)

    ''' fallback: force-match GTs that ended up with no positives '''
    if msak_pos_1d.sum() == 0 or (mask_pos4iou.sum(0) == 0).any() or (mask_pos4all.sum(0) == 0).any():
        '''
        msak_pos_1d   : no GT of this image matched at all (rare)
        mask_pos4iou  : some GT passed no IoU-threshold anchor
        mask_pos4all  : some GT has no positive after all conditions
        '''
        # stage 1: force the 5 best-IoU candidates, then re-apply the in-box rule
        _mask = _force_set(device, ious_ag, mask_pos4all, mask_pos4distances, 5)
        mask_pos4all[_mask] = True
        mask_pos4all = torch.logical_and(mask_pos4all, mask_pos4in_gt)
        # stage 2: still-unmatched GTs take their 2 best-IoU anchors unconditionally
        if (mask_pos4all.sum(0) == 0).any():
            _mask = _force_set(device, ious_ag, mask_pos4all, mask_pos4distances, 2)
            mask_pos4all[_mask] = True
        msak_pos_1d = mask_pos4all.any(1)
        # Forced positives may have tiny IoU; boost them so argmax keeps the
        # forced GT assignment (anc_max_iou is no longer a real IoU here).
        ious_ag[mask_pos4all] = 999
        anc_max_iou, g_index = ious_ag.max(dim=1)

    if is_visual or msak_pos_1d.sum() == 0 or (mask_pos4all.sum(0) == 0).any():
        # Only reached for explicit visual debugging or if both repairs failed.
        flog.error('双重修正后不可能没有 mask_pos4iou= %s, mask_pos4all= %s '
                   % (mask_pos4iou.sum(0), mask_pos4all.sum(0)))
        from FEADRE_AI.ai.picture import f_show_od_ts4plt_v3
        # --- show distance candidates (topk per level per GT) ---
        dim, ngt = mask_pos4distances.shape
        anc_ltrb_i_pos = anc_ltrb_i.view(1, dim, 4).repeat(ngt, 1, 1)[mask_pos4distances.t()]
        f_show_od_ts4plt_v3(img_ts, g_ltrb=gltrb_i_b.cpu(),
                            p_ltrb=anc_ltrb_i_pos.cpu(),
                            is_normal=True,
                            )
        # --- show IoU-threshold survivors ---
        anc_ltrb_i_pos = anc_ltrb_i.view(1, dim, 4).repeat(ngt, 1, 1)[mask_pos4iou.t()]
        f_show_od_ts4plt_v3(img_ts, g_ltrb=gltrb_i_b.cpu(),
                            p_ltrb=anc_ltrb_i_pos.cpu(),
                            is_normal=True,
                            )
        # --- show final positives after all filters ---
        f_show_od_ts4plt_v3(img_ts, g_ltrb=gltrb_i_b.cpu(),
                            p_ltrb=anc_ltrb_i[msak_pos_1d].cpu(),
                            is_normal=True,
                            )
    return msak_pos_1d, anc_max_iou, g_index


def decode4nanodet(anc_xy_t, p_tltrb_t, max_size_hw=None):
    '''
    NanoDet decode: predicted distances -> boxes (p -> g).

    :param anc_xy_t: [dim, 2] anchor-point xy, feature-map scale
    :param p_tltrb_t: [batch, dim, 4] predicted l/t/r/b distances
    :param max_size_hw: clamp bound used at inference; equivalent to clamping
                        after normalization
    :return: [batch, dim, 4] boxes, ltrb
    '''
    assert anc_xy_t.shape[-1] == 2, 'anc_xy_t 输入应为xy shape = %s' % anc_xy_t.shape
    x1 = anc_xy_t[..., 0] - p_tltrb_t[..., 0]
    y1 = anc_xy_t[..., 1] - p_tltrb_t[..., 1]
    x2 = anc_xy_t[..., 0] + p_tltrb_t[..., 2]
    y2 = anc_xy_t[..., 1] + p_tltrb_t[..., 3]
    if max_size_hw is not None:
        x1 = x1.clamp(min=0, max=max_size_hw[1])
        y1 = y1.clamp(min=0, max=max_size_hw[0])
        x2 = x2.clamp(min=0, max=max_size_hw[1])
        y2 = y2.clamp(min=0, max=max_size_hw[0])
    return torch.stack([x1, y1, x2, y2], -1)


def encode4nanodet(anc_xy_t, g_ltrb_t, max_val, eps=0.1, is_debug=False):
    '''
    NanoDet encode on the feature map: GT boxes -> per-point l/t/r/b distances.

    :param anc_xy_t: [dim, 2] anchor-point xy, feature-map scale
    :param g_ltrb_t: [batch, dim, 4] GT boxes, ltrb, feature-map scale
    :param max_val: distance cap for matched positives (expected range 0..max_val)
    :param eps: keeps clamped values strictly below max_val
    :param is_debug: hook for checking distances stay within the expected range
    :return: [batch, dim, 4] target distances
    '''
    left = anc_xy_t[:, 0] - g_ltrb_t[..., 0]
    top = anc_xy_t[:, 1] - g_ltrb_t[..., 1]
    right = g_ltrb_t[..., 2] - anc_xy_t[:, 0]
    bottom = g_ltrb_t[..., 3] - anc_xy_t[:, 1]
    g_tltrb_t = torch.stack([left, top, right, bottom], -1)
    if is_debug:
        # for positives the max should stay below max_val
        pass
    if max_val is not None:
        g_tltrb_t = g_tltrb_t.clamp(min=0, max=max_val - eps)
    return g_tltrb_t


def match_yolo1_od(g_ltrb_input_b, g_labels_b, size_wh_t_ts, device, cfg, img_ts_input):
    '''
    YOLOv1 label assignment: each GT is placed into the grid cell containing its
    center. If IoU is needed it must be computed here.

    :param g_ltrb_input_b: [ngt, 4] GT boxes, ltrb, input scale
    :param g_labels_b: [ngt] labels (1-based)
    :param size_wh_t_ts: [2] feature-map size (w, h)
    :param device: target device for the created tensors
    :param cfg: needs NUM_CLASSES, STRIDE, IS_VISUAL
    :param img_ts_input: only for visual debugging
    :return: (g_cls_b_ [hw, NUM_CLASSES] one-hot, g_weight_b_ [hw, 1] box-size
             weights, g_txywh_t_b_ [hw, 4] regression targets)
    '''
    num_gt = g_ltrb_input_b.shape[0]
    g_txywh_t, weights, indexs_colrow_t = encode_yolo1_od(g_ltrb_input_b, size_wh_t_ts, cfg)

    # target grids are (h, w, c)
    g_cls_b_ = torch.zeros((size_wh_t_ts[1], size_wh_t_ts[0], cfg.NUM_CLASSES), device=device)
    g_weight_b_ = torch.zeros((size_wh_t_ts[1], size_wh_t_ts[0], 1), device=device)
    g_txywh_t_b_ = torch.zeros((size_wh_t_ts[1], size_wh_t_ts[0], 4), device=device)

    labels_index = (g_labels_b - 1).long()
    indexs_colrow_t = indexs_colrow_t.long()  # indices must be long
    for i in range(num_gt):
        # index order is [row, col] = [y, x]
        g_cls_b_[indexs_colrow_t[i, 1], indexs_colrow_t[i, 0], labels_index[i]] = 1  # one-hot
        g_weight_b_[indexs_colrow_t[i, 1], indexs_colrow_t[i, 0]] = weights[i]
        g_txywh_t_b_[indexs_colrow_t[i, 1], indexs_colrow_t[i, 0]] = g_txywh_t[i]

    # flatten grids to [h*w, c]
    g_cls_b_ = g_cls_b_.reshape(-1, cfg.NUM_CLASSES)
    g_weight_b_ = g_weight_b_.reshape(-1, 1)
    g_txywh_t_b_ = g_txywh_t_b_.reshape(-1, 4)

    '''visual verification'''
    if cfg.IS_VISUAL:
        mask_pos_1d = (g_weight_b_ > 0).any(-1)  # [h*w]
        sizes_wh_t_ts = size_wh_t_ts.unsqueeze(0).repeat(num_gt, 1)
        flog.debug('size_wh_t_ts = %s', size_wh_t_ts)
        flog.debug('g_txywh_t = %s', g_txywh_t)
        flog.debug('indexs_colrow_t = %s', indexs_colrow_t)
        flog.debug('对应index = %s', indexs_colrow_t[0, 1] * size_wh_t_ts[0] + indexs_colrow_t[0, 0])
        flog.debug('对应index = %s', torch.where(g_weight_b_ > 0))
        flog.debug('g_ltrb_input_b = %s', g_ltrb_input_b)
        grid_wh = [size_wh_t_ts[0].item(), size_wh_t_ts[1].item()]
        # g_txywh_t_b_ is already in sigmoid space, so decode directly
        p_ltrb_t = decode_yolo1_od(p_txywh_t_sigmoidxy=g_txywh_t_b_, grid_wh=grid_wh)
        p_ltrb_t_pos = p_ltrb_t[mask_pos_1d]
        # feature map -> input scale
        p_ltrb_input_pos = p_ltrb_t_pos * cfg.STRIDE
        flog.debug('p_ltrb_input_pos = %s', p_ltrb_input_pos)
        flog.debug(' ----------------------------------------- ')
        f_show_od_ts4plt_v3(img_ts=img_ts_input, g_ltrb=g_ltrb_input_b.cpu(),
                            p_ltrb=p_ltrb_input_pos.cpu(),
                            is_recover_size=False,
                            is_normal=True,  # undo image normalization
                            grid_wh_np=size_wh_t_ts.cpu().numpy()
                            )
    return g_cls_b_, g_weight_b_, g_txywh_t_b_


def encode_yolo1_od(g_ltrb_input_b, size_wh_t_ts, cfg):
    '''
    YOLOv1 encode; the xy part matches the YOLOv2 encoding.

    :return: (g_txywh_t [ngt, 4] targets: cell-relative xy offset + log wh,
              weights [ngt] in 1..2 — small boxes get larger weight,
              indexs_colrow_t [ngt, 2] float cell indices (col, row))
    '''
    # ltrb -> xywh, then input scale -> feature-map scale
    g_xywh_input = ltrb2xywh(g_ltrb_input_b)
    g_xywh_t = g_xywh_input / cfg.STRIDE
    cxys_t = g_xywh_t[:, :2]
    whs_t = g_xywh_t[:, 2:]
    whs_one = whs_t / size_wh_t_ts  # wh normalized to 0..1
    indexs_colrow_t = cxys_t.floor()  # owning grid cell
    g_txy_t = cxys_t - indexs_colrow_t  # offset within the cell
    g_twh_t = whs_t.log()
    g_txywh_t = torch.cat([g_txy_t, g_twh_t], dim=-1)
    # weight in (1, 2]: inflates the loss of small boxes
    weights = 2.0 - torch.prod(whs_one, dim=-1)
    return g_txywh_t, weights, indexs_colrow_t


def decode_yolo1_od(p_txywh_t_sigmoidxy, grid_wh):
    '''
    YOLOv1 decode on the feature map.

    :param p_txywh_t_sigmoidxy: [..., 4]; xy already sigmoid-ed; c last dim
    :param grid_wh: (w, h) of the feature map
    :return: [..., 4] boxes, ltrb, feature-map scale
    '''
    device = p_txywh_t_sigmoidxy.device  # FIX: was `.DEVICE`
    # cell-relative offset + cell grid coordinates
    p_xy_t = p_txywh_t_sigmoidxy[..., :2] \
             + f_cre_grid_cells((grid_wh[1], grid_wh[0]), is_swap=True, num_repeat=1).to(device)
    p_wh_t = p_txywh_t_sigmoidxy[..., 2:].exp()
    p_xywh_t = torch.cat([p_xy_t, p_wh_t], -1)
    p_ltrb_t = xywh2ltrb(p_xywh_t)
    return p_ltrb_t


def match_fcos4od(g_ltrb_input_b, g_labels_b, dim_total, cfg, img_ts=None, ):
    '''
    FCOS label assignment. Only radius positives regress; targets include the
    matched GT boxes (g_ltrb) at the end.

    :param g_ltrb_input_b: [ngt, 4] GT boxes, ltrb, input scale
    :param g_labels_b: [ngt] labels (1-based)
    :param dim_total: total number of locations over all levels
    :param cfg: needs NUM_CLASSES, STRIDES, t_grids_hw, SCALE_THRESHOLDS,
                MATCH_RADIUS, IS_VISUAL
    :param img_ts: only for visual debugging
    :return: (g_cls_b_ [dim,C], g_centerness_b_ [dim,1], g_tltrb_input_b_ [dim,4],
              positive_radius_b_ [dim,1], g_ltrb_input_b_ [dim,4])
    '''
    device = g_ltrb_input_b.device  # FIX: was `.DEVICE`
    g_xywh_input = ltrb2xywh(g_ltrb_input_b)

    # Per-location area of the currently-assigned GT; initialized huge so the
    # first GT always wins, and smaller GTs override larger ones on conflicts.
    _area_b_ = torch.empty(dim_total, device=device)
    _area_b_[:] = 999999999999.
    g_cls_b_ = torch.zeros((dim_total, cfg.NUM_CLASSES), device=device)  # all background
    g_centerness_b_ = torch.zeros((dim_total, 1), device=device)
    g_tltrb_input_b_ = torch.zeros((dim_total, 4), device=device)
    positive_radius_b_ = torch.zeros((dim_total, 1), device=device)  # radius positives
    g_ltrb_input_b_ = torch.zeros((dim_total, 4), device=device)

    # For each GT, over every level's grid: decide which locations are positives
    # and record the regression targets.
    for i in range(len(g_labels_b)):
        l = g_ltrb_input_b[i, 0]
        t = g_ltrb_input_b[i, 1]
        r = g_ltrb_input_b[i, 2]
        b = g_ltrb_input_b[i, 3]
        area_gt_input = torch.prod(g_xywh_input[i][2:])  # GT area = w * h
        index_colrow_input = []
        scale_thresholds = []
        radius = []
        # Build all levels' grid centers in input coordinates.
        # NOTE(review): this is GT-invariant and could be hoisted out of the
        # i-loop; also `f_mershgrid` is not imported in this module — confirm
        # it is provided elsewhere.
        for j, s in enumerate(cfg.STRIDES):
            _grids = f_mershgrid(fix=cfg.t_grids_hw[j][0], col=cfg.t_grids_hw[j][1],
                                 is_no_swap=False).to(device)
            _grids = _grids * s + s // 2  # cell index -> receptive-field center
            index_colrow_input.append(_grids)
            _scale = torch.empty_like(_grids, device=device)
            _scale[:, 0] = cfg.SCALE_THRESHOLDS[j]
            _scale[:, 1] = cfg.SCALE_THRESHOLDS[j + 1]
            scale_thresholds.append(_scale)
            _radius = torch.empty_like(_grids[:, 0], device=device)
            _radius[:] = cfg.MATCH_RADIUS * s  # radius threshold per level
            radius.append(_radius)
        index_colrow_input = torch.cat(index_colrow_input, 0)  # [dim_total, 2]
        scale_thresholds = torch.cat(scale_thresholds, 0)
        radius = torch.cat(radius, 0)

        # --- condition: location inside the GT box ---
        mask_col_lr = torch.logical_and(index_colrow_input[:, 0] >= l,
                                        index_colrow_input[:, 0] <= r)
        mask_row_tb = torch.logical_and(index_colrow_input[:, 1] >= t,
                                        index_colrow_input[:, 1] <= b)
        mask_in_gtboxes = torch.logical_and(mask_col_lr, mask_row_tb)

        # --- condition: within center radius (Chebyshev distance to GT center) ---
        off_rtxy_input = index_colrow_input - g_xywh_input[i, :2].unsqueeze(0)
        off_ltxy_input = g_xywh_input[i, :2].unsqueeze(0) - index_colrow_input
        off_lrtxy_input = torch.cat([off_rtxy_input, off_ltxy_input], -1)
        off_lrtxy_input_max, _ = torch.max(off_lrtxy_input, -1)
        mask_radius = off_lrtxy_input_max < radius

        # Distances from each location to the GT's four sides (all positive
        # inside the box); used for level assignment and centerness.
        off_lt_input = index_colrow_input - g_ltrb_input_b[i, :2].unsqueeze(0)
        off_rb_input = g_ltrb_input_b[i, 2:].unsqueeze(0) - index_colrow_input
        off_ltrb_input = torch.cat([off_lt_input, off_rb_input], -1)  # [dim_total, 4]
        off_ltrb_input_max, _ = torch.max(off_ltrb_input, -1)

        # --- condition: max side-distance falls in this level's scale range ---
        mask_in_ceng = torch.logical_and(off_ltrb_input_max > scale_thresholds[:, 0],
                                         off_ltrb_input_max <= scale_thresholds[:, 1])
        # --- condition: smaller GT wins conflicting locations ---
        mask_area = _area_b_ > area_gt_input
        # in-box positives
        mask_kuang = torch.logical_and(torch.logical_and(mask_in_gtboxes, mask_in_ceng),
                                       mask_area)
        # radius positives
        mask = torch.logical_and(mask_kuang, mask_radius)

        # centerness = sqrt((min_lr * min_tb) / (max_lr * max_tb))
        lr_min = torch.min(off_ltrb_input[:, ::2], -1)[0]
        lr_max = torch.max(off_ltrb_input[:, ::2], -1)[0]
        tb_min = torch.min(off_ltrb_input[:, 1::2], -1)[0]
        tb_max = torch.max(off_ltrb_input[:, 1::2], -1)[0]
        center_ness = torch.sqrt((lr_min * tb_min) / (lr_max * tb_max))
        center_ness[center_ness.isnan()] = 0.  # guard 0/0 outside the box
        center_ness.unsqueeze_(-1)

        # write targets; later (smaller) GTs overwrite only their own locations
        g_cls_b_[mask, g_labels_b[i].long() - 1] = 1.
        g_centerness_b_[mask_kuang] = center_ness[mask_kuang]
        positive_radius_b_[mask] = 1
        _area_b_[mask] = area_gt_input  # update occupied areas
        g_tltrb_input_b_[mask] = off_ltrb_input[mask]
        g_ltrb_input_b_[mask] = g_ltrb_input_b[i]

    if cfg.IS_VISUAL:
        cfg.IS_VISUAL = False  # show only once
        ''' visualize the matched locations '''
        import matplotlib.pyplot as plt
        from f_tools.pic.enhance.f_data_pretreatment4np import f_recover_normalization4ts
        _img_ts = img_ts.clone()
        _img_ts = f_recover_normalization4ts(_img_ts)
        img_np = _img_ts.cpu().numpy().astype(np.uint8).transpose((1, 2, 0))
        # NOTE(review): `cv2`, `COLOR_CV` and `f_show_od_np4plt_v2` are not
        # imported in this module — this debug branch would NameError as-is.
        img_np = cv2.cvtColor(img_np, cv2.COLOR_RGB2BGR)
        print('框内 %s 半径 %s' % ((g_centerness_b_ > 0.).sum().cpu().item(),
                               (g_cls_b_ == 1).sum().cpu().item()))
        start_index = 0
        num_gt = 0
        for j in range(len(cfg.STRIDES)):
            # walk each level's rows and cols
            for row in range(cfg.t_grids_hw[j][0]):
                for col in range(cfg.t_grids_hw[j][1]):
                    # location center in input coordinates
                    x = col * cfg.STRIDES[j] + cfg.STRIDES[j] // 2
                    y = row * cfg.STRIDES[j] + cfg.STRIDES[j] // 2
                    # flat index = start_index + (row * w + col)
                    index = (row * cfg.t_grids_hw[j][1] + col) + start_index
                    if (g_cls_b_[index] == 1).any():  # radius positive
                        # recover the GT box from the side distances
                        off_l, off_t, off_r, off_b = g_tltrb_input_b_[index]
                        xmin = int(x - off_l)
                        ymin = int(y - off_t)
                        xmax = int(x + off_r)
                        ymax = int(y + off_b)
                        gcls = np.argmax(g_cls_b_[index, :].cpu(), axis=-1)
                        mess = '%s' % (int(gcls))
                        cv2.circle(img_np, (int(x), int(y)), 5, COLOR_CV['green'], -1)
                        cv2.rectangle(img_np, (xmin, ymin), (xmax, ymax), (0, 0, 255), 2)
                        cv2.rectangle(img_np, (int(xmin), int(abs(ymin) - 15)),
                                      (int(xmin + (xmax - xmin) * 0.55), int(ymin)),
                                      (255, 0, 0), -1)
                        cv2.putText(img_np, mess, (int(xmin), int(ymin)),
                                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 2)
                        num_gt += 1
            start_index += (cfg.t_grids_hw[j][0] * cfg.t_grids_hw[j][1])
        cv2.putText(img_np, 'num_gt=' + str(num_gt), (0, 50), cv2.FONT_HERSHEY_SIMPLEX,
                    fontScale=0.5, color=(0, 255, 0), thickness=2)
        f_show_od_np4plt_v2(img_np)
    return g_cls_b_, g_centerness_b_, g_tltrb_input_b_, positive_radius_b_, g_ltrb_input_b_


def decode_fcos4box(cfg, p_tltrb_input, toone_wh_ts_input, is_to_one=False):
    '''
    FCOS decode: per-location side distances -> boxes in input coordinates.

    :param cfg: needs STRIDES, t_grids_hw, NUM_KEYPOINTS
    :param p_tltrb_input: [batch, dim, 4] predicted l/t/r/b distances
    :param toone_wh_ts_input: [batch, 2] image (w, h) for normalization
    :param is_to_one: normalize the boxes to 0..1 when True
    :return: [batch, dim, 4] boxes, ltrb
    '''
    device = p_tltrb_input.device  # FIX: was `.DEVICE`
    # l/t are subtracted from the center, r/b are added
    weight = torch.tensor([-1, -1, 1, 1], device=device).view(1, 1, -1)
    index_colrow = []  # receptive-field centers of every level
    for j, s in enumerate(cfg.STRIDES):
        # feature-map grid -> input-image coordinates
        # NOTE(review): `f_mershgrid` is not imported in this module — confirm.
        _grids = f_mershgrid(fix=cfg.t_grids_hw[j][0], col=cfg.t_grids_hw[j][1],
                             is_no_swap=False).to(device)
        _grids = _grids * s + s // 2
        index_colrow.append(_grids)
    index_colrow = torch.cat(index_colrow, 0)
    index_colrow_x2 = index_colrow.repeat(1, 2).unsqueeze(0)
    # kept for the keypoint variant; unused in the box path
    index_colrow_x5 = index_colrow.repeat(1, cfg.NUM_KEYPOINTS).unsqueeze(0)
    p_ltrb_input = p_tltrb_input * weight + index_colrow_x2  # real ltrb
    if is_to_one:
        p_ltrb_one = p_ltrb_input / toone_wh_ts_input.repeat(1, 2).unsqueeze(1)
        return p_ltrb_one
    return p_ltrb_input
PypiClean
/HMOBSTER-0.0.44.tar.gz/HMOBSTER-0.0.44/mobster/plotting_mobster.py
import torch
from scipy.stats import beta, pareto, moyal
import matplotlib as mpl

mpl.use('Agg')  # headless backend; must be set before pyplot is imported
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import numpy as np
import mobster.utils_mobster as mut


def plot_results(data, inf_res, bins=50, output="results.png", fig_height=4, fig_width=3, drivers=None):
    """Plot MOBSTER fit results: one subplot per karyotype with the VAF
    histogram overlaid by the fitted mixture components (clonal Betas in
    green, Pareto tail in pink, subclonal components in red, and the dashed
    mixture total).

    :param data: dict karyotype -> tensor of (alt_counts, depth) pairs
    :param inf_res: inference result with 'model_parameters' (per karyotype)
                    and 'run_parameters' (tail, K, multi_tail, subclonal_prior,
                    truncated_pareto)
    :param bins: histogram bin count
    :param output: path the figure is saved to (PNG, dpi=300)
    :param fig_height: per-karyotype figure width multiplier
                       — NOTE(review): name/usage look swapped with fig_width; confirm
    :param fig_width: per-karyotype figure height multiplier
    :param drivers: optional dict karyotype -> index array of driver mutations
                    to mark with vertical lines
    """
    all_params = inf_res["model_parameters"]
    tail = inf_res['run_parameters']['tail']  # 1 when a Pareto tail was fit
    K = inf_res['run_parameters']['K']  # number of subclonal components
    multi_tail = inf_res['run_parameters']['multi_tail']
    subclonal_prior = inf_res['run_parameters']['subclonal_prior']
    truncated_pareto = inf_res['run_parameters']['truncated_pareto']
    nKar = len(all_params)
    plt.rcParams["figure.figsize"] = (fig_height * nKar, fig_width * nKar)
    fig, axs = plt.subplots(nKar)  # axs is a bare Axes when nKar == 1
    karyos = list(data.keys())
    theoretical_num_clones = [mut.theo_clonal_num(kr, range=False) for kr in karyos]
    theo_clonal_means = [torch.min(mut.theo_clonal_num(kr)) for kr in karyos]
    for i, kr in enumerate(data):
        params = all_params[kr]
        assignment_probs = params["mixture_probs"]
        # VAF = alt / depth
        data_mut = data[kr].detach().numpy()
        data_mut = data_mut[:, 0] / data_mut[:, 1]
        if nKar == 1:
            axs.hist(data_mut, bins=bins, density=True, alpha=0.48)
            axs.title.set_text("Karyotype = " + kr)
        else:
            axs[i].hist(data_mut, bins=bins, density=True, alpha=0.48)
            axs[i].title.set_text("Karyotype = " + kr)
        # Clonal Beta components; mixture-prob index j is offset by `tail`
        # because component 0 is the tail when tail == 1.
        for j in range(tail, params["beta_concentration1"].shape[0] + tail):
            cl = "tab:green"
            a = params["beta_concentration1"][j - tail]
            b = params["beta_concentration2"][j - tail]
            x = np.linspace(0.05, 1, 1000)
            # NOTE(review): tot_p is (re)initialized here inside the Beta loop,
            # so the dashed total accumulates from the last Beta component only
            # — confirm this matches the intended upstream behavior.
            tot_p = np.zeros_like(x)
            p = beta.pdf(x, a, b) * assignment_probs[j]
            tot_p += p
            if nKar == 1:
                axs.plot(x, p, linewidth=1.5, color=cl)
            else:
                axs[i].plot(x, p, linewidth=1.5, color=cl)
        if tail == 1:
            if K > 0 and truncated_pareto and multi_tail:
                # One truncated Pareto piece per subclone + 1, each cut at its
                # own upper density bound and renormalized.
                for w in range(K + 1):
                    alpha = params["tail_shape"]
                    nall = mut.theo_clonal_tot(kr)
                    x = np.linspace(0.05, 1, 1000)
                    p = pareto.pdf(x, alpha * nall, scale=params["tail_scale"])
                    p[p < params["tail_higher"][w]] = 0
                    p = p / np.trapz(p, x)  # renormalize after truncation
                    p *= assignment_probs[0] * params["multi_tail_weights"][w]
                    tot_p += p
                    if nKar == 1:
                        axs.plot(x, p, linewidth=1.5, color="tab:pink")
                    else:
                        axs[i].plot(x, p, linewidth=1.5, color="tab:pink")
            else:
                # Single Pareto tail, optionally truncated above tail_higher
                alpha = params["tail_shape"]
                nall = mut.theo_clonal_tot(kr)
                x = np.linspace(0.05, 1, 1000)
                p = pareto.pdf(x, alpha * nall, scale=params["tail_scale"]) * assignment_probs[0]
                if truncated_pareto:
                    p[x > params["tail_higher"]] = 0
                    p = p / np.trapz(p, x)
                tot_p += p
                if nKar == 1:
                    axs.plot(x, p, linewidth=1.5, color="tab:pink")
                else:
                    axs[i].plot(x, p, linewidth=1.5, color="tab:pink")
        if drivers is not None:
            # Mark driver mutations with vertical lines up to the tallest bar
            drivers_mut = data_mut[drivers[kr]]
            if (len(drivers_mut) > 0):
                if nKar == 1:
                    p = axs.patches
                    heights = [patch.get_height() for patch in p]
                    axs.vlines(drivers_mut, 0, np.max(heights))
                else:
                    p = axs[i].patches
                    heights = [patch.get_height() for patch in p]
                    axs[i].vlines(drivers_mut, 0, np.max(heights))
        # Subclonal components: Moyal (truncated to the observed VAF range and
        # below the lowest theoretical clonal peak) or Beta.
        for z in range(K):
            x = np.linspace(0.05, 1, 1000)
            if subclonal_prior == "Moyal":
                p = moyal.pdf(x, params["loc_subclones"][z], params["scale_subclonal"][z])
                p[x < np.min(data_mut).item()] = 0
                p[x > theo_clonal_means[i].item()] = 0
                p = p / np.trapz(p, x)
                p *= assignment_probs[(z + tail + theoretical_num_clones[i])]
                tot_p += p
            else:
                # Beta reparameterized by mean (loc) and concentration (n_trials)
                p = beta.pdf(x, params["loc_subclones"][z] * params["n_trials_subclonal"][z],
                             (1 - params["loc_subclones"][z]) * params["n_trials_subclonal"][z]) * \
                    assignment_probs[(z + tail + theoretical_num_clones[i])]
                tot_p += p
            if nKar == 1:
                axs.plot(x, p, linewidth=1.5, color="tab:red")
            else:
                axs[i].plot(x, p, linewidth=1.5, color="tab:red")
        # Dashed overall mixture density
        if nKar == 1:
            axs.plot(x, tot_p, linewidth=1., linestyle="--")
        else:
            axs[i].plot(x, tot_p, linewidth=1., linestyle="--")
    pink_patch = mpatches.Patch(color='tab:pink', label='Tail')
    red_patch = mpatches.Patch(color='tab:red', label='Subclonal')
    green_patch = mpatches.Patch(color='tab:green', label='Clonal')
    plt.legend(handles=[pink_patch, red_patch, green_patch])
    plt.savefig(output, dpi=300)
    plt.clf()
PypiClean
/CellProfiler-4.2.6.tar.gz/CellProfiler-4.2.6/cellprofiler/gui/module_view/_module_sizer.py
import logging

import wx


class ModuleSizer(wx.Sizer):
    """A three-column wx sizer for module setting rows.

    The module sizer uses the maximum best width of the setting edit
    controls to compute the column widths, then it sets the text controls
    to wrap within the remaining space, then it uses the best height of
    each text control to lay out the rows.

    Column layout per row: 0 = static text label, 1 = edit control,
    2 = help control.
    """

    def __init__(self, rows, cols=2):
        """Create a sizer for `rows` rows and `cols` columns.

        rows - expected number of rows in the grid
        cols - number of columns per row (items added via Add are grouped
               into rows of this length)
        """
        super(ModuleSizer, self).__init__()
        self.__rows = rows
        self.__cols = cols
        self.__min_text_width = 150   # floor for the label column, in pixels
        self.__height_padding = 5     # vertical padding above/below the grid
        # Guard so a repeating wx layout error is only logged once.
        self.__printed_exception = False
        self.__items = []             # list of rows; each row is a list of wx.SizerItem

    def get_item(self, i, j):
        """Return the wx.SizerItem at column i, row j, or None if absent."""
        if len(self.__items) <= j or len(self.__items[j]) <= i:
            return None
        return self.__items[j][i]

    def Reset(self, rows, cols=3, destroy_windows=True):
        """Clear the sizer and prepare it for `rows` x `cols` new items.

        destroy_windows - if True, hide and destroy the windows currently
                          managed by this sizer before clearing.
        """
        if destroy_windows:
            windows = []
            for j in range(self.__rows):
                for i in range(self.__cols):
                    item = self.get_item(i, j)
                    if item is None:
                        continue
                    if item.IsWindow():
                        window = item.GetWindow()
                        if isinstance(window, wx.Window):
                            windows.append(window)
            for window in windows:
                window.Hide()
                window.Destroy()
        self.Clear(False)
        self.__rows = rows
        self.__cols = cols
        self.__items = []

    def Add(self, control, *args, **kwargs):
        """Add a control, starting a new row whenever the current one is full."""
        if len(self.__items) == 0 or len(self.__items[-1]) == self.__cols:
            self.__items.append([])
        item = super(ModuleSizer, self).Add(control, *args, **kwargs)
        self.__items[-1].append(item)
        return item

    def CalcMin(self):
        """Calculate the minimum from the edit controls.

        Returns a wx.Size where the height is the total height of the grid
        and the width is self.__min_text_width plus the widths of the edit
        controls and help controls.
        """
        try:
            if (
                self.__rows * self.__cols == 0
                or self.Children is None
                or len(self.Children) == 0
            ):
                return wx.Size(0, 0)
            height = self.__height_padding
            for j in range(0, self.__rows):
                # Only the first two columns (label + edit) contribute borders.
                borders = [
                    self.get_item(col, j).GetBorder()
                    for col in range(2)
                    if self.get_item(col, j) is not None
                ]
                if len(borders) == 0:
                    # Empty row: reserve a nominal 10 pixels.
                    height += 10
                else:
                    height_border = max(borders)
                    height += self.get_row_height(j) + 2 * height_border
            height += self.__height_padding
            self.__printed_exception = False
            return wx.Size(
                self.calc_edit_size()[0]
                + self.__min_text_width
                + self.calc_help_size()[0],
                height,
            )
        except Exception:
            # This happens, hopefully transiently, on the Mac.
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; log once to avoid flooding the log.
            if not self.__printed_exception:
                logging.error("WX internal error detected", exc_info=True)
                self.__printed_exception = True
            return wx.Size(0, 0)

    def get_row_height(self, j):
        """Return the height of row j: the max of its items' minimum heights."""
        height = 0
        for i in range(self.__cols):
            item = self.get_item(i, j)
            if item is None:
                continue
            if item.IsWindow() and isinstance(item.GetWindow(), wx.StaticLine):
                # Give divider lines a little extra breathing room.
                height = max(height, item.CalcMin()[1] * 1.25)
            else:
                height = max(height, item.CalcMin()[1])
        return height

    def calc_column_size(self, j):
        """Return a wx.Size with the total height of the controls in
        column j and the maximum of their widths.
        """
        height = 0
        width = 0
        for i in range(self.__rows):
            item = self.get_item(j, i)
            if item is None:
                continue
            size = item.CalcMin()
            height += size[1]
            width = max(width, size[0])
        return wx.Size(width, height)

    def calc_help_size(self):
        """Size of the help column (column 2)."""
        return self.calc_column_size(2)

    def calc_edit_size(self):
        """Size of the edit-control column (column 1)."""
        return self.calc_column_size(1)

    def calc_max_text_width(self):
        """Return the widest single-line extent of the column-0 labels,
        never less than the minimum text width.
        """
        width = self.__min_text_width
        for i in range(self.__rows):
            item = self.get_item(0, i)
            if item is None:
                continue
            control = item.GetWindow()
            assert isinstance(control, wx.StaticText), (
                "Control at column 0, "
                "%d of grid is not StaticText: %s" % (i, str(control))
            )
            # Measure the unwrapped label (newlines collapsed to spaces).
            text = control.GetLabel().replace("\n", " ")
            ctrl_width = control.GetFullTextExtent(text)[0] + 2 * item.GetBorder()
            width = max(width, ctrl_width)
        return width

    def RecalcSizes(self):
        """Recalculate the sizes of our items, resizing the text boxes as we go."""
        if self.__rows * self.__cols == 0:
            return
        try:
            size = self.GetSize()
            width = size[0] - 20
            edit_width = self.calc_edit_size()[0]
            help_width = self.calc_help_size()[0]
            max_text_width = self.calc_max_text_width()
            # Give spare width to the edit column, but never let it eat
            # more than the space left after text and help columns.
            if edit_width + help_width + max_text_width < width:
                edit_width = width - max_text_width - help_width
            elif edit_width * 4 < width:
                edit_width = width / 4
            text_width = max([width - edit_width - help_width, self.__min_text_width])
            widths = [text_width, edit_width, help_width]
            #
            # Change all static text controls to wrap at the text width. Then
            # ask the items how high they are and do the layout of the line.
            #
            height = self.__height_padding
            panel = self.GetContainingWindow()
            for i in range(self.__rows):
                text_item = self.get_item(0, i)
                edit_item = self.get_item(1, i)
                if edit_item is None:
                    continue
                inner_text_width = text_width - 2 * text_item.GetBorder()
                control = text_item.GetWindow()
                assert isinstance(control, wx.StaticText), (
                    "Control at column 0, %d of grid is not StaticText: %s"
                    % (i, str(control))
                )
                text = control.GetLabel()
                edit_control = edit_item.GetWindow()
                height_border = max([x.GetBorder() for x in (edit_item, text_item)])
                if isinstance(edit_control, wx.StaticLine) and len(text) == 0:
                    #
                    # A line spans both columns
                    #
                    text_item.Show(False)
                    # make the divider height the same as a text row plus some
                    item_height = self.get_row_height(i)
                    assert isinstance(edit_item, wx.SizerItem)
                    border = edit_item.GetBorder()
                    third_width = (text_width + edit_width - 2 * border) / 3
                    item_location = wx.Point(
                        text_width - third_width / 2,
                        height + border + item_height / 2,
                    )
                    item_size = wx.Size(third_width, edit_item.GetSize()[1])
                    item_location = panel.CalcScrolledPosition(item_location)
                    edit_item.SetDimension(item_location, item_size)
                else:
                    text_item.Show(True)
                    # Re-wrap the label only when it actually overflows.
                    if text_width > self.__min_text_width and (
                        text.find("\n") != -1
                        or control.GetFullTextExtent(text)[0] > inner_text_width
                    ):
                        text = text.replace("\n", " ")
                        control.SetLabel(text)
                        control.Wrap(inner_text_width)
                    row_height = self.get_row_height(i)
                    for j in range(self.__cols):
                        item = self.get_item(j, i)
                        item_x = sum(widths[0:j])
                        item_y = height
                        if (item.Flag & wx.EXPAND) == 0:
                            # Non-expanding items keep their minimum size and
                            # honor the alignment flags within the cell.
                            item_size = item.CalcMin()
                            if item.Flag & wx.ALIGN_CENTER_VERTICAL:
                                item_y = height + (row_height - item_size[1]) / 2
                            if item.Flag & wx.ALIGN_CENTER_HORIZONTAL:
                                item_x += (widths[j] - item_size[0]) / 2
                            elif item.Flag & wx.ALIGN_RIGHT:
                                item_x += widths[j] - item_size[0]
                        else:
                            item_size = wx.Size(widths[j], item.CalcMin()[1])
                        item_location = wx.Point(item_x, item_y)
                        item_location = panel.CalcScrolledPosition(item_location)
                        item.SetDimension(item_location, item_size)
                height += self.get_row_height(i) + 2 * height_border
        except Exception:
            # This happens, hopefully transiently, on the Mac.
            # Narrowed from a bare `except:`; log once (see CalcMin).
            if not self.__printed_exception:
                logging.warning("Detected WX error", exc_info=True)
                self.__printed_exception = True
PypiClean
/FastTextRank-1.4.zip/FastTextRank-1.4/README.rst
# FastTextRank
Extract abstracts and keywords from Chinese text, using *optimized iterative algorithms* to improve running **speed**, and *selectively using word vectors* to improve **accuracy**.
## PageRank
PageRank is a web page ranking algorithm from Google.<br/>
PageRank was originally used to calculate the importance of web pages. The entire WWW can be seen as a directed graph in which each node is a web page.<br/>
This algorithm can calculate every node's importance from its connections.<br/>
* My algorithm changes the iterative procedure to make it much faster: it costs 10 ms per article, whereas TextRank4ZH costs 80 ms on my data.<br/>
* My algorithm can also use word2vec to make the abstract more accurate, but this takes more time to run. Using word2vec costs 40 ms per article on the same training data.
## W2VTextRank4Sentence
### Introduction
1. Cut the article into sentences
2. Calculate the similarity between sentences:
   * using the cosine similarity of word vectors
   * using the words that two sentences have in common
3. Build a graph from the sentences' similarities
4. Calculate the importance of each sentence with the improved iterative algorithm
5. Get the abstract
### API
* use_stopword: boolean, default True
* stop_words_file: str, default None. The stop words file you want to use. If it is None, this package's stop words are used.
* use_w2v: boolean, default False. If it is True, you must also pass the dict_path parameter.
* dict_path: str, default None.
* max_iter: maximum number of iteration rounds
* tol: maximum tolerance error
## W2VTextRank4Word
### Introduction
1. Cut the article into words
2. Calculate the similarity between words: if two words both appear within the window distance, the weight of the edge between them is increased by 1.0. The window size is set by the user.
3. Build a graph from the words' similarities
4. Calculate the importance of each word with the improved iterative algorithm
5. Get the key words
### API
* use_stopword=boolean, default True
* stop_words_file=str, default None. The stop words file you want to use.
If it is None, this package's stop words are used.
* max_iter=maximum number of iteration rounds
* tol=maximum tolerance error
* window=int, default 2. The window used to determine whether two words are related.
PypiClean
/CADET-Process-0.7.3.tar.gz/CADET-Process-0.7.3/examples/load_wash_elute/lwe_concentration.py
# %% [markdown] # (lwe_example_concentration)= # # Concentration Gradients # # ```{figure} ./figures/flow_sheet_concentration.svg # Flow sheet for load-wash-elute process using a single inlet. # ``` # %% import numpy as np from CADETProcess.processModel import ComponentSystem from CADETProcess.processModel import StericMassAction from CADETProcess.processModel import Inlet, GeneralRateModel, Outlet from CADETProcess.processModel import FlowSheet from CADETProcess.processModel import Process # Component System component_system = ComponentSystem() component_system.add_component('Salt') component_system.add_component('A') component_system.add_component('B') component_system.add_component('C') # Binding Model binding_model = StericMassAction(component_system, name='SMA') binding_model.is_kinetic = True binding_model.adsorption_rate = [0.0, 35.5, 1.59, 7.7] binding_model.desorption_rate = [0.0, 1000, 1000, 1000] binding_model.characteristic_charge = [0.0, 4.7, 5.29, 3.7] binding_model.steric_factor = [0.0, 11.83, 10.6, 10] binding_model.capacity = 1200.0 # Unit Operations inlet = Inlet(component_system, name='inlet') inlet.flow_rate = 6.683738370512285e-8 column = GeneralRateModel(component_system, name='column') column.binding_model = binding_model column.length = 0.014 column.diameter = 0.02 column.bed_porosity = 0.37 column.particle_radius = 4.5e-5 column.particle_porosity = 0.75 column.axial_dispersion = 5.75e-8 column.film_diffusion = column.n_comp*[6.9e-6] column.pore_diffusion = [7e-10, 6.07e-11, 6.07e-11, 6.07e-11] column.surface_diffusion = column.n_bound_states*[0.0] column.c = [50, 0, 0, 0] column.cp = [50, 0, 0, 0] column.q = [binding_model.capacity, 0, 0, 0] outlet = Outlet(component_system, name='outlet') # Flow Sheet flow_sheet = FlowSheet(component_system) flow_sheet.add_unit(inlet) flow_sheet.add_unit(column) flow_sheet.add_unit(outlet, product_outlet=True) flow_sheet.add_connection(inlet, column) flow_sheet.add_connection(column, outlet) # %% 
[markdown] # ```{figure} ./figures/events_concentration.svg # Events of load-wash-elute process using a single inlet and modifying its concentration. # ``` # %% # Process process = Process(flow_sheet, 'lwe') process.cycle_time = 2000.0 load_duration = 9 t_gradient_start = 90.0 gradient_duration = process.cycle_time - t_gradient_start c_load = np.array([50.0, 1.0, 1.0, 1.0]) c_wash = np.array([50.0, 0.0, 0.0, 0.0]) c_elute = np.array([500.0, 0.0, 0.0, 0.0]) gradient_slope = (c_elute - c_wash)/gradient_duration c_gradient_poly = np.array(list(zip(c_wash, gradient_slope))) process.add_event('load', 'flow_sheet.inlet.c', c_load) process.add_event('wash', 'flow_sheet.inlet.c', c_wash, load_duration) process.add_event('grad_start', 'flow_sheet.inlet.c', c_gradient_poly, t_gradient_start) # %% if __name__ == '__main__': from CADETProcess.simulator import Cadet process_simulator = Cadet() simulation_results = process_simulator.simulate(process) from CADETProcess.plotting import SecondaryAxis sec = SecondaryAxis() sec.components = ['Salt'] sec.y_label = '$c_{salt}$' simulation_results.solution.column.outlet.plot(secondary_axis=sec)
PypiClean
/INGInious-0.8.7.tar.gz/INGInious-0.8.7/inginious/frontend/static/js/task.js
"use strict"; function init_task_page(evaluate) { evaluatedSubmission = evaluate; //Init the task form, if we are on the task submission page var task_form = $('form#task'); task_form.on('submit', function() { submitTask(false); return false; }); //Init the button that start a remote ssh server for debugging $('form#task #task-submit-debug').on('click', function() { submitTask(true); }); //if INGInious tells us to wait for another submission //this takes precedence over the link in the URL, in order to be consistent. if(task_form.attr("data-wait-submission")) { loadOldSubmissionInput(task_form.attr("data-wait-submission"), false); waitForSubmission(task_form.attr("data-wait-submission")); } else { // Check if the page link contains a submission id to load, if needed try { // the class URLSearchParams may not exist in older browsers... var loadFromURL = (new URLSearchParams(document.location.search.substring(1))).get("load"); if(loadFromURL !== null) loadOldSubmissionInput(loadFromURL, true); } catch(error) { console.error(error); } } $('.submission').each(function() { $(this).on('click', clickOnSubmission); }); // Allows to close cards $(document).on('click', '[data-dismiss="card"]', function(event) {event.target.closest('.card').remove()}); } var evaluatedSubmission = 'best'; //True if loading something var loadingSomething = false; //Blur task form function blurTaskForm() { $.each(codeEditors, function(idx, editor) { editor.setOption("readOnly", true); }); var task_form = $('form#task'); $("input, button", task_form).attr("disabled", "disabled").addClass('form-blur'); //task_form.addClass('form-blur'); loadingSomething = true; } function unblurTaskForm() { $.each(codeEditors, function(idx, editor) { editor.setOption("readOnly", false); }); var task_form = $('form#task'); $("input, button", task_form).removeAttr("disabled").removeClass('form-blur'); //task_form.removeClass('form-blur'); loadingSomething = false; } //Reset all alerts function resetAlerts() { 
$('#task_alert').html(''); $('.task_alert_problem').html(''); } //Increment tries count function incrementTries() { var ttries = $('#task_tries'); ttries.text(parseInt(ttries.text()) + 1); } //Update task status function updateTaskStatus(newStatus, grade) { var task_status = $('#task_status'); var task_grade = $('#task_grade'); task_status.html(newStatus); task_grade.text(grade); } //Creates a new submission (right column) function displayNewSubmission(id) { var submissions = $('#submissions'); submissions.find('.submission-empty').remove(); var submission_link = jQuery('<li/>', { class: "submission list-group-item list-group-item-warning", "data-submission-id": id }).on('click', clickOnSubmission); jQuery('<span id="txt"/>', {}).text(getDateTime()).appendTo(submission_link); //If there exists tags, we add a badge with '0' in the new submission. if($('span', $('#main_tag_group')).length > 0){ submission_link.append('<span class="badge alert-info" id="tag_counter" >0</span>'); } submissions.prepend(submission_link); $("body").tooltip({ selector: '[data-toggle="tooltip"]' }); } function removeSubmission(id) { var item; $('#submissions').find('.submission').each(function() { if($(this).attr('data-submission-id').trim() == id) item = $(this) }); item.remove(); } //Updates a loading submission function updateSubmission(id, result, grade, tags) { grade = grade || "0.0"; var nclass = ""; if(result == "success") nclass = "list-group-item-success"; else if(result == "save") nclass = "list-group-item-save"; else nclass = "list-group-item-danger"; $('#submissions').find('.submission').each(function() { if($(this).attr('data-submission-id').trim() == id) { $(this).removeClass('list-group-item-warning').addClass(nclass); var date = $(this).find("span[id='txt']"); date.text(date.text() + " - " + grade + "%"); //update the badge updateTagsToNewSubmission($(this), tags); } }); } // Change the evaluated submission displayed function displayEvaluatedSubmission(id, fade) { var item; 
$('#submissions').find('.submission').each(function() { if($(this).attr('data-submission-id').trim() == id) item = $(this) }); // LTI does not support selecting a specific submission for evaluation if($("#my_submission").length) { var text = item.find("span[id='txt']").html(); var submission_link = jQuery('<a/>', { href: "#", id: "my_submission", class: "submission list-group-item list-group-item-action list-group-item-info", "data-submission-id": id }).on('click', clickOnSubmission); jQuery('<i/>', {class: "fa fa-chevron-right fa-fw"}).appendTo(submission_link).after("&nbsp;"); submission_link.append(text); if (fade) { $("#my_submission").fadeOut(function () { $(this).replaceWith(submission_link.fadeIn().removeAttr('style')); }); } else { $("#my_submission").replaceWith(submission_link); } $("#share_my_submission").removeClass("hidden"); } updateTaskStatus(item.hasClass("list-group-item-success") ? "Succeeded" : "Failed", parseFloat(item.text().split("-")[1])); } //Submission's click handler function clickOnSubmission() { if(loadingSomething) return; loadOldSubmissionInput($(this).attr('data-submission-id'), true); $('body').removeClass('sidebar-active'); } //Get current datetime function getDateTime() { var MyDate = new Date(); return ('0' + MyDate.getDate()).slice(-2) + '/' + ('0' + (MyDate.getMonth() + 1)).slice(-2) + '/' + MyDate.getFullYear() + " " + ('0' + MyDate.getHours()).slice(-2) + ':' + ('0' + MyDate.getMinutes()).slice(-2) + ':' + ('0' + MyDate.getSeconds()).slice(-2); } //Verify the task form (files, ...) 
function taskFormValid() { var answered_to_all = true; var errors = []; var form = $('#task'); form.find('textarea,input[type="text"]').each(function() { if($(this).attr('name') != undefined) //skip codemirror's internal textareas { if($(this).val() == "" && $(this).attr('data-optional') != "True") answered_to_all = false; } }); form.find('input[type="checkbox"],input[type="radio"]').each(function() { if(form.find("input[name='"+ $(this).attr('name')+"']:checked").length == 0) { answered_to_all = false; } }); form.find('input[type="file"]').each(function() { var filename = $(this).val().split(/(\\|\/)/g).pop(); //file input fields cannot be optional if(filename == "") { answered_to_all = false; return; } //verify ext var allowed_extensions = $.parseJSON($(this).attr('data-allowed-exts')); var has_one = false; $.each(allowed_extensions, function(idx, ext){ has_one = has_one || (filename.lastIndexOf(ext) === filename.length - ext.length) > 0; }); if(!has_one) errors.push($("#invalidext").text().replace("{}", filename)); //try to get the size of the file var size = -1; try { size = $(this)[0].files[0].size; } catch (e) {} //modern browsers if(size == -1) try { size = $(this)[0].files[0].fileSize; } catch(e) { } //old versions of Firefox //Verify the maximum size var max_size = parseInt($(this).attr('data-max-size')); if(size != -1 && size > max_size) errors.push($("#filetooheavy").text().replace("{}", filename)); }); if(!answered_to_all) { errors.push($("#answerall").text()); } if(errors.length != 0) { var task_alert = $('#task_alert'); var content = $('<div></div>'); var first = true; $.each(errors, function(idx, elem){ if(!first) content.append($('<br>')); first = false; content.append($('<span></span>').text(elem)); }); task_alert.html(getAlertCode("Error", content.html(), "danger", false)); $('html, body').animate({ scrollTop: task_alert.offset().top - 100 }, 200); return false; } else { return true; } } //Submits a task function submitTask(with_ssh) { 
if(loadingSomething) return; if(!taskFormValid()) return; $('#task-debug-mode').val(with_ssh ? "ssh" : ""); //Must be done before blurTaskForm as when a form is disabled, no input is sent by the plugin $('form#task').ajaxSubmit( { dataType: 'json', success: function(data) { if("status" in data && data["status"] == "ok" && "submissionid" in data) { displayTaskLoadingAlert(data, data["submissionid"]); incrementTries(); displayNewSubmission(data['submissionid']); waitForSubmission(data['submissionid']); } else if("status" in data && data['status'] == "error" && "text" in data) { displayTaskStudentAlertWithProblems(data, "danger", false); updateTaskStatus(data["text"], 0); unblurTaskForm(); } if("remove" in data) { data["remove"].forEach(function(element, index, array) { removeSubmission(element); }); } }, error: function() { displayTaskStudentAlertWithProblems($("#internalerror").text(), "danger", false); updateTaskStatus($("#internalerror").text(), 0); unblurTaskForm(); } }); blurTaskForm(); resetAlerts(); displayTaskLoadingAlert(null, null); updateTaskStatus("<i class=\"fa fa-spinner fa-pulse fa-fw\" aria-hidden=\"true\"></i>", 0); $('html, body').animate({ scrollTop: $('#task_alert').offset().top - 100 }, 200); } //Wait for a job to end function waitForSubmission(submissionid) { setTimeout(function() { var url = $('form#task').attr("action"); jQuery.post(url, {"@action": "check", "submissionid": submissionid}, null, "json") .done(function(data) { if("status" in data && data['status'] === "waiting") { waitForSubmission(submissionid); if("ssh_host" in data && "ssh_port" in data && "ssh_user" in data && "ssh_password" in data) displayRemoteDebug(submissionid, data); else displayTaskLoadingAlert(data, submissionid); } else if("status" in data && "result" in data && "grade" in data) { updateMainTags(data); if("debug" in data) displayDebugInfo(data["debug"]); if(data['result'] == "failed") displayTaskStudentAlertWithProblems(data, "danger", false); else if(data['result'] 
== "success") displayTaskStudentAlertWithProblems(data, "success", false); else if(data['result'] == "timeout") displayTaskStudentAlertWithProblems(data, "warning", false); else if(data['result'] == "overflow") displayTaskStudentAlertWithProblems(data, "warning", false); else if(data['result'] == "killed") displayTaskStudentAlertWithProblems(data, "warning", false); else // == "error" displayTaskStudentAlertWithProblems(data, "danger", false); if("tests" in data){ updateSubmission(submissionid, data['result'], data["grade"], data["tests"]); }else{ updateSubmission(submissionid, data['result'], data["grade"], []); } unblurTaskForm(); if("replace" in data && data["replace"] && $('#my_submission').length) { displayEvaluatedSubmission(submissionid, true); } else if($('#my_submission').length) { displayEvaluatedSubmission($('#my_submission').attr('data-submission-id'), false); } if("feedback_script" in data) eval(data["feedback_script"]); } else { displayTaskStudentAlertWithProblems(data, "danger", false); updateSubmission(submissionid, "error", "0.0", []); updateTaskStatus("Failed", 0); unblurTaskForm(); } }) .fail(function() { displayTaskStudentAlertWithProblems(data, "danger", false); updateSubmission(submissionid, "error", "0.0", []); updateTaskStatus("Failed", 0); unblurTaskForm(); }); }, 1000); } //Kill a running submission function killSubmission(submissionid) { $('.kill-submission-btn').attr('disabled', 'disabled'); var url = $('form#task').attr("action"); jQuery.post(url, {"@action": "kill", "submissionid": submissionid}, null, "json").done(function() { $('.kill-submission-btn').removeAttr('disabled'); }).fail(function() { $('.kill-submission-btn').removeAttr('disabled'); }); } //Displays debug info function displayDebugInfo(info) { displayDebugInfoRecur(info, $('#task_debug')); } function displayDebugInfoRecur(info, box) { var data = $(document.createElement('dl')); data.text(" "); box.html(data); jQuery.each(info, function(index, elem) { var namebox = 
$(document.createElement('dt')); var content = $(document.createElement('dd')); data.append(namebox); data.append(content); namebox.text(index); if(jQuery.isPlainObject(elem)) displayDebugInfoRecur(elem, content); else content.text(elem); }); } //Get the code for a "loading" alert, with a button to kill the current submission function getLoadingAlertCode(title, content, submissionid) { var kill_button = undefined; if(submissionid != null) kill_button = "<button type='button' onclick='killSubmission(\""+submissionid+"\")' class='btn btn-danger kill-submission-btn btn-small'>"+ "<i class='fa fa-close'></i>"+ "</button>"; return getAlertCode(title, content, "info", false, kill_button); } //Displays a loading alert in task form function displayTaskLoadingAlert(submission_wait_data, submissionid) { var task_alert = $('#task_alert'); var title = '<i class="fa fa-spinner fa-pulse fa-fw" aria-hidden="true"></i> '; var content = ""; if(submission_wait_data != null) content += submission_wait_data["text"]; task_alert.html(getLoadingAlertCode(title, content, submissionid)); } //Display informations for remote debugging function displayRemoteDebug(submissionid, submission_wait_data) { var ssh_host = submission_wait_data["ssh_host"]; var ssh_port = submission_wait_data["ssh_port"]; var ssh_user = submission_wait_data["ssh_user"]; var ssh_password = submission_wait_data["ssh_password"]; var pre_content = "ssh " + ssh_user + "@" + ssh_host + " -p " + ssh_port+ " -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o PreferredAuthentications=password"; var task_alert = $('#task_alert'); var title = '<i class="fa fa-spinner fa-pulse fa-fw" aria-hidden="true"></i> '; var content = submission_wait_data["text"]; //If not already set if($('pre#commandssh', task_alert).text() !== pre_content) { var remote_info = $("#ssh_template").clone(); $('#commandssh', remote_info).text(pre_content); // Generate iframe var webtermdiv = $("#webterm", remote_info); var webterm_link = 
$('#webterm_link', remote_info).val(); if(webterm_link !== undefined) { var full_link = webterm_link + "?host=" + ssh_host + "&port=" + ssh_port + "&password=" + ssh_password; $('<iframe>', { src: full_link, id: 'iframessh', frameborder: 0, scrolling: 'no' }).appendTo(webtermdiv); } task_alert.html(getLoadingAlertCode(title, "<div id='ssh_remote_info'>"+remote_info.html()+"</div>", submissionid)); $("#ssh_remote_info code", task_alert).text(ssh_password); $("#ssh_remote_info", task_alert).show(); } } //Displays a loading input alert in task form function displayTaskInputLoadingAlert() { var task_alert = $('#task_alert'); task_alert.html(getAlertCode("<i class=\"fa fa-spinner fa-pulse fa-fw\" aria-hidden=\"true\"></i>", "", "info", false)); $('html, body').animate( { scrollTop: task_alert.offset().top - 100 }, 200); } //Displays a loading input alert in task form function displayTaskInputErrorAlert() { var task_alert = $('#task_alert'); task_alert.html(getAlertCode("<b>" + $("#internalerror").text() + "</b>", "", "danger", false)); $('html, body').animate( { scrollTop: task_alert.offset().top - 100 }, 200); } //Displays a student error alert in task form function displayTaskStudentAlertWithProblems(content, type) { resetAlerts(); var firstPos = -1; var task_alert = $('#task_alert'); if("title" in content) { task_alert.html(getAlertCode(content.title, content.text, type, true)); firstPos = task_alert.offset().top; } if("problems" in content) { for(var problemid in problems_types) { if(problemid in content.problems) window["load_feedback_" + problems_types[problemid]](problemid, content["problems"][problemid]); } } $('html, body').animate( { scrollTop: firstPos - 100 }, 200); colorizeStaticCode(); MathJax.Hub.Queue(["Typeset",MathJax.Hub]); } function load_feedback_code(key, content) { var alert_type = "danger"; if(content[0] === "timeout" || content[0] === "overflow") alert_type = "warning"; if(content[0] === "success") alert_type = "success"; $("#task_alert_" + 
key).html(getAlertCode("", content[1], alert_type, true)); } function load_feedback_file(key, content) { load_feedback_code(key, content); } function load_feedback_match(key, content) { load_feedback_code(key, content); } function load_feedback_code_single_line(key, content) { load_feedback_code(key, content); } function load_feedback_multiple_choice(key, content) { load_feedback_code(key, content); } //Create an alert //type is either alert, info, danger, warning //dismissible is a boolean function getAlertCode(title, content, type, dismissible, additionnal_content) { var a = '<div class="card border-' + type + ' mb-3" role="card">'; a += '<div class="row no-gutters">'; //Style 1, when there is a title, display it if(title !== "") { a += '<div class="col">'; a += '<div class="card-header bg-' + type + ' text-white">'; if (dismissible) a += '<button type="button" class="close" data-dismiss="card" style="color: white;"><span aria-hidden="true">×</span><span class="sr-only">Close</span></button>'; a += title; a += '</div>'; if (content !== "") { a += '<div class="card-body">'; a += content; a += '</div>'; } a += '</div>'; } else { //left part a += '<div class="col-auto bg-' + type + ' text-white card-left-icon">'; if(type === "danger") { a += '&times;'; } else if(type === "success") { a += '&#x2713;'; } else { a += '?'; } a += '</div>'; //right part a += '<div class="col">'; a += '<div class="card-body px-2">'; if (dismissible) a += '<button type="button" class="close" data-dismiss="card"><span aria-hidden="true">×</span><span class="sr-only">Close</span></button>'; a += content; a += '</div>'; a += '</div>'; } if(additionnal_content !== undefined) { a += '<div class="col-auto">'; a += additionnal_content; a += '</div>'; } a += '</div>'; a += '</div>'; return a; } //Load an old submission input function loadOldSubmissionInput(id, with_feedback) { if(loadingSomething) return; blurTaskForm(); resetAlerts(); displayTaskInputLoadingAlert(); var url = 
$('form#task').attr("action"); jQuery.post(url, {"@action": "load_submission_input", "submissionid": id}, null, "json") .done(function(data) { if("status" in data && data['status'] == "ok" && "input" in data) { updateMainTags(data); unblurTaskForm(); load_input(id, data['input']); if(with_feedback) // load feedback in second place as it may affect the input loadOldFeedback(data); } else { displayTaskInputErrorAlert(); unblurTaskForm(); } }).fail(function() { displayTaskInputErrorAlert(); unblurTaskForm(); }); } //Load feedback from an old submission function loadOldFeedback(data) { if("status" in data && "result" in data) { if("debug" in data) displayDebugInfo(data["debug"]); if(data['result'] == "failed") displayTaskStudentAlertWithProblems(data, "danger", false); else if(data['result'] == "success") displayTaskStudentAlertWithProblems(data, "success", false); else if(data['result'] == "timeout") displayTaskStudentAlertWithProblems(data, "warning", false); else if(data['result'] == "overflow") displayTaskStudentAlertWithProblems(data, "warning", false); else if(data['result'] == "killed") displayTaskStudentAlertWithProblems(data, "warning", false); else // == "error" displayTaskStudentAlertWithProblems(data, "danger", false); } else displayTaskStudentAlertWithProblems($("#internalerror").text(), "danger", false); if("feedback_script" in data) eval(data["feedback_script"]); } //Load data from input into the form inputs function load_input(submissionid, input) { for(var key in problems_types) { window["load_input_" + problems_types[key]](submissionid, key, input); } } function load_input_code(submissionid, key, input) { if(key in codeEditors) { if(key in input) codeEditors[key].setValue(input[key], -1); else codeEditors[key].setValue("", -1); } else { var field = $("input[name='" + key + "']"); if(key in input) $(field).val(input[key]); else $(field).val(""); } } function load_input_code_single_line(submissionid, key, input) { load_input_code(submissionid, key, 
input); } function load_input_file(submissionid, key, input) { if(key in input) { var allowed_exts = $("input[name='" + key + "']").data("allowed-exts"); var url = $('form#task').attr("action") + "?submissionid=" + submissionid + "&questionid=" + key; var input_file = $('#download-input-file-' + key); input_file.attr('href', url ); input_file.css('display', 'block'); if(allowed_exts.indexOf(".pdf") >= 0) { var input_file_pdf = $('#download-input-file-pdf-' + key); input_file_pdf.attr('data', url); input_file_pdf.find("embed").attr("src", url); input_file_pdf.css('display', 'block'); } } } function load_input_multiple_choice(submissionid, key, input) { var field = $(".problem input[name='" + key + "']"); if(key in input) { if($(field).attr('type') == "checkbox" && jQuery.isArray(input[key])) { $(field).each(function () { $(this).prop('checked', input[key].indexOf($(this).val()) > -1); }); } else if($(field).attr('type') == "radio") { $(field).each(function () { $(this).prop('checked', input[key] == $(this).val()); }); } else $(field).prop('checked', false); } else $(field).prop('checked', false); } function load_input_match(submissionid, key, input) { var field = $(".problem input[name='" + key + "']"); if(key in input) $(field).prop('value', input[key]); else $(field).prop('value', ""); } // Share eval submission result on social networks function share_submission(method_id) { var submissionid = $('#my_submission').attr('data-submission-id'); window.location.replace("/auth/share/" + method_id + "?submissionid=" + submissionid) } /* * Update tags visual of HTML nodes that represent tags. 
* The choice of the color depends of data present in data["tests"] * Tags equals to true are green * Tags equals to false are red * Missing tags are blue */ function updateMainTags(data){ //Reset all tags to info style (blue) to avoid no-updated colors $('span', $('#main_tag_group')).each(function() { //If this is a alert-danger class, this is an misconception if($(this).attr('class') == "badge alert-danger"){ $(this).hide(); }else if($(this).attr('class') == "badge alert-default"){ //Remove auto tags $(this).remove(); }else{ $(this).attr('class', 'badge alert-info'); } }); if("tests" in data){ for (var tag in data["tests"]){ //Get and update the color of HTML nodes that represent tags var elem = $('#'.concat(tag.replace("*", "\\*"))); //The * makes error with JQuery so, we escape it. if(data["tests"][tag]){ //If this is a alert-danger class, this is an misconception if(elem.attr('class') == "badge alert-danger"){ elem.show(); }else{ elem.attr('class', 'badge alert-success') } } if(tag.startsWith("*auto-tag-")){ var max_length = 28; if(data["tests"][tag].length > max_length){ $('#main_tag_group').append('<span class="badge alert-default" data-toggle="tooltip" data-placement="top" data-original-title="'+data["tests"][tag]+'">'+data["tests"][tag].substring(0, max_length)+'…</span>'); } else{ $('#main_tag_group').append('<span class="badge alert-default">'+data["tests"][tag]+'</span>'); } } } } } /* * Update color of tags presents in 'elem' node. 
* 'data' is a dictionnary that should contains tag values in data["tests"][tag] = True/False */ function updateTagsToNewSubmission(elem, data){ var n_ok = 0; // number of tag equals true var tags_ok = []; var n_tot = 0; // total number of tags var badge = elem.find('span[id="tag_counter"]'); //Get all tags listed in main tag section $('span', $('#main_tag_group')).each(function() { var id = $(this).attr("id"); var color = $(this).attr("class"); //Only consider normal tag (we do not consider misconception if(color != "badge alert-danger"){ if(id in data && data[id]){ n_ok++; tags_ok.push($(this).text()); } n_tot++; } }); badge.text(n_ok); if(n_tot == n_ok){ badge.attr("class", "badge alert-success"); }else if(n_ok > 0){ badge.attr("data-toggle", "tooltip"); badge.attr("data-placement", "left"); badge.attr('data-original-title', tags_ok.join(", ")); } } /* * Loads the submission form from the local storage * and calls the load input functions for each subproblem type */ function load_from_storage(courseid,taskid){ if (typeof(Storage) !== "undefined") { var indict = JSON.parse(localStorage[courseid+"/"+taskid]); for(var problemid in problems_types) { // Submissionid is only used for files that can't be stored here // It is set to null here. window["load_input_" + problems_types[problemid]](null, problemid, indict); } } else { alert("Your browser doesn't support web storage"); } } /* * Saves a serialized version of the form which is typically * how the submission input is stored and passed to the load input function. */ function save_to_storage(courseid,taskid){ if (typeof(Storage) !== "undefined") { var data = $('form').serializeArray().reduce(function(obj, item) { if(item.name in obj) // Should be in an array case obj[item.name].push(item.value); else obj[item.name] = Boolean(is_input_list[item.name]) ? 
[item.value] : item.value; return obj; }, {}); localStorage.setItem(courseid+"/"+taskid, JSON.stringify(data)); } else { alert("Your browser doesn't support web storage"); } }
PypiClean
/DisPass-0.4.0.tar.gz/DisPass-0.4.0/dispass/commands/generate.py
# Copyright (c) 2012-2016 Tom Willemse <tom@ryuslash.org>
# Copyright (c) 2011-2018 Benjamin Althues <benjamin@babab.nl>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from pycommand import CommandBase

from dispass.algos import algorithms
from dispass.cli import CLI
from dispass.dispass import settings
from dispass.commands.decorators import read_labels


@read_labels(optional=True)
class Command(CommandBase):
    '''Generate passphrases for one or more labels'''

    usagestr = (
        'usage: dispass generate [options] <label> [<label2>] [<label3>] [...]'
    )
    description = (
        'Generate passphrases for one or more labels\n\n'
        "Use the '-v' flag to ask for password twice to avoid typing errors"
    )
    optionList = (
        ('help', ('h', False, 'show this help information')),
        ('verify', ('v', False, 'verify password')),
        ('length', ('l', '<length>', 'length of passphrase')),
        ('algo', ('a', '<algorithm>', 'algorithm to use for generation')),
        ('seqno', ('s', '<seqno>', 'sequence number to use for generation')),
        ('password', ('p', '<password>', 'password to use for generation')),
        ('stdout', ('o', False, 'output passphrase(s) directly to stdout')),
        ('silent', ('', False, 'do not show a prompt when errors occur')),
    )

    def run(self, lf):
        '''Parse the various arguments and output passphrases for each label

        Each positional argument is a label.

        For each label, it will try to find (via `FileHandler.labeltup`)
        if it is in the labelfile so other settings for the label can be
        applied. If it is not found, the default settings object defined
        as `dispass.dispass.settings` will be used. The parameters can be
        overridden through the various optargs.

        :param lf: labelfile handler injected by the ``@read_labels``
            decorator.
        :returns: ``1`` on error, ``0`` on success.
        '''
        if not self.args or self.flags['help']:
            print(self.usage)
            return 1

        # Optional overrides for label / default settings.
        algo = None
        length = None
        seqno = None

        if self.flags['algo']:
            if self.flags['algo'] in algorithms:
                algo = self.flags['algo']
            else:
                # FIX: an unknown algorithm used to be silently ignored, so
                # the label's (or default) algorithm was used instead and an
                # unexpected passphrase was generated. Fail loudly instead,
                # mirroring the length error handling below.
                print('Error: algorithm must be one of: {0}'
                      .format(', '.join(algorithms)))
                return 1

        if self.flags['length']:
            try:
                length = int(self.flags['length'])
            except ValueError:
                print('Error: length argument must be a number')
                return 1

        if self.flags['seqno']:
            seqno = self.flags['seqno']

        console = CLI(lf)
        console.verifyPassword = self.flags['verify']

        if self.flags['password']:
            password = self.flags['password']
        else:
            password = console.passwordPrompt()

        for arg in self.args:
            labeltup = lf.labeltup(arg)
            if labeltup:
                # Label found in the labelfile: fall back to its stored
                # length / algorithm / seqno when no override was given.
                console.generate(password, (arg, length or labeltup[1],
                                            algo or labeltup[2],
                                            seqno or labeltup[3], False))
            else:
                # Unknown label: use the global default settings.
                console.generate(password, (
                    arg,
                    length or settings.passphrase_length,
                    algo or settings.algorithm,
                    seqno or settings.sequence_number,
                    False))
        del password

        if self.flags['stdout']:
            console.useCurses = False
            console.scriptableIO = True

        if not console.output():
            print('Error: could not generate keys')
            return 1
        else:
            return 0
PypiClean
/DataTig-0.5.0.tar.gz/DataTig-0.5.0/datatig/sqliteversioned.py
import hashlib
import json
import sqlite3
from contextlib import closing

from datatig.models.siteconfig import SiteConfigModel

# from .exceptions import DuplicateRecordIdException
from .models.error import ErrorModel
from .models.git_commit import GitCommitModel
from .models.record import RecordModel
from .models.record_error import RecordErrorModel


class DataStoreSQLiteVersioned:
    """SQLite-backed, versioned data store.

    Records, site configs and validation errors are stored per git commit;
    refs (branches/tags) point at commits. Config and record data are
    content-addressed (config hash / md5 of serialised data) so identical
    content is stored once and shared between commits.
    """

    def __init__(self, out_filename: str):
        # Creates the database file and its (empty) schema.
        self._out_filename: str = out_filename
        self._connection = sqlite3.connect(out_filename)
        # Row factory so results can be accessed by column name.
        self._connection.row_factory = sqlite3.Row
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                """CREATE TABLE config (
                id INTEGER PRIMARY KEY,
                title TEXT,
                description TEXT,
                data TEXT,
                hash TEXT UNIQUE
                )"""
            )
            cur.execute(
                """CREATE TABLE git_commit (
                id TEXT PRIMARY KEY,
                config_id INTEGER,
                FOREIGN KEY(config_id) REFERENCES config(id)
                )"""
            )
            # ON CONFLICT REPLACE: re-storing a ref simply moves it to the
            # new commit.
            cur.execute(
                """CREATE TABLE git_ref (
                id TEXT PRIMARY KEY ON CONFLICT REPLACE,
                commit_id TEXT,
                FOREIGN KEY(commit_id) REFERENCES git_commit(id)
                )"""
            )
            cur.execute(
                """CREATE TABLE error (
                commit_id TEXT,
                filename TEXT,
                message TEXT,
                FOREIGN KEY(commit_id) REFERENCES git_commit(id)
                )"""
            )
            cur.execute(
                """CREATE TABLE type (
                config_id INTEGER,
                id TEXT ,
                PRIMARY KEY(config_id, id),
                FOREIGN KEY(config_id) REFERENCES config(id)
                )"""
            )
            cur.execute(
                """CREATE TABLE data (
                id INTEGER PRIMARY KEY,
                data TEXT,
                hash TEXT UNIQUE
                )"""
            )
            cur.execute(
                """CREATE TABLE commit_type_record (
                commit_id TEXT,
                type_id TEXT,
                record_id TEXT,
                git_filename TEXT,
                format TEXT,
                data_id INTEGER,
                PRIMARY KEY(commit_id, type_id, record_id),
                FOREIGN KEY(commit_id) REFERENCES git_commit(id),
                FOREIGN KEY(type_id) REFERENCES type(id),
                FOREIGN KEY(record_id) REFERENCES record(id)
                FOREIGN KEY(data_id) REFERENCES data(id)
                )"""
            )
            cur.execute(
                """CREATE TABLE commit_type_record_error(
                commit_id TEXT,
                type_id TEXT,
                record_id TEXT,
                message TEXT,
                data_path TEXT,
                schema_path TEXT,
                generator TEXT,
                FOREIGN KEY(commit_id) REFERENCES git_commit(id),
                FOREIGN KEY(type_id) REFERENCES type(id),
                FOREIGN KEY(record_id) REFERENCES record(id)
                )"""
            )
        self._connection.commit()

    def store_config(self, site_config: SiteConfigModel) -> int:
        """Store a site config (deduplicated by hash) and return its row id."""
        config_hash: str = site_config.get_hash()
        with closing(self._connection.cursor()) as cur:
            # Look for existing
            cur.execute("SELECT id FROM config WHERE hash=?", [config_hash])
            row = cur.fetchone()
            if row:
                return row["id"]
            # Add new
            cur.execute(
                """INSERT INTO config (hash, data, title, description) VALUES (?, ?, ?, ?)""",
                [
                    config_hash,
                    json.dumps(site_config.get_serialised()),
                    site_config.get_title(),
                    site_config.get_description(),
                ],
            )
            config_id: int = cur.lastrowid  # type: ignore
            # Add types (renamed loop variable; `type` shadowed the builtin)
            for type_model in site_config.get_types().values():
                cur.execute(
                    """INSERT INTO type (config_id, id) VALUES (?, ?)""",
                    [config_id, type_model.get_id()],
                )
            self._connection.commit()
            return config_id

    def store_git_commit(self, git_commit: GitCommitModel, config_id: int):
        """Store a commit (no-op if already known) and (re)point its refs."""
        with closing(self._connection.cursor()) as cur:
            # Look for existing
            cur.execute(
                "SELECT id FROM git_commit WHERE id=?", [git_commit.get_commit_hash()]
            )
            row = cur.fetchone()
            if row:
                return
            # Add new
            cur.execute(
                """INSERT INTO git_commit (id, config_id) VALUES (?, ?)""",
                [git_commit.get_commit_hash(), config_id],
            )
            # Refs!
            for ref in git_commit.get_refs():
                cur.execute(
                    """INSERT INTO git_ref (id, commit_id) VALUES (?, ?)""",
                    [ref, git_commit.get_commit_hash()],
                )
            # Done
            self._connection.commit()

    def store_record(self, git_commit: GitCommitModel, record: RecordModel):
        """Store a record for a commit, deduplicating the data blob by hash."""
        with closing(self._connection.cursor()) as cur:
            data_str = json.dumps(record.get_data(), default=str, sort_keys=True)
            data_hash = hashlib.md5(data_str.encode()).hexdigest()
            # data - existing or new
            cur.execute("SELECT id FROM data WHERE hash=?", [data_hash])
            row = cur.fetchone()
            if row:
                data_id = row["id"]
            else:
                cur.execute(
                    """INSERT INTO data (data, hash) VALUES (?, ?)""",
                    [data_str, data_hash],
                )
                data_id = cur.lastrowid
            # commit_type_record - insert only when not already present
            cur.execute(
                "SELECT * FROM commit_type_record WHERE commit_id=? AND type_id=? AND record_id=?",
                [
                    git_commit.get_commit_hash(),
                    record.get_type().get_id(),
                    record.get_id(),
                ],
            )
            row = cur.fetchone()
            if row is None:
                cur.execute(
                    """INSERT INTO  commit_type_record (
                    commit_id, type_id, record_id, data_id, git_filename, format
                    ) VALUES (?, ?, ?, ?, ?, ?)""",
                    [
                        git_commit.get_commit_hash(),
                        record.get_type().get_id(),
                        record.get_id(),
                        data_id,
                        record.get_git_filename(),
                        record.get_format(),
                    ],
                )
            self._connection.commit()

    def store_error(self, git_commit: GitCommitModel, error: ErrorModel) -> None:
        """Store a site-level (non-record) error for a commit."""
        with closing(self._connection.cursor()) as cur:
            insert_data = [
                git_commit.get_commit_hash(),
                error.get_filename(),
                error.get_message(),
            ]
            cur.execute(
                """INSERT INTO error (
                commit_id, filename, message
                ) VALUES (?, ?, ?)""",
                insert_data,
            )
            self._connection.commit()

    def get_all_errors_generator(self, git_commit: GitCommitModel):
        """Yield every site-level ErrorModel stored for the given commit."""
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT * FROM error WHERE commit_id=?", [git_commit.get_commit_hash()]
            )
            for data in cur.fetchall():
                m = ErrorModel()
                m.load_from_database(data)
                yield m

    def get_count_site_errors(self, git_commit: GitCommitModel) -> int:
        """Return the number of site-level errors stored for the commit."""
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT count(*) AS c FROM error WHERE commit_id=?",
                [git_commit.get_commit_hash()],
            )
            return cur.fetchone()["c"]

    def get_file_name(self) -> str:
        """Return the filename of the underlying SQLite database."""
        return self._out_filename

    def get_git_refs(self) -> list:
        """Return all known refs, each as a GitCommitModel carrying one ref."""
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT * FROM git_ref",
                [],
            )
            return [GitCommitModel(i["commit_id"], [i["id"]]) for i in cur.fetchall()]

    def is_ref_known(self, ref) -> bool:
        """Return True if `ref` is a known ref name or commit hash."""
        with closing(self._connection.cursor()) as cur:
            # ref?
            cur.execute(
                "SELECT * FROM git_ref WHERE id=?",
                [ref],
            )
            row = cur.fetchone()
            if row:
                return True
            # A commit can be a ref too?
            cur.execute(
                "SELECT * FROM git_commit WHERE id=?",
                [ref],
            )
            row = cur.fetchone()
            if row:
                return True
            return False

    def resolve_ref(self, ref) -> str:
        """Resolve a ref name or commit hash to a commit hash, or raise."""
        with closing(self._connection.cursor()) as cur:
            # Ref?
            cur.execute(
                "SELECT commit_id FROM git_ref WHERE id=?",
                [ref],
            )
            row = cur.fetchone()
            if row:
                return row["commit_id"]
            # Could have just been passed a commit?
            cur.execute(
                "SELECT id FROM git_commit WHERE id=?",
                [ref],
            )
            row = cur.fetchone()
            if row:
                return row["id"]
            # We failed
            raise Exception("Ref not found!")

    def is_config_same_between_refs(self, ref1: str, ref2: str) -> bool:
        """Return True if both refs point at commits sharing the same config."""
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT config_id FROM git_commit WHERE id=?",
                [self.resolve_ref(ref1)],
            )
            config1: int = cur.fetchone()["config_id"]
            cur.execute(
                "SELECT config_id FROM git_commit WHERE id=?",
                [self.resolve_ref(ref2)],
            )
            config2: int = cur.fetchone()["config_id"]
            return config1 == config2

    def get_data_differences_between_refs(self, ref1: str, ref2: str) -> list:
        """List records edited, removed or added between two refs.

        Returns dicts of {"type_id", "record_id", "action"} where action is
        one of "edited", "removed", "added".
        """
        commit1 = self.resolve_ref(ref1)
        commit2 = self.resolve_ref(ref2)
        out = []
        with closing(self._connection.cursor()) as cur:
            # compare data items that exist in both
            cur.execute(
                "SELECT c1.type_id, c1.record_id FROM commit_type_record AS c1 "
                + "JOIN commit_type_record AS c2 ON c1.type_id = c2.type_id AND c1.record_id = c2.record_id "
                + "WHERE c1.commit_id=? AND c2.commit_id = ? "
                + "AND c1.data_id != c2.data_id",
                [commit1, commit2],
            )
            for row in cur.fetchall():
                out.append(
                    {
                        "type_id": row["type_id"],
                        "record_id": row["record_id"],
                        "action": "edited",
                    }
                )
            # Items that have been removed or added
            for params, action in [
                ([commit2, commit1], "removed"),
                ([commit1, commit2], "added"),
            ]:
                cur.execute(
                    "SELECT c1.type_id, c1.record_id FROM commit_type_record AS c1 "
                    + "LEFT JOIN commit_type_record AS c2 ON c1.type_id = c2.type_id AND c1.record_id = c2.record_id AND c2.commit_id = ? "
                    + "WHERE c1.commit_id=? "
                    + "AND c2.data_id IS NULL",
                    params,
                )
                for row in cur.fetchall():
                    out.append(
                        {
                            "type_id": row["type_id"],
                            "record_id": row["record_id"],
                            "action": action,
                        }
                    )
            # return
            return out

    def get_errors_added_between_refs(self, ref1: str, ref2: str) -> list:
        """Return site-level errors present in ref2 but not in ref1."""
        commit1 = self.resolve_ref(ref1)
        commit2 = self.resolve_ref(ref2)
        out = []
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT e2.* "
                + "FROM error AS e2 "
                + "LEFT JOIN error AS e1 ON e1.filename = e2.filename AND e1.message = e2.message AND e1.commit_id=? "
                + "WHERE e2.commit_id = ? AND e1.filename IS NULL",
                [commit1, commit2],
            )
            for row in cur.fetchall():
                m = ErrorModel()
                m.load_from_database(row)
                out.append(m)
        return out

    def get_errors_removed_between_refs(self, ref1: str, ref2: str) -> list:
        """Return site-level errors present in ref1 but not in ref2."""
        # "removed" is just "added" with the refs swapped.
        return self.get_errors_added_between_refs(ref2, ref1)

    def get_config(self, ref_or_commit: str):
        """Return the SiteConfigModel in force at the given ref/commit."""
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT config_id FROM git_commit WHERE id=?",
                [self.resolve_ref(ref_or_commit)],
            )
            config_id: int = cur.fetchone()["config_id"]
            cur.execute(
                "SELECT data FROM config WHERE id=?",
                [config_id],
            )
            config_row = cur.fetchone()
            config: SiteConfigModel = SiteConfigModel("/source_dir_does_not_exist")
            config.load_from_serialised(json.loads(config_row["data"]))
            return config

    def get_ids_in_type(self, ref_or_commit: str, type_id: str):
        """Return all record ids of a type at the given ref/commit."""
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT record_id FROM commit_type_record WHERE type_id=? AND commit_id=?",
                [type_id, self.resolve_ref(ref_or_commit)],
            )
            return [i["record_id"] for i in cur.fetchall()]

    def get_item(self, ref_or_commit: str, type_id: str, record_id: str):
        """Return a RecordModel (with data and errors) or None if not found."""
        commit_hash = self.resolve_ref(ref_or_commit)
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT * FROM commit_type_record WHERE commit_id=? AND type_id=? AND record_id=?",
                [commit_hash, type_id, record_id],
            )
            commit_type_record_row = cur.fetchone()
            if commit_type_record_row:
                # Data
                cur.execute(
                    "SELECT data FROM data WHERE id=?",
                    [commit_type_record_row["data_id"]],
                )
                data_row = cur.fetchone()
                # Errors
                cur.execute(
                    "SELECT * FROM commit_type_record_error WHERE commit_id=? AND type_id=? AND record_id=?",
                    [commit_hash, type_id, record_id],
                )
                errors_data = cur.fetchall()
                # Create model and return
                record = RecordModel(
                    # TODO self.get_config().get_type() is very inefficient,
                    # would be better if a type class instance was passed
                    # instead of type_id
                    type=self.get_config(self.resolve_ref(ref_or_commit)).get_type(
                        type_id
                    ),
                    id=record_id,
                )
                record.load_from_versioned_database(
                    commit_type_record_row, data_row, errors_data=errors_data
                )
                return record

    def store_json_schema_validation_errors(
        self, ref_or_commit: str, type_id: str, item_id: str, errors
    ) -> None:
        """Store JSON-schema validation errors for one record at a ref/commit."""
        # FIX: resolve to a commit hash before storing. Every reader of this
        # table (get_item, get_record_errors_*) resolves refs first, so errors
        # stored under a raw ref name could never be found again.
        commit_id = self.resolve_ref(ref_or_commit)
        with closing(self._connection.cursor()) as cur:
            for error in errors:
                insert_data = [
                    commit_id,
                    type_id,
                    item_id,
                    error["message"],
                    error["path_str"],
                    error["schema_path_str"],
                    "jsonschema",
                ]
                cur.execute(
                    """INSERT INTO commit_type_record_error (
                    commit_id, type_id, record_id, message, data_path, schema_path, generator
                    ) VALUES (?, ?, ?, ?, ?, ?, ?)""",
                    insert_data,
                )
            self._connection.commit()

    def get_record_errors_added_between_refs_for_record(
        self, ref1: str, ref2: str, type_id: str, record_id: str
    ) -> list:
        """Return record errors present at ref2 but not at ref1."""
        commit1 = self.resolve_ref(ref1)
        commit2 = self.resolve_ref(ref2)
        out = []
        with closing(self._connection.cursor()) as cur:
            cur.execute(
                "SELECT e2.* "
                + "FROM commit_type_record_error AS e2 "
                + "LEFT JOIN commit_type_record_error AS e1 ON "
                + "e1.message = e2.message AND e1.data_path = e2.data_path "
                + "AND e1.schema_path = e2.schema_path AND e1.generator = e2.generator "
                + "AND e1.commit_id=? AND e1.type_id=? AND e1.record_id=? "
                + "WHERE e2.commit_id = ? AND e2.type_id=? AND e2.record_id=? AND e1.commit_id IS NULL",
                [commit1, type_id, record_id, commit2, type_id, record_id],
            )
            for row in cur.fetchall():
                m = RecordErrorModel()
                m.load_from_database(row)
                out.append(m)
        return out

    def get_record_errors_removed_between_refs_for_record(
        self, ref1: str, ref2: str, type_id: str, record_id: str
    ) -> list:
        """Return record errors present at ref1 but not at ref2."""
        return self.get_record_errors_added_between_refs_for_record(
            ref2, ref1, type_id, record_id
        )
PypiClean
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/validate/check.js.uncompressed.js
define("dojox/validate/check", ["dojo/_base/kernel", "dojo/_base/lang", "./_base"], function(kernel, lang, validate){

kernel.experimental("dojox.validate.check");
/*=====
	validate = dojox.validate;
=====*/

/**
	FIXME: How much does this overlap with dojox.form.Manager and friends?

	Procedural API Description

		The main aim is to make input validation expressible in a simple format.
		You define profiles which declare the required and optional fields and
		any constraints they might have.  The results are provided as an object
		that makes it easy to handle missing and invalid input.

	Usage

		var results = dojox.validate.check(form, profile);

	Profile Object

		var profile = {
			// filters change the field value and are applied before validation.
			trim: ["tx1", "tx2"],
			uppercase: ["tx9"],
			lowercase: ["tx5", "tx6", "tx7"],
			ucfirst: ["tx10"],
			digit: ["tx11"],

			// required input fields that are blank will be reported missing.
			// required radio button groups and drop-down lists with no selection will be reported missing.
			// checkbox groups and selectboxes can be required to have more than one value selected.
			// List required fields by name and use this notation to require more than one value: {checkboxgroup: 2}, {selectboxname: 3}.
			required: ["tx7", "tx8", "pw1", "ta1", "rb1", "rb2", "cb3", "s1", {"doubledip":2}, {"tripledip":3}],

			// dependant/conditional fields are required if the target field is present and not blank.
			// At present only textbox, password, and textarea fields are supported.
			dependencies: {
				cc_exp: "cc_no",
				cc_type: "cc_no"
			},

			// Fields can be validated using any boolean valued function.
			// Use arrays to specify parameters in addition to the field value.
			constraints: {
				field_name1: myValidationFunction,
				field_name2: dojox.validate.isInteger,
				field_name3: [myValidationFunction, additional parameters],
				field_name4: [dojox.validate.isValidDate, "YYYY.MM.DD"],
				field_name5: [dojox.validate.isEmailAddress, false, true]
			},

			// Confirm is a sort of conditional validation.
			// It associates each field in its property list with another field whose value should be equal.
			// If the values are not equal, the field in the property list is reported as Invalid. Unless the target field is blank.
			confirm: {
				email_confirm: "email",
				pw2: "pw1"
			}
		};

	Results Object

		isSuccessful(): Returns true if there were no invalid or missing fields, else it returns false.
		hasMissing():  Returns true if the results contain any missing fields.
		getMissing():  Returns a list of required fields that have values missing.
		isMissing(field):  Returns true if the field is required and the value is missing.
		hasInvalid():  Returns true if the results contain fields with invalid data.
		getInvalid():  Returns a list of fields that have invalid values.
		isInvalid(field):  Returns true if the field has an invalid value.
*/

validate.check = function(/*HTMLFormElement*/form, /*Object*/profile){
	// summary: validates user input of an HTML form based on input profile
	//
	// description:
	//	returns an object that contains several methods summarizing the results of the validation
	//
	// form: form to be validated
	// profile: specifies how the form fields are to be validated
	// {trim:Array, uppercase:Array, lowercase:Array, ucfirst:Array, digit:Array,
	// required:Array, dependencies:Object, constraints:Object, confirm:Object}

	// Essentially private properties of results object
	var missing = [];
	var invalid = [];

	// results object summarizes the validation
	var results = {
		isSuccessful: function() {return ( !this.hasInvalid() && !this.hasMissing() );},
		hasMissing: function() {return ( missing.length > 0 );},
		getMissing: function() {return missing;},
		isMissing: function(elemname) {
			for(var i = 0; i < missing.length; i++){
				if(elemname == missing[i]){ return true; }
			}
			return false;
		},
		hasInvalid: function() {return ( invalid.length > 0 );},
		getInvalid: function() {return invalid;},
		isInvalid: function(elemname){
			for(var i = 0; i < invalid.length; i++){
				if(elemname == invalid[i]){ return true; }
			}
			return false;
		}
	};

	var _undef = function(name,object){
		return (typeof object[name] == "undefined");
	};

	// Filters are applied before fields are validated.
	// Trim removes white space at the front and end of the fields.
	if(profile.trim instanceof Array){
		for(var i = 0; i < profile.trim.length; i++){
			var elem = form[profile.trim[i]];
			if(_undef("type", elem) || elem.type != "text" && elem.type != "textarea" && elem.type != "password"){ continue; }
			elem.value = elem.value.replace(/(^\s*|\s*$)/g, "");
		}
	}
	// Convert to uppercase
	if(profile.uppercase instanceof Array){
		for(var i = 0; i < profile.uppercase.length; i++){
			var elem = form[profile.uppercase[i]];
			if(_undef("type", elem) || elem.type != "text" && elem.type != "textarea" && elem.type != "password"){ continue; }
			elem.value = elem.value.toUpperCase();
		}
	}
	// Convert to lowercase
	if(profile.lowercase instanceof Array){
		for (var i = 0; i < profile.lowercase.length; i++){
			var elem = form[profile.lowercase[i]];
			if(_undef("type", elem) || elem.type != "text" && elem.type != "textarea" && elem.type != "password"){ continue; }
			elem.value = elem.value.toLowerCase();
		}
	}
	// Uppercase first letter
	if(profile.ucfirst instanceof Array){
		for(var i = 0; i < profile.ucfirst.length; i++){
			var elem = form[profile.ucfirst[i]];
			if(_undef("type", elem) || elem.type != "text" && elem.type != "textarea" && elem.type != "password"){ continue; }
			elem.value = elem.value.replace(/\b\w+\b/g, function(word) { return word.substring(0,1).toUpperCase() + word.substring(1).toLowerCase(); });
		}
	}
	// Remove non digits characters from the input.
	if(profile.digit instanceof Array){
		for(var i = 0; i < profile.digit.length; i++){
			var elem = form[profile.digit[i]];
			if(_undef("type", elem) || elem.type != "text" && elem.type != "textarea" && elem.type != "password"){ continue; }
			elem.value = elem.value.replace(/\D/g, "");
		}
	}

	// See if required input fields have values missing.
	if(profile.required instanceof Array){
		for(var i = 0; i < profile.required.length; i++){
			if(!lang.isString(profile.required[i])){ continue; }
			var elem = form[profile.required[i]];
			// Are textbox, textarea, or password fields blank.
			if(!_undef("type", elem) && (elem.type == "text" || elem.type == "textarea" || elem.type == "password" || elem.type == "file") && /^\s*$/.test(elem.value)){
				missing[missing.length] = elem.name;
			}
			// Does drop-down box have option selected.
			else if(!_undef("type", elem) && (elem.type == "select-one" || elem.type == "select-multiple")
						&& (elem.selectedIndex == -1
						|| /^\s*$/.test(elem.options[elem.selectedIndex].value))){
				missing[missing.length] = elem.name;
			}
			// Does radio button group (or check box group) have option checked.
			else if(elem instanceof Array){
				var checked = false;
				for(var j = 0; j < elem.length; j++){
					if (elem[j].checked) { checked = true; }
				}
				if(!checked){
					missing[missing.length] = elem[0].name;
				}
			}
		}
	}

	// See if checkbox groups and select boxes have x number of required values.
	if(profile.required instanceof Array){
		for (var i = 0; i < profile.required.length; i++){
			if(!lang.isObject(profile.required[i])){ continue; }
			var elem, numRequired;
			for(var name in profile.required[i]){
				elem = form[name];
				numRequired = profile.required[i][name];
			}
			// case 1: elem is a check box group
			if(elem instanceof Array){
				var checked = 0;
				for(var j = 0; j < elem.length; j++){
					if(elem[j].checked){ checked++; }
				}
				if(checked < numRequired){
					missing[missing.length] = elem[0].name;
				}
			}
			// case 2: elem is a select box
			else if(!_undef("type", elem) && elem.type == "select-multiple" ){
				var selected = 0;
				for(var j = 0; j < elem.options.length; j++){
					if (elem.options[j].selected && !/^\s*$/.test(elem.options[j].value)) { selected++; }
				}
				if(selected < numRequired){
					missing[missing.length] = elem.name;
				}
			}
		}
	}

	// Dependent fields are required when the target field is present (not blank).
	// Todo: Support dependent and target fields that are radio button groups, or select drop-down lists.
	// Todo: Make the dependency based on a specific value of the target field.
	// Todo: allow dependent fields to have several required values, like {checkboxgroup: 3}.
	if(lang.isObject(profile.dependencies)){
		// properties of dependencies object are the names of dependent fields to be checked
		// FIX: declare the loop variable -- the original `for(name in ...)`
		// assigned to the implicit global `name` (window.name), which throws
		// under strict mode and clobbers the window name.
		for(var name in profile.dependencies){
			var elem = form[name];	// the dependent element
			if(_undef("type", elem)){continue;}
			if(elem.type != "text" && elem.type != "textarea" && elem.type != "password"){ continue; } // limited support
			if(/\S+/.test(elem.value)){ continue; }	// has a value already
			if(results.isMissing(elem.name)){ continue; }	// already listed as missing
			var target = form[profile.dependencies[name]];
			if(target.type != "text" && target.type != "textarea" && target.type != "password"){ continue; }	// limited support
			if(/^\s*$/.test(target.value)){ continue; }	// skip if blank
			missing[missing.length] = elem.name;	// ok the dependent field is missing
		}
	}

	// Find invalid input fields.
	if(lang.isObject(profile.constraints)){
		// constraint properties are the names of fields to be validated
		// FIX: declare the loop variable (see note above).
		for(var name in profile.constraints){
			var elem = form[name];
			if(!elem) {continue;}

			// skip if blank - its optional unless required, in which case it
			// is already listed as missing.
			if(!_undef("tagName",elem)
				&& (elem.tagName.toLowerCase().indexOf("input") >= 0
					|| elem.tagName.toLowerCase().indexOf("textarea") >= 0)
				&& /^\s*$/.test(elem.value)){
				continue;
			}

			var isValid = true;
			// case 1: constraint value is validation function
			if(lang.isFunction(profile.constraints[name])){
				isValid = profile.constraints[name](elem.value);
			}else if(lang.isArray(profile.constraints[name])){
				// handle nested arrays for multiple constraints
				if(lang.isArray(profile.constraints[name][0])){
					for (var i=0; i<profile.constraints[name].length; i++){
						isValid = validate.evaluateConstraint(profile, profile.constraints[name][i], name, elem);
						if(!isValid){ break; }
					}
				}else{
					// case 2: constraint value is array, first elem is function,
					// tail is parameters
					isValid = validate.evaluateConstraint(profile, profile.constraints[name], name, elem);
				}
			}
			if(!isValid){ invalid[invalid.length] = elem.name; }
		}
	}

	// Find unequal confirm fields and report them as Invalid.
	if(lang.isObject(profile.confirm)){
		// FIX: declare the loop variable (see note above).
		for(var name in profile.confirm){
			var elem = form[name];	// the confirm element
			var target = form[profile.confirm[name]];
			if (_undef("type", elem) || _undef("type", target) || (elem.type != "text" && elem.type != "textarea" && elem.type != "password")
				||(target.type != elem.type)
				||(target.value == elem.value)	// it's valid
				||(results.isInvalid(elem.name))// already listed as invalid
				||(/^\s*$/.test(target.value)))	// skip if blank - only confirm if target has a value
			{
				continue;
			}
			invalid[invalid.length] = elem.name;
		}
	}
	return results; // Object
};

//TODO: evaluateConstraint doesn't use profile or fieldName args?
validate.evaluateConstraint=function(profile, /*Array*/constraint, fieldName, elem){
	// summary:
	//	Evaluates dojo.validate.check() constraints that are specified as array
	//	arguments
	//
	// description: The arrays are expected to be in the format of:
	//      constraints:{
	//              fieldName: [functionToCall, param1, param2, etc.],
	//              fieldName: [[functionToCallFirst, param1],[functionToCallSecond,param2]]
	//      }
	//
	// This function evaluates a single array function in the format of:
	//      [functionName, argument1, argument2, etc]
	//
	// The function will be parsed out and evaluated against the incoming parameters.
	//
	// profile: The dojo.validate.check() profile that this evaluation is against.
	// constraint: The single [] array of function and arguments for the function.
	// fieldName: The form dom name of the field being validated.
	// elem: The form element field.

	var isValidSomething = constraint[0];
	var params = constraint.slice(1);
	params.unshift(elem.value);
	if(typeof isValidSomething != "undefined"){
		return isValidSomething.apply(null, params);
	}
	return false; // Boolean
};

return validate.check;

});
PypiClean
/Fchierhelloworld-1.2.0.tar.gz/Fchierhelloworld-1.2.0/README.md
# M1-2022-git ## Introduction This repository is a fork of the previous repository. The bugs in the converter have been fixed and now we want to publish our work as a publicly available Python package. ## Getting started You should be working from **your own repository** that is a fork of M1-2022-git-workflow. In order to run this project, you will have to follow these steps. 1. Clone this project 2. Move into the project folder ``` shell cd M1-2022-git-workflow ``` 3. Set up a virtual environment * Create the virtual environment ```shell python -m venv myvenv ``` * Activate the virtual environment ```shell . myvenv/bin/activate ``` 4. Install pytest ```shell pip install -U pytest ``` 5. Execute the main script ```shell python main.py ``` ## Instructions ### Merge feature branch into dev Now that the converter has no more bugs, we want to merge our feature branch into the dev branch. We will do a pull request (on your own repo). On GitHub, go to the "Pull requests" tab and click "New pull request". Select *base* and *compare*. Remember: we want to merge *decimal-binary-converter* into *dev*. Then, *Create the pull request*. Add some description of the work you have done and publish the pull request. **Note**: We could have just merged *decimal-binary-converter* into *dev* from the command line. However, when working on a real project, you probably won't have the rights to do so. Remember: dev is a public branch, so all the work going down there should be carefully reviewed and tested. ### Publish your work We now have an awesome binary/decimal two-way converter! It's time to publish it so the rest of the world can enjoy it! We will publish it on PyPI (Python Package Index), which is a public repository of software for Python. 1. First, you have to create an account on [Pypi.org](https://pypi.org/) 2. Then, in your repository, you will find a [.github directory](/.github) 3. Inside this directory, you will find .yml files that define workflows (using GitHub actions) 4.
Open the .yml files and take time to understand them. Do not hesitate to ask the teacher about them. #### Hands on! 5. We want to create a workflow that will, on publish, test our code and, if tests succeed, then publish our package on PyPI 6. In GitHub, click on the *Actions* tab, then click on *New workflow* 7. Choose among the suggestions the template that best suits your use case and click *Configure* 8. You may find some useful information in the [Python documentation](https://packaging.python.org/en/latest/tutorials/packaging-projects/) 9. When creating a release on GitHub, do not forget to create a tag corresponding to that release #### Pull request 10. Once the workflow is set up and tested (you have published your package using it), you can open a pull request to the [original repository](https://github.com/rgt-yncrea/M1-2022-git-workflow)
PypiClean