id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
/FinDates-0.2.zip/FinDates-0.2/findates/dateutils.py | import datetime
import string
# Not that we expect that number of days in the week changes any time soon
# but having symbolic name in the source code is more descriptive and easier to search
DAYS_IN_WEEK = 7
MONTHS_IN_YEAR = 12
DAYS_IN_NON_LEAP_YEAR = 365
DAYS_IN_LEAP_YEAR = 366
# Map of date-string "shapes" to strptime format strings.  A shape is the
# input with every digit collapsed to 'd' and every ASCII letter to 'L',
# e.g. "17-may-2011" -> "dd-LLL-dddd".  sniff_datetime_format() builds the
# shape of its input and looks it up here.
_datetime_format_strings = {
    'dd-LLL-dddd dd:dd:dd': '%d-%b-%Y %H:%M:%S',
    'dd-LLL-dddd': '%d-%b-%Y',
    'd-LLL-dddd': '%d-%b-%Y',
    'dd/dd/dd': '%m/%d/%y',
    'd/dd/dd': '%m/%d/%y',
    'dd/d/dd': '%m/%d/%y',
    'd/d/dd': '%m/%d/%y',
    'dd/dd/dddd': '%m/%d/%Y',
    'dd/d/dddd': '%m/%d/%Y',
    'd/dd/dddd': '%m/%d/%Y',
    'd/d/dddd': '%m/%d/%Y',
    'dddd-LLL-dd': '%Y-%b-%d',
    'dddd-LLL-d': '%Y-%b-%d',
    'dd-LLL-dd': '%d-%b-%y',
    'd-LLL-dd': '%d-%b-%y',
    'dddd-dd-dd': '%Y-%m-%d',
    'dddd-dd-d': '%Y-%m-%d',
    'dddd-d-dd': '%Y-%m-%d',
    'dddd-d-d': '%Y-%m-%d',
    'dd.dd.dddd': '%d.%m.%Y',
    'd.dd.dddd': '%d.%m.%Y',
    'dd.d.dddd': '%d.%m.%Y',
    'd.d.dddd': '%d.%m.%Y',
    "d LLL dddd": '%d %b %Y',
    "dd LLL dddd": '%d %b %Y',
    "dLLLdddd": '%d%b%Y',
    "ddLLLdddd": '%d%b%Y',
    "dddddddd": '%Y%m%d',
}


def sniff_datetime_format(dtstr):
    """Try to recognize the date representation format from a date string.

    Args:
        dtstr: A date string such as "2011-05-17" or "17-May-2011".

    Returns:
        A strptime-compatible format string, e.g. '%Y-%m-%d'.

    Raises:
        ValueError: If the string does not match any known format.
    """
    dtstr = dtstr.lower()
    # Bug fix: the original used the Python-2-only ``string.maketrans``;
    # on Python 3 the equivalent for str objects is ``str.maketrans``.
    ttab = str.maketrans("0123456789abcdefghijklmnopqrstuvwxyz",
                         "ddddddddddLLLLLLLLLLLLLLLLLLLLLLLLLL")
    fmtstring = dtstr.translate(ttab)
    if fmtstring in _datetime_format_strings:
        return _datetime_format_strings[fmtstring]
    else:
        raise ValueError("Incorrect date format string: %s" % fmtstring)
def asdatetime(dt):
    """Coerce one of several possible representations to ``datetime.datetime``.

    Accepts a date string (format auto-detected) or a ``datetime.date`` /
    ``datetime.datetime`` instance.  Any time-of-day component is discarded.

    Raises:
        ValueError: If *dt* is none of the supported types.
    """
    if isinstance(dt, str):
        fmt = sniff_datetime_format(dt)
        return datetime.datetime.strptime(dt, fmt)
    if isinstance(dt, datetime.date):
        # datetime.datetime is a subclass of datetime.date, so this branch
        # covers both; only year/month/day are kept.
        return datetime.datetime(dt.year, dt.month, dt.day)
    raise ValueError("Cannot extract date from: %s" % repr(dt))
def asyear(dt):
    """Extract the year from an int, date string, ``date`` or ``datetime``.

    Args:
        dt: An integer year, a parseable date string, or a
            ``datetime.date`` / ``datetime.datetime`` instance.

    Returns:
        The year as an int.

    Raises:
        ValueError: If *dt* is none of the supported types.
    """
    if isinstance(dt, int):
        return dt
    elif isinstance(dt, datetime.date):
        # datetime.datetime is a subclass of datetime.date, so this single
        # check covers both types.
        return dt.year
    elif isinstance(dt, str):
        return asdatetime(dt).year
    else:
        # Bug fix: the message was previously passed as a second positional
        # argument to ValueError, so '%s' was never interpolated.
        raise ValueError('Cannot extract year value from %s' % repr(dt))
def leapyear(dt):
    """Return True if the year of *dt* is a leap year.

    The Julian rule (every 4th year) is applied for years up to and
    including 1752, the Gregorian rule afterwards.
    """
    yr = asyear(dt)
    if yr % 4 != 0:
        return False
    if yr <= 1752:
        return True
    return yr % 100 != 0 or yr % 400 == 0
def yeardays(dt):
    """Return the number of days in the year of *dt* (365 or 366)."""
    return DAYS_IN_LEAP_YEAR if leapyear(dt) else DAYS_IN_NON_LEAP_YEAR
# Days per month in a non-leap year, 1-indexed by month number (index 0 unused).
_days_in_month = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
# Cumulative days before the start of each month in a non-leap year,
# 1-indexed by month number (index 0 unused).
_days_in_month_so_far = [0, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]


def eom(year, month):
    """Return the last day of *month* in *year* as a ``datetime`` (end of month)."""
    days = _days_in_month[month]
    if month == 2 and leapyear(year):
        days = 29  # February gains a day in leap years
    return datetime.datetime(year, month, days)
def iseom(dt):
    """Return True if *dt* falls on the last day of its month."""
    moment = asdatetime(dt)
    return moment == eom(moment.year, moment.month)
def lweekday(year, month, weekday):
    """Date of the last occurrence of *weekday* in *month* of *year*.

    *weekday* follows ``datetime.weekday()`` numbering (0=Monday .. 6=Sunday).
    """
    end_of_month = eom(year, month)
    # Step back from the end of the month to the requested weekday.
    offset = (end_of_month.weekday() - weekday) % DAYS_IN_WEEK
    return end_of_month - datetime.timedelta(days=offset)
def nweekday(year, month, nth, weekday):
    """Date of the *nth* occurrence of *weekday* in *month* of *year*.

    *weekday* follows ``datetime.weekday()`` numbering (0=Monday .. 6=Sunday).

    Raises:
        ValueError: If the month has no such n-th weekday.
    """
    first_weekday = datetime.datetime(year, month, 1).weekday()
    end_of_month = eom(year, month)
    # Day-of-month of the first matching weekday, then jump whole weeks.
    day = 1 + (weekday - first_weekday) % DAYS_IN_WEEK + (nth - 1) * DAYS_IN_WEEK
    if day > end_of_month.day:
        raise ValueError("No such n-th weekday in this month")
    return datetime.datetime(year, month, day)
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/README.md | # Flask-Statics-Helper
Provides Bootstrap3 and other static resources in a modular fashion.
The main purpose of this extension is to "modularize" static resources (css and js files) on a per-template basis. In a
large Flask application, not every view/template uses the same static resources (such as d3js). If only one view out of
five or more uses d3js, there is no reason to include the d3js `<script />` tag in every view.
This extension also provides a base template to be extended by your Flask application's templates for Bootstrap3 (like
other Bootstrap3 extensions such as [this](https://github.com/mbr/flask-bootstrap) or
[this](https://github.com/ryanolson/flask-bootstrap3)).
* Python 2.6, 2.7, 3.3, and 3.4 supported on Linux and OS X.
* Python 2.7, 3.3, and 3.4 supported on Windows (both 32 and 64 bit versions of Python).
[Build status (AppVeyor)](https://ci.appveyor.com/project/Robpol86/Flask-Statics-Helper)
[Build status (Travis CI)](https://travis-ci.org/Robpol86/Flask-Statics-Helper)
[Test coverage (Codecov)](https://codecov.io/github/Robpol86/Flask-Statics-Helper)
[Latest version (PyPI)](https://pypi.python.org/pypi/Flask-Statics-Helper/)
[Downloads (PyPI)](https://pypi.python.org/pypi/Flask-Statics-Helper/)
## Quickstart
Install:
```bash
pip install Flask-Statics-Helper
```
Enable:
```python
# example.py
from flask import Flask
from flask.ext.statics import Statics
app = Flask(__name__)
Statics(app)
```
Use with Bootstrap3 (automatically enables jQuery):
```html+django
{% extends 'flask_statics_helper/bootstrap.html' %}
{% set STATICS_ENABLE_RESOURCE_CSSHAKE = True %}
{% block title %}My Application{% endblock %}
{% block navbar %}
<div class="navbar navbar-inverse navbar-static-top" role="navigation">
<div class="container">
<div class="navbar-header"> <!-- navbar-header -->
<button type="button" class="navbar-toggle" data-toggle="collapse"
data-target=".navbar-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/">My Application</a>
</div> <!-- /navbar-header -->
<div class="collapse navbar-collapse"> <!-- navbar-collapse -->
<ul class="nav navbar-nav">
<li><a href="/">Home</a></li>
</ul>
</div> <!-- /navbar-collapse -->
</div>
</div>
{% endblock %}
{% block container %}
<div class="jumbotron">
<h2 class="shake shake-constantly">Hello World.</h2>
</div>
{% endblock %}
```
## Available Resources
* [Bootstrap](http://getbootstrap.com/) 3.3.1
* [jQuery](http://jquery.com/) 2.1.1
* [Angular JS](https://angularjs.org/) 1.3.4
* [Bootstrap Growl](https://github.com/mouse0270/bootstrap-growl) 2.0.0
* [Bootstrap X-Editable](http://vitalets.github.io/x-editable/) 1.5.1
* [BootstrapValidator](http://bootstrapvalidator.com/) 0.5.3
* [CSShake](https://github.com/elrumordelaluz/csshake) (cloned July 9, 2014)
* [D3](http://d3js.org/) 3.4.13
* [Data Tables](http://datatables.net/) 1.10.4
* [Font Awesome](http://fortawesome.github.io/Font-Awesome/) 4.2.0
* [WHHG Font](http://www.webhostinghub.com/glyphs/) (cloned November 26, 2014)
* [typeahead.js](https://github.com/twitter/typeahead.js) 0.10.5
## Configuration
The only `app.config` specific setting is `STATICS_MINIFY`. Everything else may be set to True either in individual
templates (so that css/js is included only for that template) or you may set it to True in the `app.config` if you want
the resource enabled for all templates for some reason or another.
The following config settings are searched for in the Flask application's configuration dictionary:
* `STATICS_MINIFY` -- Have minified resources selected instead of uncompressed resources.
* `STATICS_ENABLE_RESOURCE_ANGULARJS` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_BOOTSTRAP` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_BOOTSTRAP_EDITABLE` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_BOOTSTRAP_GROWL` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_BOOTSTRAP_TYPEAHEAD` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_BOOTSTRAP_VALIDATOR` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_CSSHAKE` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_D3` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_DATATABLES` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_FONT_AWESOME` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_JQUERY` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_WHHG_FONT` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_ANIMATE` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_COOKIES` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_CSP` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_LOADER` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_MESSAGES` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_MOCKS` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_RESOURCE` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_ROUTE` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_SANITIZE` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_SCENARIO` -- include resource in all templates.
* `STATICS_ENABLE_RESOURCE_ANGULARJS_TOUCH` -- include resource in all templates.
## Changelog
#### 1.0.0
* Windows compatibility.
* Updated Bootstrap to 3.3.1.
* Updated Angular JS to 1.3.4.
* Updated BootstrapValidator to 0.5.3.
* Updated D3 to 3.4.13.
* Updated Data Tables to 1.10.4.
* Updated Font Awesome to 4.2.0.
* Updated WHHG Font to latest as of November 26, 2014.
#### 0.3.0
* Replaced Bootstrap 3 Typeahead with Twitter typeahead.js.
#### 0.2.0
* Added BootstrapValidator resource.
#### 0.1.1
* Added Python 2.6 and 3.x support.
#### 0.1.0
* Initial release.
| PypiClean |
/Ngoto-0.0.39-py3-none-any.whl/ngoto/core/util/rich/logging.py | import logging
from datetime import datetime
from logging import Handler, LogRecord
from pathlib import Path
from types import ModuleType
from typing import ClassVar, Iterable, List, Optional, Type, Union
from ngoto.core.util.rich._null_file import NullFile
from . import get_console
from ._log_render import FormatTimeCallable, LogRender
from .console import Console, ConsoleRenderable
from .highlighter import Highlighter, ReprHighlighter
from .text import Text
from .traceback import Traceback
class RichHandler(Handler):
    """A logging handler that renders output with Rich. The time / level / message and file are displayed in columns.

    The level is color coded, and the message is syntax highlighted.

    Note:
        Be careful when enabling console markup in log messages if you have configured logging for libraries not
        under your control. If a dependency writes messages containing square brackets, it may not produce the intended output.

    Args:
        level (Union[int, str], optional): Log level. Defaults to logging.NOTSET.
        console (:class:`~rich.console.Console`, optional): Optional console instance to write logs.
            Default will use a global console instance writing to stdout.
        show_time (bool, optional): Show a column for the time. Defaults to True.
        omit_repeated_times (bool, optional): Omit repetition of the same time. Defaults to True.
        show_level (bool, optional): Show a column for the level. Defaults to True.
        show_path (bool, optional): Show the path to the original log call. Defaults to True.
        enable_link_path (bool, optional): Enable terminal link of path column to file. Defaults to True.
        highlighter (Highlighter, optional): Highlighter to style log messages, or None to use ReprHighlighter. Defaults to None.
        markup (bool, optional): Enable console markup in log messages. Defaults to False.
        rich_tracebacks (bool, optional): Enable rich tracebacks with syntax highlighting and formatting. Defaults to False.
        tracebacks_width (Optional[int], optional): Number of characters used to render tracebacks, or None for full width. Defaults to None.
        tracebacks_extra_lines (int, optional): Additional lines of code to render tracebacks, or None for full width. Defaults to None.
        tracebacks_theme (str, optional): Override pygments theme used in traceback.
        tracebacks_word_wrap (bool, optional): Enable word wrapping of long tracebacks lines. Defaults to True.
        tracebacks_show_locals (bool, optional): Enable display of locals in tracebacks. Defaults to False.
        tracebacks_suppress (Sequence[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback.
        locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
            Defaults to 10.
        locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80.
        log_time_format (Union[str, TimeFormatterCallable], optional): If ``log_time`` is enabled, either string for strftime or callable that formats the time. Defaults to "[%x %X] ".
        keywords (List[str], optional): List of words to highlight instead of ``RichHandler.KEYWORDS``.
    """

    # Default words highlighted in log messages (the HTTP request methods)
    # when no explicit ``keywords`` list is supplied.
    KEYWORDS: ClassVar[Optional[List[str]]] = [
        "GET",
        "POST",
        "HEAD",
        "PUT",
        "DELETE",
        "OPTIONS",
        "TRACE",
        "PATCH",
    ]
    # Highlighter class instantiated when no ``highlighter`` argument is given.
    HIGHLIGHTER_CLASS: ClassVar[Type[Highlighter]] = ReprHighlighter

    def __init__(
        self,
        level: Union[int, str] = logging.NOTSET,
        console: Optional[Console] = None,
        *,
        show_time: bool = True,
        omit_repeated_times: bool = True,
        show_level: bool = True,
        show_path: bool = True,
        enable_link_path: bool = True,
        highlighter: Optional[Highlighter] = None,
        markup: bool = False,
        rich_tracebacks: bool = False,
        tracebacks_width: Optional[int] = None,
        tracebacks_extra_lines: int = 3,
        tracebacks_theme: Optional[str] = None,
        tracebacks_word_wrap: bool = True,
        tracebacks_show_locals: bool = False,
        tracebacks_suppress: Iterable[Union[str, ModuleType]] = (),
        locals_max_length: int = 10,
        locals_max_string: int = 80,
        log_time_format: Union[str, FormatTimeCallable] = "[%x %X]",
        keywords: Optional[List[str]] = None,
    ) -> None:
        super().__init__(level=level)
        # Fall back to the library-wide global console when none is provided.
        self.console = console or get_console()
        self.highlighter = highlighter or self.HIGHLIGHTER_CLASS()
        # LogRender lays out the time / level / message / path columns.
        self._log_render = LogRender(
            show_time=show_time,
            show_level=show_level,
            show_path=show_path,
            time_format=log_time_format,
            omit_repeated_times=omit_repeated_times,
            level_width=None,
        )
        self.enable_link_path = enable_link_path
        self.markup = markup
        self.rich_tracebacks = rich_tracebacks
        self.tracebacks_width = tracebacks_width
        self.tracebacks_extra_lines = tracebacks_extra_lines
        self.tracebacks_theme = tracebacks_theme
        self.tracebacks_word_wrap = tracebacks_word_wrap
        self.tracebacks_show_locals = tracebacks_show_locals
        self.tracebacks_suppress = tracebacks_suppress
        self.locals_max_length = locals_max_length
        self.locals_max_string = locals_max_string
        self.keywords = keywords

    def get_level_text(self, record: LogRecord) -> Text:
        """Get the level name from the record.

        Args:
            record (LogRecord): LogRecord instance.

        Returns:
            Text: A tuple of the style and level name.
        """
        level_name = record.levelname
        # Style name follows rich's "logging.level.<name>" theme convention.
        level_text = Text.styled(
            level_name.ljust(8), f"logging.level.{level_name.lower()}"
        )
        return level_text

    def emit(self, record: LogRecord) -> None:
        """Invoked by logging."""
        message = self.format(record)
        traceback = None
        if (
            self.rich_tracebacks
            and record.exc_info
            and record.exc_info != (None, None, None)
        ):
            exc_type, exc_value, exc_traceback = record.exc_info
            assert exc_type is not None
            assert exc_value is not None
            traceback = Traceback.from_exception(
                exc_type,
                exc_value,
                exc_traceback,
                width=self.tracebacks_width,
                extra_lines=self.tracebacks_extra_lines,
                theme=self.tracebacks_theme,
                word_wrap=self.tracebacks_word_wrap,
                show_locals=self.tracebacks_show_locals,
                locals_max_length=self.locals_max_length,
                locals_max_string=self.locals_max_string,
                suppress=self.tracebacks_suppress,
            )
            # The rich Traceback is rendered separately, so re-derive the
            # message without the plain-text exception that ``self.format``
            # appended above (otherwise the traceback would appear twice).
            message = record.getMessage()
            if self.formatter:
                record.message = record.getMessage()
                formatter = self.formatter
                if hasattr(formatter, "usesTime") and formatter.usesTime():
                    record.asctime = formatter.formatTime(record, formatter.datefmt)
                message = formatter.formatMessage(record)
        message_renderable = self.render_message(record, message)
        log_renderable = self.render(
            record=record, traceback=traceback, message_renderable=message_renderable
        )
        if isinstance(self.console.file, NullFile):
            # Handles pythonw, where stdout/stderr are null, and we return NullFile
            # instance from Console.file. In this case, we still want to make a log record
            # even though we won't be writing anything to a file.
            self.handleError(record)
        else:
            try:
                self.console.print(log_renderable)
            except Exception:
                # Delegate console failures to logging's standard error handler
                # rather than letting them escape into application code.
                self.handleError(record)

    def render_message(self, record: LogRecord, message: str) -> "ConsoleRenderable":
        """Render message text in to Text.

        Args:
            record (LogRecord): logging Record.
            message (str): String containing log message.

        Returns:
            ConsoleRenderable: Renderable to display log message.
        """
        # Per-record ``markup`` / ``highlighter`` attributes (set via the
        # ``extra`` argument of a log call) override the handler defaults.
        use_markup = getattr(record, "markup", self.markup)
        message_text = Text.from_markup(message) if use_markup else Text(message)
        highlighter = getattr(record, "highlighter", self.highlighter)
        if highlighter:
            message_text = highlighter(message_text)
        if self.keywords is None:
            self.keywords = self.KEYWORDS
        if self.keywords:
            message_text.highlight_words(self.keywords, "logging.keyword")
        return message_text

    def render(
        self,
        *,
        record: LogRecord,
        traceback: Optional[Traceback],
        message_renderable: "ConsoleRenderable",
    ) -> "ConsoleRenderable":
        """Render log for display.

        Args:
            record (LogRecord): logging Record.
            traceback (Optional[Traceback]): Traceback instance or None for no Traceback.
            message_renderable (ConsoleRenderable): Renderable (typically Text) containing log message contents.

        Returns:
            ConsoleRenderable: Renderable to display log.
        """
        # Show only the file name, not the whole path, in the path column.
        path = Path(record.pathname).name
        level = self.get_level_text(record)
        time_format = None if self.formatter is None else self.formatter.datefmt
        log_time = datetime.fromtimestamp(record.created)
        log_renderable = self._log_render(
            self.console,
            [message_renderable] if not traceback else [message_renderable, traceback],
            log_time=log_time,
            time_format=time_format,
            level=level,
            path=path,
            line_no=record.lineno,
            link_path=record.pathname if self.enable_link_path else None,
        )
        return log_renderable
if __name__ == "__main__":  # pragma: no cover
    # Demo: run this module directly to see RichHandler render a variety of
    # log levels, markup, and a rich traceback with locals.
    from time import sleep

    FORMAT = "%(message)s"
    # FORMAT = "%(asctime)-15s - %(levelname)s - %(message)s"
    logging.basicConfig(
        level="NOTSET",
        format=FORMAT,
        datefmt="[%X]",
        handlers=[RichHandler(rich_tracebacks=True, tracebacks_show_locals=True)],
    )
    log = logging.getLogger("rich")
    log.info("Server starting...")
    log.info("Listening on http://127.0.0.1:8080")
    sleep(1)
    log.info("GET /index.html 200 1298")
    log.info("GET /imgs/backgrounds/back1.jpg 200 54386")
    log.info("GET /css/styles.css 200 54386")
    log.warning("GET /favicon.ico 404 242")
    sleep(1)
    log.debug(
        "JSONRPC request\n--> %r\n<-- %r",
        {
            "version": "1.1",
            "method": "confirmFruitPurchase",
            "params": [["apple", "orange", "mangoes", "pomelo"], 1.123],
            "id": "194521489",
        },
        {"version": "1.1", "result": True, "error": None, "id": "194521489"},
    )
    log.debug(
        "Loading configuration file /adasd/asdasd/qeqwe/qwrqwrqwr/sdgsdgsdg/werwerwer/dfgerert/ertertert/ertetert/werwerwer"
    )
    log.error("Unable to find 'pomelo' in database!")
    log.info("POST /jsonrpc/ 200 65532")
    log.info("POST /admin/ 401 42234")
    log.warning("password was rejected for admin site.")

    def divide() -> None:
        # Deliberately trigger ZeroDivisionError to demonstrate rich
        # tracebacks (including locals) via log.exception().
        number = 1
        divisor = 0
        foos = ["foo"] * 100
        log.debug("in divide")
        try:
            number / divisor
        except ZeroDivisionError:  # bug fix: was a bare ``except:`` that swallowed everything
            log.exception("An error of some kind occurred!")

    divide()
    sleep(1)
    log.critical("Out of memory!")
    log.info("Server exited with code=-1")
    log.info("[bold]EXITING...[/bold]", extra=dict(markup=True))
/Django-Pizza-16.10.1.tar.gz/Django-Pizza-16.10.1/pizza/kitchen_sink/static/ks/ckeditor/plugins/a11yhelp/dialogs/lang/ug.js | /*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// CKEditor "a11yhelp" dialog language pack for Uyghur ("ug").
// Minified, generated localization data -- do not edit by hand; the strings
// below are UTF-8 Uyghur text keyed to the a11yhelp dialog fields.
CKEDITOR.plugins.setLang("a11yhelp","ug",{title:"قوشۇمچە چۈشەندۈرۈش",contents:"ياردەم مەزمۇنى. بۇ سۆزلەشكۈنى ياپماقچى بولسىڭىز ESC نى بېسىڭ.",legend:[{name:"ئادەتتىكى",items:[{name:"قورال بالداق تەھرىر",legend:"${toolbarFocus} بېسىلسا قورال بالداققا يېتەكلەيدۇ، TAB ياكى SHIFT+TAB ئارقىلىق قورال بالداق گۇرۇپپىسى تاللىنىدۇ، ئوڭ سول يا ئوقتا توپچا تاللىنىدۇ، بوشلۇق ياكى Enter كۇنۇپكىسىدا تاللانغان توپچىنى قوللىنىدۇ."},{name:"تەھرىرلىگۈچ سۆزلەشكۈسى",legend:"سۆزلەشكۈدە TAB كۇنۇپكىسىدا كېيىنكى سۆز بۆلىكىگە يۆتكىلىدۇ، SHIFT + TAB بىرىكمە كۇنۇپكىسىدا ئالدىنقى سۆز بۆلىكىگە يۆتكىلىدۇ، ENTER كۇنۇپكىسىدا سۆزلەشكۈنى تاپشۇرىدۇ، ESC كۇنۇپكىسى سۆزلەشكۈدىن ۋاز كېچىدۇ. كۆپ بەتكۈچلۈك سۆزلەشكۈگە نىسبەتەن، ALT + F10 دا بەتكۈچ تىزىمىغا يۆتكەيدۇ. ئاندىن TAB كۇنۇپكىسى ياكى ئوڭ يا ئوق كۇنۇپكىسى كېيىنكى بەتكۈچكە يۆتكەيدۇ؛ SHIFT + TAB كۇنۇپكىسى ياكى سول يا ئوق كۇنۇپكىسى ئالدىنقى بەتكۈچكە يۆتكەيدۇ. بوشلۇق كۇنۇپكىسى ياكى ENTER كۇنۇپكىسى بەتكۈچنى تاللايدۇ."},
{name:"تەھرىرلىگۈچ تىل مۇھىت تىزىملىكى",legend:"${contextMenu} ياكى ئەپ كۇنۇپكىسىدا تىل مۇھىت تىزىملىكىنى ئاچىدۇ. ئاندىن TAB ياكى ئاستى يا ئوق كۇنۇپكىسىدا كېيىنكى تىزىملىك تۈرىگە يۆتكەيدۇ؛ SHIFT+TAB ياكى ئۈستى يا ئوق كۇنۇپكىسىدا ئالدىنقى تىزىملىك تۈرىگە يۆتكەيدۇ. بوشلۇق ياكى ENTER كۇنۇپكىسىدا تىزىملىك تۈرىنى تاللايدۇ. بوشلۇق، ENTER ياكى ئوڭ يا ئوق كۇنۇپكىسىدا تارماق تىزىملىكنى ئاچىدۇ. قايتىش تىزىملىكىگە ESC ياكى سول يا ئوق كۇنۇپكىسى ئىشلىتىلىدۇ. ESC كۇنۇپكىسىدا تىل مۇھىت تىزىملىكى تاقىلىدۇ."},{name:"تەھرىرلىگۈچ تىزىمى",
legend:"تىزىم قۇتىسىدا، كېيىنكى تىزىم تۈرىگە يۆتكەشتە TAB ياكى ئاستى يا ئوق كۇنۇپكىسى ئىشلىتىلىدۇ. ئالدىنقى تىزىم تۈرىگە يۆتكەشتە SHIFT + TAB ياكى ئۈستى يا ئوق كۇنۇپكىسى ئىشلىتىلىدۇ. بوشلۇق ياكى ENTER كۇنۇپكىسىدا تىزىم تۈرىنى تاللايدۇ.ESC كۇنۇپكىسىدا تىزىم قۇتىسىنى يىغىدۇ."},{name:"تەھرىرلىگۈچ ئېلېمېنت يول بالداق",legend:"${elementsPathFocus} بېسىلسا ئېلېمېنت يول بالداققا يېتەكلەيدۇ، TAB ياكى ئوڭ يا ئوقتا كېيىنكى ئېلېمېنت تاللىنىدۇ، SHIFT+TAB ياكى سول يا ئوقتا ئالدىنقى ئېلېمېنت تاللىنىدۇ، بوشلۇق ياكى Enter كۇنۇپكىسىدا تەھرىرلىگۈچتىكى ئېلېمېنت تاللىنىدۇ."}]},
{name:"بۇيرۇق",items:[{name:"بۇيرۇقتىن يېنىۋال",legend:"${undo} نى بېسىڭ"},{name:"قايتىلاش بۇيرۇقى",legend:"${redo} نى بېسىڭ"},{name:"توملىتىش بۇيرۇقى",legend:"${bold} نى بېسىڭ"},{name:"يانتۇ بۇيرۇقى",legend:"${italic} نى بېسىڭ"},{name:"ئاستى سىزىق بۇيرۇقى",legend:"${underline} نى بېسىڭ"},{name:"ئۇلانما بۇيرۇقى",legend:"${link} نى بېسىڭ"},{name:"قورال بالداق قاتلاش بۇيرۇقى",legend:"${toolbarCollapse} نى بېسىڭ"},{name:"ئالدىنقى فوكۇس نۇقتىسىنى زىيارەت قىلىدىغان بۇيرۇق",legend:"${accessPreviousSpace} بېسىپ ^ بەلگىسىگە ئەڭ يېقىن زىيارەت قىلغىلى بولمايدىغان فوكۇس نۇقتا رايونىنىڭ ئالدىنى زىيارەت قىلىدۇ، مەسىلەن: ئۆز ئارا قوشنا ئىككى HR ئېلېمېنت. بۇ بىرىكمە كۇنۇپكا تەكرارلانسا يىراقتىكى فوكۇس نۇقتا رايونىغا يەتكىلى بولىدۇ."},
{name:"كېيىنكى فوكۇس نۇقتىسىنى زىيارەت قىلىدىغان بۇيرۇق",legend:"${accessNextSpace} بېسىپ ^ بەلگىسىگە ئەڭ يېقىن زىيارەت قىلغىلى بولمايدىغان فوكۇس نۇقتا رايونىنىڭ كەينىنى زىيارەت قىلىدۇ، مەسىلەن: ئۆز ئارا قوشنا ئىككى HR ئېلېمېنت. بۇ بىرىكمە كۇنۇپكا تەكرارلانسا يىراقتىكى فوكۇس نۇقتا رايونىغا يەتكىلى بولىدۇ."},{name:"توسالغۇسىز لايىھە چۈشەندۈرۈشى",legend:"${a11yHelp} نى بېسىڭ"}]}]});
/CodernityDB-HTTP-0.4.1.tar.gz/CodernityDB-HTTP-0.4.1/CodernityDBHTTP/interface/scripts/lib/codemirror/codemirror.js |
// CodeMirror is the only global var we claim
var CodeMirror = (function() {
// This is the function that produces an editor instance. Its
// closure is used to store the editor state.
function CodeMirror(place, givenOptions) {
// Determine effective options based on given values and defaults.
var options = {}, defaults = CodeMirror.defaults;
for (var opt in defaults)
if (defaults.hasOwnProperty(opt))
options[opt] = (givenOptions && givenOptions.hasOwnProperty(opt) ? givenOptions : defaults)[opt];
// The element in which the editor lives.
var wrapper = document.createElement("div");
wrapper.className = "CodeMirror" + (options.lineWrapping ? " CodeMirror-wrap" : "");
// This mess creates the base DOM structure for the editor.
wrapper.innerHTML =
'<div style="overflow: hidden; position: relative; width: 3px; height: 0px;">' + // Wraps and hides input textarea
'<textarea style="position: absolute; padding: 0; width: 1px; height: 1em" wrap="off" ' +
'autocorrect="off" autocapitalize="off"></textarea></div>' +
'<div class="CodeMirror-scroll" tabindex="-1">' +
'<div style="position: relative">' + // Set to the height of the text, causes scrolling
'<div style="position: relative">' + // Moved around its parent to cover visible view
'<div class="CodeMirror-gutter"><div class="CodeMirror-gutter-text"></div></div>' +
// Provides positioning relative to (visible) text origin
'<div class="CodeMirror-lines"><div style="position: relative; z-index: 0">' +
'<div style="position: absolute; width: 100%; height: 0; overflow: hidden; visibility: hidden;"></div>' +
'<pre class="CodeMirror-cursor"> </pre>' + // Absolutely positioned blinky cursor
'<div style="position: relative; z-index: -1"></div><div></div>' + // DIVs containing the selection and the actual code
'</div></div></div></div></div>';
if (place.appendChild) place.appendChild(wrapper); else place(wrapper);
// I've never seen more elegant code in my life.
var inputDiv = wrapper.firstChild, input = inputDiv.firstChild,
scroller = wrapper.lastChild, code = scroller.firstChild,
mover = code.firstChild, gutter = mover.firstChild, gutterText = gutter.firstChild,
lineSpace = gutter.nextSibling.firstChild, measure = lineSpace.firstChild,
cursor = measure.nextSibling, selectionDiv = cursor.nextSibling,
lineDiv = selectionDiv.nextSibling;
themeChanged();
// Needed to hide big blue blinking cursor on Mobile Safari
if (ios) input.style.width = "0px";
if (!webkit) lineSpace.draggable = true;
lineSpace.style.outline = "none";
if (options.tabindex != null) input.tabIndex = options.tabindex;
if (options.autofocus) focusInput();
if (!options.gutter && !options.lineNumbers) gutter.style.display = "none";
// Needed to handle Tab key in KHTML
if (khtml) inputDiv.style.height = "1px", inputDiv.style.position = "absolute";
// Check for problem with IE innerHTML not working when we have a
// P (or similar) parent node.
try { stringWidth("x"); }
catch (e) {
if (e.message.match(/runtime/i))
e = new Error("A CodeMirror inside a P-style element does not work in Internet Explorer. (innerHTML bug)");
throw e;
}
// Delayed object wrap timeouts, making sure only one is active. blinker holds an interval.
var poll = new Delayed(), highlight = new Delayed(), blinker;
// mode holds a mode API object. doc is the tree of Line objects,
// work an array of lines that should be parsed, and history the
// undo history (instance of History constructor).
var mode, doc = new BranchChunk([new LeafChunk([new Line("")])]), work, focused;
loadMode();
// The selection. These are always maintained to point at valid
// positions. Inverted is used to remember that the user is
// selecting bottom-to-top.
var sel = {from: {line: 0, ch: 0}, to: {line: 0, ch: 0}, inverted: false};
// Selection-related flags. shiftSelecting obviously tracks
// whether the user is holding shift.
var shiftSelecting, lastClick, lastDoubleClick, lastScrollPos = 0, draggingText,
overwrite = false, suppressEdits = false;
// Variables used by startOperation/endOperation to track what
// happened during the operation.
var updateInput, userSelChange, changes, textChanged, selectionChanged, leaveInputAlone,
gutterDirty, callbacks;
// Current visible range (may be bigger than the view window).
var displayOffset = 0, showingFrom = 0, showingTo = 0, lastSizeC = 0;
// bracketHighlighted is used to remember that a bracket has been
// marked.
var bracketHighlighted;
// Tracks the maximum line length so that the horizontal scrollbar
// can be kept static when scrolling.
var maxLine = "", maxWidth;
var tabCache = {};
// Initialize the content.
operation(function(){setValue(options.value || ""); updateInput = false;})();
var history = new History();
// Register our event handlers.
connect(scroller, "mousedown", operation(onMouseDown));
connect(scroller, "dblclick", operation(onDoubleClick));
connect(lineSpace, "dragstart", onDragStart);
connect(lineSpace, "selectstart", e_preventDefault);
// Gecko browsers fire contextmenu *after* opening the menu, at
// which point we can't mess with it anymore. Context menu is
// handled in onMouseDown for Gecko.
if (!gecko) connect(scroller, "contextmenu", onContextMenu);
connect(scroller, "scroll", function() {
lastScrollPos = scroller.scrollTop;
updateDisplay([]);
if (options.fixedGutter) gutter.style.left = scroller.scrollLeft + "px";
if (options.onScroll) options.onScroll(instance);
});
connect(window, "resize", function() {updateDisplay(true);});
connect(input, "keyup", operation(onKeyUp));
connect(input, "input", fastPoll);
connect(input, "keydown", operation(onKeyDown));
connect(input, "keypress", operation(onKeyPress));
connect(input, "focus", onFocus);
connect(input, "blur", onBlur);
connect(scroller, "dragenter", e_stop);
connect(scroller, "dragover", e_stop);
connect(scroller, "drop", operation(onDrop));
connect(scroller, "paste", function(){focusInput(); fastPoll();});
connect(input, "paste", fastPoll);
connect(input, "cut", operation(function(){
if (!options.readOnly) replaceSelection("");
}));
// Needed to handle Tab key in KHTML
if (khtml) connect(code, "mouseup", function() {
if (document.activeElement == input) input.blur();
focusInput();
});
// IE throws unspecified error in certain cases, when
// trying to access activeElement before onload
var hasFocus; try { hasFocus = (document.activeElement == input); } catch(e) { }
if (hasFocus || options.autofocus) setTimeout(onFocus, 20);
else onBlur();
function isLine(l) {return l >= 0 && l < doc.size;}
// The instance object that we'll return. Mostly calls out to
// local functions in the CodeMirror function. Some do some extra
// range checking and/or clipping. operation is used to wrap the
// call so that changes it makes are tracked, and the display is
// updated afterwards.
var instance = wrapper.CodeMirror = {
getValue: getValue,
setValue: operation(setValue),
getSelection: getSelection,
replaceSelection: operation(replaceSelection),
focus: function(){window.focus(); focusInput(); onFocus(); fastPoll();},
setOption: function(option, value) {
var oldVal = options[option];
options[option] = value;
if (option == "mode" || option == "indentUnit") loadMode();
else if (option == "readOnly" && value == "nocursor") {onBlur(); input.blur();}
else if (option == "readOnly" && !value) {resetInput(true);}
else if (option == "theme") themeChanged();
else if (option == "lineWrapping" && oldVal != value) operation(wrappingChanged)();
else if (option == "tabSize") updateDisplay(true);
if (option == "lineNumbers" || option == "gutter" || option == "firstLineNumber" || option == "theme") {
gutterChanged();
updateDisplay(true);
}
},
getOption: function(option) {return options[option];},
undo: operation(undo),
redo: operation(redo),
indentLine: operation(function(n, dir) {
if (typeof dir != "string") {
if (dir == null) dir = options.smartIndent ? "smart" : "prev";
else dir = dir ? "add" : "subtract";
}
if (isLine(n)) indentLine(n, dir);
}),
indentSelection: operation(indentSelected),
historySize: function() {return {undo: history.done.length, redo: history.undone.length};},
clearHistory: function() {history = new History();},
matchBrackets: operation(function(){matchBrackets(true);}),
getTokenAt: operation(function(pos) {
pos = clipPos(pos);
return getLine(pos.line).getTokenAt(mode, getStateBefore(pos.line), pos.ch);
}),
getStateAfter: function(line) {
line = clipLine(line == null ? doc.size - 1: line);
return getStateBefore(line + 1);
},
cursorCoords: function(start, mode) {
if (start == null) start = sel.inverted;
return this.charCoords(start ? sel.from : sel.to, mode);
},
charCoords: function(pos, mode) {
pos = clipPos(pos);
if (mode == "local") return localCoords(pos, false);
if (mode == "div") return localCoords(pos, true);
return pageCoords(pos);
},
coordsChar: function(coords) {
var off = eltOffset(lineSpace);
return coordsChar(coords.x - off.left, coords.y - off.top);
},
markText: operation(markText),
setBookmark: setBookmark,
findMarksAt: findMarksAt,
setMarker: operation(addGutterMarker),
clearMarker: operation(removeGutterMarker),
setLineClass: operation(setLineClass),
hideLine: operation(function(h) {return setLineHidden(h, true);}),
showLine: operation(function(h) {return setLineHidden(h, false);}),
onDeleteLine: function(line, f) {
if (typeof line == "number") {
if (!isLine(line)) return null;
line = getLine(line);
}
(line.handlers || (line.handlers = [])).push(f);
return line;
},
lineInfo: lineInfo,
addWidget: function(pos, node, scroll, vert, horiz) {
pos = localCoords(clipPos(pos));
var top = pos.yBot, left = pos.x;
node.style.position = "absolute";
code.appendChild(node);
if (vert == "over") top = pos.y;
else if (vert == "near") {
var vspace = Math.max(scroller.offsetHeight, doc.height * textHeight()),
hspace = Math.max(code.clientWidth, lineSpace.clientWidth) - paddingLeft();
if (pos.yBot + node.offsetHeight > vspace && pos.y > node.offsetHeight)
top = pos.y - node.offsetHeight;
if (left + node.offsetWidth > hspace)
left = hspace - node.offsetWidth;
}
node.style.top = (top + paddingTop()) + "px";
node.style.left = node.style.right = "";
if (horiz == "right") {
left = code.clientWidth - node.offsetWidth;
node.style.right = "0px";
} else {
if (horiz == "left") left = 0;
else if (horiz == "middle") left = (code.clientWidth - node.offsetWidth) / 2;
node.style.left = (left + paddingLeft()) + "px";
}
if (scroll)
scrollIntoView(left, top, left + node.offsetWidth, top + node.offsetHeight);
},
lineCount: function() {return doc.size;},
clipPos: clipPos,
getCursor: function(start) {
if (start == null) start = sel.inverted;
return copyPos(start ? sel.from : sel.to);
},
somethingSelected: function() {return !posEq(sel.from, sel.to);},
setCursor: operation(function(line, ch, user) {
if (ch == null && typeof line.line == "number") setCursor(line.line, line.ch, user);
else setCursor(line, ch, user);
}),
setSelection: operation(function(from, to, user) {
(user ? setSelectionUser : setSelection)(clipPos(from), clipPos(to || from));
}),
getLine: function(line) {if (isLine(line)) return getLine(line).text;},
getLineHandle: function(line) {if (isLine(line)) return getLine(line);},
setLine: operation(function(line, text) {
if (isLine(line)) replaceRange(text, {line: line, ch: 0}, {line: line, ch: getLine(line).text.length});
}),
removeLine: operation(function(line) {
if (isLine(line)) replaceRange("", {line: line, ch: 0}, clipPos({line: line+1, ch: 0}));
}),
replaceRange: operation(replaceRange),
getRange: function(from, to) {return getRange(clipPos(from), clipPos(to));},
triggerOnKeyDown: operation(onKeyDown),
execCommand: function(cmd) {return commands[cmd](instance);},
// Stuff used by commands, probably not much use to outside code.
moveH: operation(moveH),
deleteH: operation(deleteH),
moveV: operation(moveV),
toggleOverwrite: function() {
if(overwrite){
overwrite = false;
cursor.className = cursor.className.replace(" CodeMirror-overwrite", "");
} else {
overwrite = true;
cursor.className += " CodeMirror-overwrite";
}
},
posFromIndex: function(off) {
var lineNo = 0, ch;
doc.iter(0, doc.size, function(line) {
var sz = line.text.length + 1;
if (sz > off) { ch = off; return true; }
off -= sz;
++lineNo;
});
return clipPos({line: lineNo, ch: ch});
},
indexFromPos: function (coords) {
if (coords.line < 0 || coords.ch < 0) return 0;
var index = coords.ch;
doc.iter(0, coords.line, function (line) {
index += line.text.length + 1;
});
return index;
},
scrollTo: function(x, y) {
if (x != null) scroller.scrollLeft = x;
if (y != null) scroller.scrollTop = y;
updateDisplay([]);
},
operation: function(f){return operation(f)();},
refresh: function(){
updateDisplay(true);
if (scroller.scrollHeight > lastScrollPos)
scroller.scrollTop = lastScrollPos;
},
getInputField: function(){return input;},
getWrapperElement: function(){return wrapper;},
getScrollerElement: function(){return scroller;},
getGutterElement: function(){return gutter;}
};
function getLine(n) { return getLineAt(doc, n); }
function updateLineHeight(line, height) {
gutterDirty = true;
var diff = height - line.height;
for (var n = line; n; n = n.parent) n.height += diff;
}
function setValue(code) {
var top = {line: 0, ch: 0};
updateLines(top, {line: doc.size - 1, ch: getLine(doc.size-1).text.length},
splitLines(code), top, top);
updateInput = true;
}
function getValue(code) {
var text = [];
doc.iter(0, doc.size, function(line) { text.push(line.text); });
return text.join("\n");
}
function onMouseDown(e) {
setShift(e_prop(e, "shiftKey"));
// Check whether this is a click in a widget
for (var n = e_target(e); n != wrapper; n = n.parentNode)
if (n.parentNode == code && n != mover) return;
// See if this is a click in the gutter
for (var n = e_target(e); n != wrapper; n = n.parentNode)
if (n.parentNode == gutterText) {
if (options.onGutterClick)
options.onGutterClick(instance, indexOf(gutterText.childNodes, n) + showingFrom, e);
return e_preventDefault(e);
}
var start = posFromMouse(e);
switch (e_button(e)) {
case 3:
if (gecko && !mac) onContextMenu(e);
return;
case 2:
if (start) setCursor(start.line, start.ch, true);
return;
}
// For button 1, if it was clicked inside the editor
// (posFromMouse returning non-null), we have to adjust the
// selection.
if (!start) {if (e_target(e) == scroller) e_preventDefault(e); return;}
if (!focused) onFocus();
var now = +new Date;
if (lastDoubleClick && lastDoubleClick.time > now - 400 && posEq(lastDoubleClick.pos, start)) {
e_preventDefault(e);
setTimeout(focusInput, 20);
return selectLine(start.line);
} else if (lastClick && lastClick.time > now - 400 && posEq(lastClick.pos, start)) {
lastDoubleClick = {time: now, pos: start};
e_preventDefault(e);
return selectWordAt(start);
} else { lastClick = {time: now, pos: start}; }
var last = start, going;
if (dragAndDrop && !options.readOnly && !posEq(sel.from, sel.to) &&
!posLess(start, sel.from) && !posLess(sel.to, start)) {
// Let the drag handler handle this.
if (webkit) lineSpace.draggable = true;
var up = connect(document, "mouseup", operation(function(e2) {
if (webkit) lineSpace.draggable = false;
draggingText = false;
up();
if (Math.abs(e.clientX - e2.clientX) + Math.abs(e.clientY - e2.clientY) < 10) {
e_preventDefault(e2);
setCursor(start.line, start.ch, true);
focusInput();
}
}), true);
draggingText = true;
// IE's approach to draggable
if (lineSpace.dragDrop) lineSpace.dragDrop();
return;
}
e_preventDefault(e);
setCursor(start.line, start.ch, true);
function extend(e) {
var cur = posFromMouse(e, true);
if (cur && !posEq(cur, last)) {
if (!focused) onFocus();
last = cur;
setSelectionUser(start, cur);
updateInput = false;
var visible = visibleLines();
if (cur.line >= visible.to || cur.line < visible.from)
going = setTimeout(operation(function(){extend(e);}), 150);
}
}
function done(e) {
clearTimeout(going);
var cur = posFromMouse(e);
if (cur) setSelectionUser(start, cur);
e_preventDefault(e);
focusInput();
updateInput = true;
move(); up();
}
var move = connect(document, "mousemove", operation(function(e) {
clearTimeout(going);
e_preventDefault(e);
if (!ie && !e_button(e)) done(e);
else extend(e);
}), true);
var up = connect(document, "mouseup", operation(done), true);
}
function onDoubleClick(e) {
for (var n = e_target(e); n != wrapper; n = n.parentNode)
if (n.parentNode == gutterText) return e_preventDefault(e);
var start = posFromMouse(e);
if (!start) return;
lastDoubleClick = {time: +new Date, pos: start};
e_preventDefault(e);
selectWordAt(start);
}
function onDrop(e) {
e.preventDefault();
var pos = posFromMouse(e, true), files = e.dataTransfer.files;
if (!pos || options.readOnly) return;
if (files && files.length && window.FileReader && window.File) {
function loadFile(file, i) {
var reader = new FileReader;
reader.onload = function() {
text[i] = reader.result;
if (++read == n) {
pos = clipPos(pos);
operation(function() {
var end = replaceRange(text.join(""), pos, pos);
setSelectionUser(pos, end);
})();
}
};
reader.readAsText(file);
}
var n = files.length, text = Array(n), read = 0;
for (var i = 0; i < n; ++i) loadFile(files[i], i);
}
else {
try {
var text = e.dataTransfer.getData("Text");
if (text) {
var curFrom = sel.from, curTo = sel.to;
setSelectionUser(pos, pos);
if (draggingText) replaceRange("", curFrom, curTo);
replaceSelection(text);
focusInput();
}
}
catch(e){}
}
}
function onDragStart(e) {
var txt = getSelection();
e.dataTransfer.setData("Text", txt);
// Use dummy image instead of default browsers image.
if (gecko || chrome) {
var img = document.createElement('img');
img.scr = 'data:image/gif;base64,R0lGODdhAgACAIAAAAAAAP///ywAAAAAAgACAAACAoRRADs='; //1x1 image
e.dataTransfer.setDragImage(img, 0, 0);
}
}
function doHandleBinding(bound, dropShift) {
if (typeof bound == "string") {
bound = commands[bound];
if (!bound) return false;
}
var prevShift = shiftSelecting;
try {
if (options.readOnly) suppressEdits = true;
if (dropShift) shiftSelecting = null;
bound(instance);
} catch(e) {
if (e != Pass) throw e;
return false;
} finally {
shiftSelecting = prevShift;
suppressEdits = false;
}
return true;
}
function handleKeyBinding(e) {
// Handle auto keymap transitions
var startMap = getKeyMap(options.keyMap), next = startMap.auto;
clearTimeout(maybeTransition);
if (next && !isModifierKey(e)) maybeTransition = setTimeout(function() {
if (getKeyMap(options.keyMap) == startMap) {
options.keyMap = (next.call ? next.call(null, instance) : next);
}
}, 50);
var name = keyNames[e_prop(e, "keyCode")], handled = false;
if (name == null || e.altGraphKey) return false;
if (e_prop(e, "altKey")) name = "Alt-" + name;
if (e_prop(e, "ctrlKey")) name = "Ctrl-" + name;
if (e_prop(e, "metaKey")) name = "Cmd-" + name;
if (e_prop(e, "shiftKey")) {
handled = lookupKey("Shift-" + name, options.extraKeys, options.keyMap,
function(b) {return doHandleBinding(b, true);})
|| lookupKey(name, options.extraKeys, options.keyMap, function(b) {
if (typeof b == "string" && /^go[A-Z]/.test(b)) return doHandleBinding(b);
});
} else {
handled = lookupKey(name, options.extraKeys, options.keyMap, doHandleBinding);
}
if (handled) {
e_preventDefault(e);
if (ie) { e.oldKeyCode = e.keyCode; e.keyCode = 0; }
}
return handled;
}
function handleCharBinding(e, ch) {
var handled = lookupKey("'" + ch + "'", options.extraKeys,
options.keyMap, doHandleBinding);
if (handled) e_preventDefault(e);
return handled;
}
var lastStoppedKey = null, maybeTransition;
function onKeyDown(e) {
if (!focused) onFocus();
if (ie && e.keyCode == 27) { e.returnValue = false; }
if (pollingFast) { if (readInput()) pollingFast = false; }
if (options.onKeyEvent && options.onKeyEvent(instance, addStop(e))) return;
var code = e_prop(e, "keyCode");
// IE does strange things with escape.
setShift(code == 16 || e_prop(e, "shiftKey"));
// First give onKeyEvent option a chance to handle this.
var handled = handleKeyBinding(e);
if (window.opera) {
lastStoppedKey = handled ? code : null;
// Opera has no cut event... we try to at least catch the key combo
if (!handled && code == 88 && e_prop(e, mac ? "metaKey" : "ctrlKey"))
replaceSelection("");
}
}
function onKeyPress(e) {
if (pollingFast) readInput();
if (options.onKeyEvent && options.onKeyEvent(instance, addStop(e))) return;
var keyCode = e_prop(e, "keyCode"), charCode = e_prop(e, "charCode");
if (window.opera && keyCode == lastStoppedKey) {lastStoppedKey = null; e_preventDefault(e); return;}
if (((window.opera && !e.which) || khtml) && handleKeyBinding(e)) return;
var ch = String.fromCharCode(charCode == null ? keyCode : charCode);
if (options.electricChars && mode.electricChars && options.smartIndent && !options.readOnly) {
if (mode.electricChars.indexOf(ch) > -1)
setTimeout(operation(function() {indentLine(sel.to.line, "smart");}), 75);
}
if (handleCharBinding(e, ch)) return;
fastPoll();
}
function onKeyUp(e) {
if (options.onKeyEvent && options.onKeyEvent(instance, addStop(e))) return;
if (e_prop(e, "keyCode") == 16) shiftSelecting = null;
}
function onFocus() {
if (options.readOnly == "nocursor") return;
if (!focused) {
if (options.onFocus) options.onFocus(instance);
focused = true;
if (wrapper.className.search(/\bCodeMirror-focused\b/) == -1)
wrapper.className += " CodeMirror-focused";
if (!leaveInputAlone) resetInput(true);
}
slowPoll();
restartBlink();
}
function onBlur() {
if (focused) {
if (options.onBlur) options.onBlur(instance);
focused = false;
if (bracketHighlighted)
operation(function(){
if (bracketHighlighted) { bracketHighlighted(); bracketHighlighted = null; }
})();
wrapper.className = wrapper.className.replace(" CodeMirror-focused", "");
}
clearInterval(blinker);
setTimeout(function() {if (!focused) shiftSelecting = null;}, 150);
}
    // Replace the range between positions `from` and `to` with the strings
    // in newText. Afterwards, set the selection to selFrom, selTo.
function updateLines(from, to, newText, selFrom, selTo) {
if (suppressEdits) return;
if (history) {
var old = [];
doc.iter(from.line, to.line + 1, function(line) { old.push(line.text); });
history.addChange(from.line, newText.length, old);
while (history.done.length > options.undoDepth) history.done.shift();
}
updateLinesNoUndo(from, to, newText, selFrom, selTo);
}
function unredoHelper(from, to) {
if (!from.length) return;
var set = from.pop(), out = [];
for (var i = set.length - 1; i >= 0; i -= 1) {
var change = set[i];
var replaced = [], end = change.start + change.added;
doc.iter(change.start, end, function(line) { replaced.push(line.text); });
out.push({start: change.start, added: change.old.length, old: replaced});
var pos = clipPos({line: change.start + change.old.length - 1,
ch: editEnd(replaced[replaced.length-1], change.old[change.old.length-1])});
updateLinesNoUndo({line: change.start, ch: 0}, {line: end - 1, ch: getLine(end-1).text.length}, change.old, pos, pos);
}
updateInput = true;
to.push(out);
}
function undo() {unredoHelper(history.done, history.undone);}
function redo() {unredoHelper(history.undone, history.done);}
function updateLinesNoUndo(from, to, newText, selFrom, selTo) {
if (suppressEdits) return;
var recomputeMaxLength = false, maxLineLength = maxLine.length;
if (!options.lineWrapping)
doc.iter(from.line, to.line, function(line) {
if (line.text.length == maxLineLength) {recomputeMaxLength = true; return true;}
});
if (from.line != to.line || newText.length > 1) gutterDirty = true;
var nlines = to.line - from.line, firstLine = getLine(from.line), lastLine = getLine(to.line);
// First adjust the line structure, taking some care to leave highlighting intact.
if (from.ch == 0 && to.ch == 0 && newText[newText.length - 1] == "") {
// This is a whole-line replace. Treated specially to make
// sure line objects move the way they are supposed to.
var added = [], prevLine = null;
if (from.line) {
prevLine = getLine(from.line - 1);
prevLine.fixMarkEnds(lastLine);
} else lastLine.fixMarkStarts();
for (var i = 0, e = newText.length - 1; i < e; ++i)
added.push(Line.inheritMarks(newText[i], prevLine));
if (nlines) doc.remove(from.line, nlines, callbacks);
if (added.length) doc.insert(from.line, added);
} else if (firstLine == lastLine) {
if (newText.length == 1)
firstLine.replace(from.ch, to.ch, newText[0]);
else {
lastLine = firstLine.split(to.ch, newText[newText.length-1]);
firstLine.replace(from.ch, null, newText[0]);
firstLine.fixMarkEnds(lastLine);
var added = [];
for (var i = 1, e = newText.length - 1; i < e; ++i)
added.push(Line.inheritMarks(newText[i], firstLine));
added.push(lastLine);
doc.insert(from.line + 1, added);
}
} else if (newText.length == 1) {
firstLine.replace(from.ch, null, newText[0]);
lastLine.replace(null, to.ch, "");
firstLine.append(lastLine);
doc.remove(from.line + 1, nlines, callbacks);
} else {
var added = [];
firstLine.replace(from.ch, null, newText[0]);
lastLine.replace(null, to.ch, newText[newText.length-1]);
firstLine.fixMarkEnds(lastLine);
for (var i = 1, e = newText.length - 1; i < e; ++i)
added.push(Line.inheritMarks(newText[i], firstLine));
if (nlines > 1) doc.remove(from.line + 1, nlines - 1, callbacks);
doc.insert(from.line + 1, added);
}
if (options.lineWrapping) {
var perLine = scroller.clientWidth / charWidth() - 3;
doc.iter(from.line, from.line + newText.length, function(line) {
if (line.hidden) return;
var guess = Math.ceil(line.text.length / perLine) || 1;
if (guess != line.height) updateLineHeight(line, guess);
});
} else {
doc.iter(from.line, i + newText.length, function(line) {
var l = line.text;
if (l.length > maxLineLength) {
maxLine = l; maxLineLength = l.length; maxWidth = null;
recomputeMaxLength = false;
}
});
if (recomputeMaxLength) {
maxLineLength = 0; maxLine = ""; maxWidth = null;
doc.iter(0, doc.size, function(line) {
var l = line.text;
if (l.length > maxLineLength) {
maxLineLength = l.length; maxLine = l;
}
});
}
}
// Add these lines to the work array, so that they will be
// highlighted. Adjust work lines if lines were added/removed.
var newWork = [], lendiff = newText.length - nlines - 1;
for (var i = 0, l = work.length; i < l; ++i) {
var task = work[i];
if (task < from.line) newWork.push(task);
else if (task > to.line) newWork.push(task + lendiff);
}
var hlEnd = from.line + Math.min(newText.length, 500);
highlightLines(from.line, hlEnd);
newWork.push(hlEnd);
work = newWork;
startWorker(100);
// Remember that these lines changed, for updating the display
changes.push({from: from.line, to: to.line + 1, diff: lendiff});
var changeObj = {from: from, to: to, text: newText};
if (textChanged) {
for (var cur = textChanged; cur.next; cur = cur.next) {}
cur.next = changeObj;
} else textChanged = changeObj;
// Update the selection
function updateLine(n) {return n <= Math.min(to.line, to.line + lendiff) ? n : n + lendiff;}
setSelection(selFrom, selTo, updateLine(sel.from.line), updateLine(sel.to.line));
// Make sure the scroll-size div has the correct height.
if (scroller.clientHeight)
code.style.height = (doc.height * textHeight() + 2 * paddingTop()) + "px";
}
function replaceRange(code, from, to) {
from = clipPos(from);
if (!to) to = from; else to = clipPos(to);
code = splitLines(code);
function adjustPos(pos) {
if (posLess(pos, from)) return pos;
if (!posLess(to, pos)) return end;
var line = pos.line + code.length - (to.line - from.line) - 1;
var ch = pos.ch;
if (pos.line == to.line)
ch += code[code.length-1].length - (to.ch - (to.line == from.line ? from.ch : 0));
return {line: line, ch: ch};
}
var end;
replaceRange1(code, from, to, function(end1) {
end = end1;
return {from: adjustPos(sel.from), to: adjustPos(sel.to)};
});
return end;
}
function replaceSelection(code, collapse) {
replaceRange1(splitLines(code), sel.from, sel.to, function(end) {
if (collapse == "end") return {from: end, to: end};
else if (collapse == "start") return {from: sel.from, to: sel.from};
else return {from: sel.from, to: end};
});
}
function replaceRange1(code, from, to, computeSel) {
var endch = code.length == 1 ? code[0].length + from.ch : code[code.length-1].length;
var newSel = computeSel({line: from.line + code.length - 1, ch: endch});
updateLines(from, to, code, newSel.from, newSel.to);
}
function getRange(from, to) {
var l1 = from.line, l2 = to.line;
if (l1 == l2) return getLine(l1).text.slice(from.ch, to.ch);
var code = [getLine(l1).text.slice(from.ch)];
doc.iter(l1 + 1, l2, function(line) { code.push(line.text); });
code.push(getLine(l2).text.slice(0, to.ch));
return code.join("\n");
}
function getSelection() {
return getRange(sel.from, sel.to);
}
var pollingFast = false; // Ensures slowPoll doesn't cancel fastPoll
function slowPoll() {
if (pollingFast) return;
poll.set(options.pollInterval, function() {
startOperation();
readInput();
if (focused) slowPoll();
endOperation();
});
}
function fastPoll() {
var missed = false;
pollingFast = true;
function p() {
startOperation();
var changed = readInput();
if (!changed && !missed) {missed = true; poll.set(60, p);}
else {pollingFast = false; slowPoll();}
endOperation();
}
poll.set(20, p);
}
// Previnput is a hack to work with IME. If we reset the textarea
// on every change, that breaks IME. So we look for changes
// compared to the previous content instead. (Modern browsers have
// events that indicate IME taking place, but these are not widely
// supported or compatible enough yet to rely on.)
var prevInput = "";
function readInput() {
if (leaveInputAlone || !focused || hasSelection(input) || options.readOnly) return false;
var text = input.value;
if (text == prevInput) return false;
shiftSelecting = null;
var same = 0, l = Math.min(prevInput.length, text.length);
while (same < l && prevInput[same] == text[same]) ++same;
if (same < prevInput.length)
sel.from = {line: sel.from.line, ch: sel.from.ch - (prevInput.length - same)};
else if (overwrite && posEq(sel.from, sel.to))
sel.to = {line: sel.to.line, ch: Math.min(getLine(sel.to.line).text.length, sel.to.ch + (text.length - same))};
replaceSelection(text.slice(same), "end");
prevInput = text;
return true;
}
function resetInput(user) {
if (!posEq(sel.from, sel.to)) {
prevInput = "";
input.value = getSelection();
selectInput(input);
} else if (user) prevInput = input.value = "";
}
function focusInput() {
if (options.readOnly != "nocursor") input.focus();
}
function scrollEditorIntoView() {
if (!cursor.getBoundingClientRect) return;
var rect = cursor.getBoundingClientRect();
// IE returns bogus coordinates when the instance sits inside of an iframe and the cursor is hidden
if (ie && rect.top == rect.bottom) return;
var winH = window.innerHeight || Math.max(document.body.offsetHeight, document.documentElement.offsetHeight);
if (rect.top < 0 || rect.bottom > winH) cursor.scrollIntoView();
}
function scrollCursorIntoView() {
var cursor = localCoords(sel.inverted ? sel.from : sel.to);
var x = options.lineWrapping ? Math.min(cursor.x, lineSpace.offsetWidth) : cursor.x;
return scrollIntoView(x, cursor.y, x, cursor.yBot);
}
function scrollIntoView(x1, y1, x2, y2) {
var pl = paddingLeft(), pt = paddingTop();
y1 += pt; y2 += pt; x1 += pl; x2 += pl;
var screen = scroller.clientHeight, screentop = scroller.scrollTop, scrolled = false, result = true;
if (y1 < screentop) {scroller.scrollTop = Math.max(0, y1); scrolled = true;}
else if (y2 > screentop + screen) {scroller.scrollTop = y2 - screen; scrolled = true;}
var screenw = scroller.clientWidth, screenleft = scroller.scrollLeft;
var gutterw = options.fixedGutter ? gutter.clientWidth : 0;
if (x1 < screenleft + gutterw) {
if (x1 < 50) x1 = 0;
scroller.scrollLeft = Math.max(0, x1 - 10 - gutterw);
scrolled = true;
}
else if (x2 > screenw + screenleft - 3) {
scroller.scrollLeft = x2 + 10 - screenw;
scrolled = true;
if (x2 > code.clientWidth) result = false;
}
if (scrolled && options.onScroll) options.onScroll(instance);
return result;
}
function visibleLines() {
var lh = textHeight(), top = scroller.scrollTop - paddingTop();
var from_height = Math.max(0, Math.floor(top / lh));
var to_height = Math.ceil((top + scroller.clientHeight) / lh);
return {from: lineAtHeight(doc, from_height),
to: lineAtHeight(doc, to_height)};
}
// Uses a set of changes plus the current scroll position to
// determine which DOM updates have to be made, and makes the
// updates.
function updateDisplay(changes, suppressCallback) {
if (!scroller.clientWidth) {
showingFrom = showingTo = displayOffset = 0;
return;
}
// Compute the new visible window
var visible = visibleLines();
// Bail out if the visible area is already rendered and nothing changed.
if (changes !== true && changes.length == 0 && visible.from > showingFrom && visible.to < showingTo) return;
var from = Math.max(visible.from - 100, 0), to = Math.min(doc.size, visible.to + 100);
if (showingFrom < from && from - showingFrom < 20) from = showingFrom;
if (showingTo > to && showingTo - to < 20) to = Math.min(doc.size, showingTo);
// Create a range of theoretically intact lines, and punch holes
// in that using the change info.
var intact = changes === true ? [] :
computeIntact([{from: showingFrom, to: showingTo, domStart: 0}], changes);
// Clip off the parts that won't be visible
var intactLines = 0;
for (var i = 0; i < intact.length; ++i) {
var range = intact[i];
if (range.from < from) {range.domStart += (from - range.from); range.from = from;}
if (range.to > to) range.to = to;
if (range.from >= range.to) intact.splice(i--, 1);
else intactLines += range.to - range.from;
}
if (intactLines == to - from) return;
intact.sort(function(a, b) {return a.domStart - b.domStart;});
var th = textHeight(), gutterDisplay = gutter.style.display;
lineDiv.style.display = "none";
patchDisplay(from, to, intact);
lineDiv.style.display = gutter.style.display = "";
// Position the mover div to align with the lines it's supposed
// to be showing (which will cover the visible display)
var different = from != showingFrom || to != showingTo || lastSizeC != scroller.clientHeight + th;
// This is just a bogus formula that detects when the editor is
// resized or the font size changes.
if (different) lastSizeC = scroller.clientHeight + th;
showingFrom = from; showingTo = to;
displayOffset = heightAtLine(doc, from);
mover.style.top = (displayOffset * th) + "px";
if (scroller.clientHeight)
code.style.height = (doc.height * th + 2 * paddingTop()) + "px";
// Since this is all rather error prone, it is honoured with the
// only assertion in the whole file.
if (lineDiv.childNodes.length != showingTo - showingFrom)
throw new Error("BAD PATCH! " + JSON.stringify(intact) + " size=" + (showingTo - showingFrom) +
" nodes=" + lineDiv.childNodes.length);
function checkHeights() {
maxWidth = scroller.clientWidth;
var curNode = lineDiv.firstChild, heightChanged = false;
doc.iter(showingFrom, showingTo, function(line) {
if (!line.hidden) {
var height = Math.round(curNode.offsetHeight / th) || 1;
if (line.height != height) {
updateLineHeight(line, height);
gutterDirty = heightChanged = true;
}
}
curNode = curNode.nextSibling;
});
if (heightChanged)
code.style.height = (doc.height * th + 2 * paddingTop()) + "px";
return heightChanged;
}
if (options.lineWrapping) {
checkHeights();
} else {
if (maxWidth == null) maxWidth = stringWidth(maxLine);
if (maxWidth > scroller.clientWidth) {
lineSpace.style.width = maxWidth + "px";
// Needed to prevent odd wrapping/hiding of widgets placed in here.
code.style.width = "";
code.style.width = scroller.scrollWidth + "px";
} else {
lineSpace.style.width = code.style.width = "";
}
}
gutter.style.display = gutterDisplay;
if (different || gutterDirty) {
// If the gutter grew in size, re-check heights. If those changed, re-draw gutter.
updateGutter() && options.lineWrapping && checkHeights() && updateGutter();
}
updateSelection();
if (!suppressCallback && options.onUpdate) options.onUpdate(instance);
return true;
}
function computeIntact(intact, changes) {
for (var i = 0, l = changes.length || 0; i < l; ++i) {
var change = changes[i], intact2 = [], diff = change.diff || 0;
for (var j = 0, l2 = intact.length; j < l2; ++j) {
var range = intact[j];
if (change.to <= range.from && change.diff)
intact2.push({from: range.from + diff, to: range.to + diff,
domStart: range.domStart});
else if (change.to <= range.from || change.from >= range.to)
intact2.push(range);
else {
if (change.from > range.from)
intact2.push({from: range.from, to: change.from, domStart: range.domStart});
if (change.to < range.to)
intact2.push({from: change.to + diff, to: range.to + diff,
domStart: range.domStart + (change.to - range.from)});
}
}
intact = intact2;
}
return intact;
}
function patchDisplay(from, to, intact) {
// The first pass removes the DOM nodes that aren't intact.
if (!intact.length) lineDiv.innerHTML = "";
else {
function killNode(node) {
var tmp = node.nextSibling;
node.parentNode.removeChild(node);
return tmp;
}
var domPos = 0, curNode = lineDiv.firstChild, n;
for (var i = 0; i < intact.length; ++i) {
var cur = intact[i];
while (cur.domStart > domPos) {curNode = killNode(curNode); domPos++;}
for (var j = 0, e = cur.to - cur.from; j < e; ++j) {curNode = curNode.nextSibling; domPos++;}
}
while (curNode) curNode = killNode(curNode);
}
// This pass fills in the lines that actually changed.
var nextIntact = intact.shift(), curNode = lineDiv.firstChild, j = from;
var scratch = document.createElement("div");
doc.iter(from, to, function(line) {
if (nextIntact && nextIntact.to == j) nextIntact = intact.shift();
if (!nextIntact || nextIntact.from > j) {
if (line.hidden) var html = scratch.innerHTML = "<pre></pre>";
else {
var html = '<pre' + (line.className ? ' class="' + line.className + '"' : '') + '>'
+ line.getHTML(makeTab) + '</pre>';
// Kludge to make sure the styled element lies behind the selection (by z-index)
if (line.bgClassName)
html = '<div style="position: relative"><pre class="' + line.bgClassName +
'" style="position: absolute; left: 0; right: 0; top: 0; bottom: 0; z-index: -2"> </pre>' + html + "</div>";
}
scratch.innerHTML = html;
lineDiv.insertBefore(scratch.firstChild, curNode);
} else {
curNode = curNode.nextSibling;
}
++j;
});
}
function updateGutter() {
if (!options.gutter && !options.lineNumbers) return;
var hText = mover.offsetHeight, hEditor = scroller.clientHeight;
gutter.style.height = (hText - hEditor < 2 ? hEditor : hText) + "px";
var html = [], i = showingFrom, normalNode;
doc.iter(showingFrom, Math.max(showingTo, showingFrom + 1), function(line) {
if (line.hidden) {
html.push("<pre></pre>");
} else {
var marker = line.gutterMarker;
var text = options.lineNumbers ? i + options.firstLineNumber : null;
if (marker && marker.text)
text = marker.text.replace("%N%", text != null ? text : "");
else if (text == null)
text = "\u00a0";
html.push((marker && marker.style ? '<pre class="' + marker.style + '">' : "<pre>"), text);
for (var j = 1; j < line.height; ++j) html.push("<br/> ");
html.push("</pre>");
if (!marker) normalNode = i;
}
++i;
});
gutter.style.display = "none";
gutterText.innerHTML = html.join("");
// Make sure scrolling doesn't cause number gutter size to pop
if (normalNode != null) {
var node = gutterText.childNodes[normalNode - showingFrom];
var minwidth = String(doc.size).length, val = eltText(node), pad = "";
while (val.length + pad.length < minwidth) pad += "\u00a0";
if (pad) node.insertBefore(document.createTextNode(pad), node.firstChild);
}
gutter.style.display = "";
var resized = Math.abs((parseInt(lineSpace.style.marginLeft) || 0) - gutter.offsetWidth) > 2;
lineSpace.style.marginLeft = gutter.offsetWidth + "px";
gutterDirty = false;
return resized;
}
function updateSelection() {
var collapsed = posEq(sel.from, sel.to);
var fromPos = localCoords(sel.from, true);
var toPos = collapsed ? fromPos : localCoords(sel.to, true);
var headPos = sel.inverted ? fromPos : toPos, th = textHeight();
var wrapOff = eltOffset(wrapper), lineOff = eltOffset(lineDiv);
inputDiv.style.top = Math.max(0, Math.min(scroller.offsetHeight, headPos.y + lineOff.top - wrapOff.top)) + "px";
inputDiv.style.left = Math.max(0, Math.min(scroller.offsetWidth, headPos.x + lineOff.left - wrapOff.left)) + "px";
if (collapsed) {
cursor.style.top = headPos.y + "px";
cursor.style.left = (options.lineWrapping ? Math.min(headPos.x, lineSpace.offsetWidth) : headPos.x) + "px";
cursor.style.display = "";
selectionDiv.style.display = "none";
} else {
var sameLine = fromPos.y == toPos.y, html = "";
function add(left, top, right, height) {
html += '<div class="CodeMirror-selected" style="position: absolute; left: ' + left +
'px; top: ' + top + 'px; right: ' + right + 'px; height: ' + height + 'px"></div>';
}
var clientWidth = lineSpace.clientWidth || lineSpace.offsetWidth;
var clientHeight = lineSpace.clientHeight || lineSpace.offsetHeight;
if (sel.from.ch && fromPos.y >= 0) {
var right = sameLine ? clientWidth - toPos.x : 0;
add(fromPos.x, fromPos.y, right, th);
}
var middleStart = Math.max(0, fromPos.y + (sel.from.ch ? th : 0));
var middleHeight = Math.min(toPos.y, clientHeight) - middleStart;
if (middleHeight > 0.2 * th)
add(0, middleStart, 0, middleHeight);
if ((!sameLine || !sel.from.ch) && toPos.y < clientHeight - .5 * th)
add(0, toPos.y, clientWidth - toPos.x, th);
selectionDiv.innerHTML = html;
cursor.style.display = "none";
selectionDiv.style.display = "";
}
}
function setShift(val) {
if (val) shiftSelecting = shiftSelecting || (sel.inverted ? sel.to : sel.from);
else shiftSelecting = null;
}
function setSelectionUser(from, to) {
var sh = shiftSelecting && clipPos(shiftSelecting);
if (sh) {
if (posLess(sh, from)) from = sh;
else if (posLess(to, sh)) to = sh;
}
setSelection(from, to);
userSelChange = true;
}
// Update the selection. Last two args are only used by
// updateLines, since they have to be expressed in the line
// numbers before the update.
function setSelection(from, to, oldFrom, oldTo) {
goalColumn = null;
if (oldFrom == null) {oldFrom = sel.from.line; oldTo = sel.to.line;}
if (posEq(sel.from, from) && posEq(sel.to, to)) return;
if (posLess(to, from)) {var tmp = to; to = from; from = tmp;}
// Skip over hidden lines.
if (from.line != oldFrom) {
var from1 = skipHidden(from, oldFrom, sel.from.ch);
// If there is no non-hidden line left, force visibility on current line
if (!from1) setLineHidden(from.line, false);
else from = from1;
}
if (to.line != oldTo) to = skipHidden(to, oldTo, sel.to.ch);
if (posEq(from, to)) sel.inverted = false;
else if (posEq(from, sel.to)) sel.inverted = false;
else if (posEq(to, sel.from)) sel.inverted = true;
if (options.autoClearEmptyLines && posEq(sel.from, sel.to)) {
var head = sel.inverted ? from : to;
if (head.line != sel.from.line && sel.from.line < doc.size) {
var oldLine = getLine(sel.from.line);
if (/^\s+$/.test(oldLine.text))
setTimeout(operation(function() {
if (oldLine.parent && /^\s+$/.test(oldLine.text)) {
var no = lineNo(oldLine);
replaceRange("", {line: no, ch: 0}, {line: no, ch: oldLine.text.length});
}
}, 10));
}
}
sel.from = from; sel.to = to;
selectionChanged = true;
}
function skipHidden(pos, oldLine, oldCh) {
function getNonHidden(dir) {
var lNo = pos.line + dir, end = dir == 1 ? doc.size : -1;
while (lNo != end) {
var line = getLine(lNo);
if (!line.hidden) {
var ch = pos.ch;
if (ch > oldCh || ch > line.text.length) ch = line.text.length;
return {line: lNo, ch: ch};
}
lNo += dir;
}
}
var line = getLine(pos.line);
if (!line.hidden) return pos;
if (pos.line >= oldLine) return getNonHidden(1) || getNonHidden(-1);
else return getNonHidden(-1) || getNonHidden(1);
}
function setCursor(line, ch, user) {
var pos = clipPos({line: line, ch: ch || 0});
(user ? setSelectionUser : setSelection)(pos, pos);
}
function clipLine(n) {return Math.max(0, Math.min(n, doc.size-1));}
function clipPos(pos) {
if (pos.line < 0) return {line: 0, ch: 0};
if (pos.line >= doc.size) return {line: doc.size-1, ch: getLine(doc.size-1).text.length};
var ch = pos.ch, linelen = getLine(pos.line).text.length;
if (ch == null || ch > linelen) return {line: pos.line, ch: linelen};
else if (ch < 0) return {line: pos.line, ch: 0};
else return pos;
}
function findPosH(dir, unit) {
var end = sel.inverted ? sel.from : sel.to, line = end.line, ch = end.ch;
var lineObj = getLine(line);
function findNextLine() {
for (var l = line + dir, e = dir < 0 ? -1 : doc.size; l != e; l += dir) {
var lo = getLine(l);
if (!lo.hidden) { line = l; lineObj = lo; return true; }
}
}
function moveOnce(boundToLine) {
if (ch == (dir < 0 ? 0 : lineObj.text.length)) {
if (!boundToLine && findNextLine()) ch = dir < 0 ? lineObj.text.length : 0;
else return false;
} else ch += dir;
return true;
}
if (unit == "char") moveOnce();
else if (unit == "column") moveOnce(true);
else if (unit == "word") {
var sawWord = false;
for (;;) {
if (dir < 0) if (!moveOnce()) break;
if (isWordChar(lineObj.text.charAt(ch))) sawWord = true;
else if (sawWord) {if (dir < 0) {dir = 1; moveOnce();} break;}
if (dir > 0) if (!moveOnce()) break;
}
}
return {line: line, ch: ch};
}
function moveH(dir, unit) {
var pos = dir < 0 ? sel.from : sel.to;
if (shiftSelecting || posEq(sel.from, sel.to)) pos = findPosH(dir, unit);
setCursor(pos.line, pos.ch, true);
}
function deleteH(dir, unit) {
if (!posEq(sel.from, sel.to)) replaceRange("", sel.from, sel.to);
else if (dir < 0) replaceRange("", findPosH(dir, unit), sel.to);
else replaceRange("", sel.from, findPosH(dir, unit));
userSelChange = true;
}
var goalColumn = null;
function moveV(dir, unit) {
var dist = 0, pos = localCoords(sel.inverted ? sel.from : sel.to, true);
if (goalColumn != null) pos.x = goalColumn;
if (unit == "page") dist = Math.min(scroller.clientHeight, window.innerHeight || document.documentElement.clientHeight);
else if (unit == "line") dist = textHeight();
var target = coordsChar(pos.x, pos.y + dist * dir + 2);
if (unit == "page") scroller.scrollTop += localCoords(target, true).y - pos.y;
setCursor(target.line, target.ch, true);
goalColumn = pos.x;
}
function selectWordAt(pos) {
var line = getLine(pos.line).text;
var start = pos.ch, end = pos.ch;
while (start > 0 && isWordChar(line.charAt(start - 1))) --start;
while (end < line.length && isWordChar(line.charAt(end))) ++end;
setSelectionUser({line: pos.line, ch: start}, {line: pos.line, ch: end});
}
function selectLine(line) {
setSelectionUser({line: line, ch: 0}, clipPos({line: line + 1, ch: 0}));
}
function indentSelected(mode) {
if (posEq(sel.from, sel.to)) return indentLine(sel.from.line, mode);
var e = sel.to.line - (sel.to.ch ? 0 : 1);
for (var i = sel.from.line; i <= e; ++i) indentLine(i, mode);
}
function indentLine(n, how) {
if (!how) how = "add";
if (how == "smart") {
if (!mode.indent) how = "prev";
else var state = getStateBefore(n);
}
var line = getLine(n), curSpace = line.indentation(options.tabSize),
curSpaceString = line.text.match(/^\s*/)[0], indentation;
if (how == "prev") {
if (n) indentation = getLine(n-1).indentation(options.tabSize);
else indentation = 0;
}
else if (how == "smart") indentation = mode.indent(state, line.text.slice(curSpaceString.length), line.text);
else if (how == "add") indentation = curSpace + options.indentUnit;
else if (how == "subtract") indentation = curSpace - options.indentUnit;
indentation = Math.max(0, indentation);
var diff = indentation - curSpace;
if (!diff) {
if (sel.from.line != n && sel.to.line != n) return;
var indentString = curSpaceString;
}
else {
var indentString = "", pos = 0;
if (options.indentWithTabs)
for (var i = Math.floor(indentation / options.tabSize); i; --i) {pos += options.tabSize; indentString += "\t";}
while (pos < indentation) {++pos; indentString += " ";}
}
replaceRange(indentString, {line: n, ch: 0}, {line: n, ch: curSpaceString.length});
}
function loadMode() {
mode = CodeMirror.getMode(options, options.mode);
doc.iter(0, doc.size, function(line) { line.stateAfter = null; });
work = [0];
startWorker();
}
function gutterChanged() {
var visible = options.gutter || options.lineNumbers;
gutter.style.display = visible ? "" : "none";
if (visible) gutterDirty = true;
else lineDiv.parentNode.style.marginLeft = 0;
}
function wrappingChanged(from, to) {
if (options.lineWrapping) {
wrapper.className += " CodeMirror-wrap";
var perLine = scroller.clientWidth / charWidth() - 3;
doc.iter(0, doc.size, function(line) {
if (line.hidden) return;
var guess = Math.ceil(line.text.length / perLine) || 1;
if (guess != 1) updateLineHeight(line, guess);
});
lineSpace.style.width = code.style.width = "";
} else {
wrapper.className = wrapper.className.replace(" CodeMirror-wrap", "");
maxWidth = null; maxLine = "";
doc.iter(0, doc.size, function(line) {
if (line.height != 1 && !line.hidden) updateLineHeight(line, 1);
if (line.text.length > maxLine.length) maxLine = line.text;
});
}
changes.push({from: 0, to: doc.size});
}
function makeTab(col) {
var w = options.tabSize - col % options.tabSize, cached = tabCache[w];
if (cached) return cached;
for (var str = '<span class="cm-tab">', i = 0; i < w; ++i) str += " ";
return (tabCache[w] = {html: str + "</span>", width: w});
}
function themeChanged() {
scroller.className = scroller.className.replace(/\s*cm-s-\w+/g, "") +
options.theme.replace(/(^|\s)\s*/g, " cm-s-");
}
function TextMarker() { this.set = []; }
TextMarker.prototype.clear = operation(function() {
var min = Infinity, max = -Infinity;
for (var i = 0, e = this.set.length; i < e; ++i) {
var line = this.set[i], mk = line.marked;
if (!mk || !line.parent) continue;
var lineN = lineNo(line);
min = Math.min(min, lineN); max = Math.max(max, lineN);
for (var j = 0; j < mk.length; ++j)
if (mk[j].marker == this) mk.splice(j--, 1);
}
if (min != Infinity)
changes.push({from: min, to: max + 1});
});
TextMarker.prototype.find = function() {
var from, to;
for (var i = 0, e = this.set.length; i < e; ++i) {
var line = this.set[i], mk = line.marked;
for (var j = 0; j < mk.length; ++j) {
var mark = mk[j];
if (mark.marker == this) {
if (mark.from != null || mark.to != null) {
var found = lineNo(line);
if (found != null) {
if (mark.from != null) from = {line: found, ch: mark.from};
if (mark.to != null) to = {line: found, ch: mark.to};
}
}
}
}
}
return {from: from, to: to};
};
function markText(from, to, className) {
from = clipPos(from); to = clipPos(to);
var tm = new TextMarker();
if (!posLess(from, to)) return tm;
function add(line, from, to, className) {
getLine(line).addMark(new MarkedText(from, to, className, tm));
}
if (from.line == to.line) add(from.line, from.ch, to.ch, className);
else {
add(from.line, from.ch, null, className);
for (var i = from.line + 1, e = to.line; i < e; ++i)
add(i, null, null, className);
add(to.line, null, to.ch, className);
}
changes.push({from: from.line, to: to.line + 1});
return tm;
}
function setBookmark(pos) {
pos = clipPos(pos);
var bm = new Bookmark(pos.ch);
getLine(pos.line).addMark(bm);
return bm;
}
function findMarksAt(pos) {
pos = clipPos(pos);
var markers = [], marked = getLine(pos.line).marked;
if (!marked) return markers;
for (var i = 0, e = marked.length; i < e; ++i) {
var m = marked[i];
if ((m.from == null || m.from <= pos.ch) &&
(m.to == null || m.to >= pos.ch))
markers.push(m.marker || m);
}
return markers;
}
function addGutterMarker(line, text, className) {
if (typeof line == "number") line = getLine(clipLine(line));
line.gutterMarker = {text: text, style: className};
gutterDirty = true;
return line;
}
function removeGutterMarker(line) {
if (typeof line == "number") line = getLine(clipLine(line));
line.gutterMarker = null;
gutterDirty = true;
}
function changeLine(handle, op) {
var no = handle, line = handle;
if (typeof handle == "number") line = getLine(clipLine(handle));
else no = lineNo(handle);
if (no == null) return null;
if (op(line, no)) changes.push({from: no, to: no + 1});
else return null;
return line;
}
function setLineClass(handle, className, bgClassName) {
return changeLine(handle, function(line) {
if (line.className != className || line.bgClassName != bgClassName) {
line.className = className;
line.bgClassName = bgClassName;
return true;
}
});
}
function setLineHidden(handle, hidden) {
return changeLine(handle, function(line, no) {
if (line.hidden != hidden) {
line.hidden = hidden;
updateLineHeight(line, hidden ? 0 : 1);
var fline = sel.from.line, tline = sel.to.line;
if (hidden && (fline == no || tline == no)) {
var from = fline == no ? skipHidden({line: fline, ch: 0}, fline, 0) : sel.from;
var to = tline == no ? skipHidden({line: tline, ch: 0}, tline, 0) : sel.to;
// Can't hide the last visible line, we'd have no place to put the cursor
if (!to) return;
setSelection(from, to);
}
return (gutterDirty = true);
}
});
}
function lineInfo(line) {
if (typeof line == "number") {
if (!isLine(line)) return null;
var n = line;
line = getLine(line);
if (!line) return null;
}
else {
var n = lineNo(line);
if (n == null) return null;
}
var marker = line.gutterMarker;
return {line: n, handle: line, text: line.text, markerText: marker && marker.text,
markerClass: marker && marker.style, lineClass: line.className, bgClass: line.bgClassName};
}
function stringWidth(str) {
measure.innerHTML = "<pre><span>x</span></pre>";
measure.firstChild.firstChild.firstChild.nodeValue = str;
return measure.firstChild.firstChild.offsetWidth || 10;
}
// These are used to go from pixel positions to character
// positions, taking varying character widths into account.
function charFromX(line, x) {
if (x <= 0) return 0;
var lineObj = getLine(line), text = lineObj.text;
function getX(len) {
measure.innerHTML = "<pre><span>" + lineObj.getHTML(makeTab, len) + "</span></pre>";
return measure.firstChild.firstChild.offsetWidth;
}
var from = 0, fromX = 0, to = text.length, toX;
// Guess a suitable upper bound for our search.
var estimated = Math.min(to, Math.ceil(x / charWidth()));
for (;;) {
var estX = getX(estimated);
if (estX <= x && estimated < to) estimated = Math.min(to, Math.ceil(estimated * 1.2));
else {toX = estX; to = estimated; break;}
}
if (x > toX) return to;
// Try to guess a suitable lower bound as well.
estimated = Math.floor(to * 0.8); estX = getX(estimated);
if (estX < x) {from = estimated; fromX = estX;}
// Do a binary search between these bounds.
for (;;) {
if (to - from <= 1) return (toX - x > x - fromX) ? from : to;
var middle = Math.ceil((from + to) / 2), middleX = getX(middle);
if (middleX > x) {to = middle; toX = middleX;}
else {from = middle; fromX = middleX;}
}
}
var tempId = Math.floor(Math.random() * 0xffffff).toString(16);
function measureLine(line, ch) {
if (ch == 0) return {top: 0, left: 0};
var extra = "";
// Include extra text at the end to make sure the measured line is wrapped in the right way.
if (options.lineWrapping) {
var end = line.text.indexOf(" ", ch + 6);
extra = htmlEscape(line.text.slice(ch + 1, end < 0 ? line.text.length : end + (ie ? 5 : 0)));
}
measure.innerHTML = "<pre>" + line.getHTML(makeTab, ch) +
'<span id="CodeMirror-temp-' + tempId + '">' + htmlEscape(line.text.charAt(ch) || " ") + "</span>" +
extra + "</pre>";
var elt = document.getElementById("CodeMirror-temp-" + tempId);
var top = elt.offsetTop, left = elt.offsetLeft;
// Older IEs report zero offsets for spans directly after a wrap
if (ie && top == 0 && left == 0) {
var backup = document.createElement("span");
backup.innerHTML = "x";
elt.parentNode.insertBefore(backup, elt.nextSibling);
top = backup.offsetTop;
}
return {top: top, left: left};
}
function localCoords(pos, inLineWrap) {
var x, lh = textHeight(), y = lh * (heightAtLine(doc, pos.line) - (inLineWrap ? displayOffset : 0));
if (pos.ch == 0) x = 0;
else {
var sp = measureLine(getLine(pos.line), pos.ch);
x = sp.left;
if (options.lineWrapping) y += Math.max(0, sp.top);
}
return {x: x, y: y, yBot: y + lh};
}
// Coords must be lineSpace-local
function coordsChar(x, y) {
if (y < 0) y = 0;
var th = textHeight(), cw = charWidth(), heightPos = displayOffset + Math.floor(y / th);
var lineNo = lineAtHeight(doc, heightPos);
if (lineNo >= doc.size) return {line: doc.size - 1, ch: getLine(doc.size - 1).text.length};
var lineObj = getLine(lineNo), text = lineObj.text;
var tw = options.lineWrapping, innerOff = tw ? heightPos - heightAtLine(doc, lineNo) : 0;
if (x <= 0 && innerOff == 0) return {line: lineNo, ch: 0};
function getX(len) {
var sp = measureLine(lineObj, len);
if (tw) {
var off = Math.round(sp.top / th);
return Math.max(0, sp.left + (off - innerOff) * scroller.clientWidth);
}
return sp.left;
}
var from = 0, fromX = 0, to = text.length, toX;
// Guess a suitable upper bound for our search.
var estimated = Math.min(to, Math.ceil((x + innerOff * scroller.clientWidth * .9) / cw));
for (;;) {
var estX = getX(estimated);
if (estX <= x && estimated < to) estimated = Math.min(to, Math.ceil(estimated * 1.2));
else {toX = estX; to = estimated; break;}
}
if (x > toX) return {line: lineNo, ch: to};
// Try to guess a suitable lower bound as well.
estimated = Math.floor(to * 0.8); estX = getX(estimated);
if (estX < x) {from = estimated; fromX = estX;}
// Do a binary search between these bounds.
for (;;) {
if (to - from <= 1) return {line: lineNo, ch: (toX - x > x - fromX) ? from : to};
var middle = Math.ceil((from + to) / 2), middleX = getX(middle);
if (middleX > x) {to = middle; toX = middleX;}
else {from = middle; fromX = middleX;}
}
}
function pageCoords(pos) {
var local = localCoords(pos, true), off = eltOffset(lineSpace);
return {x: off.left + local.x, y: off.top + local.y, yBot: off.top + local.yBot};
}
var cachedHeight, cachedHeightFor, measureText;
function textHeight() {
if (measureText == null) {
measureText = "<pre>";
for (var i = 0; i < 49; ++i) measureText += "x<br/>";
measureText += "x</pre>";
}
var offsetHeight = lineDiv.clientHeight;
if (offsetHeight == cachedHeightFor) return cachedHeight;
cachedHeightFor = offsetHeight;
measure.innerHTML = measureText;
cachedHeight = measure.firstChild.offsetHeight / 50 || 1;
measure.innerHTML = "";
return cachedHeight;
}
var cachedWidth, cachedWidthFor = 0;
function charWidth() {
if (scroller.clientWidth == cachedWidthFor) return cachedWidth;
cachedWidthFor = scroller.clientWidth;
return (cachedWidth = stringWidth("x"));
}
function paddingTop() {return lineSpace.offsetTop;}
function paddingLeft() {return lineSpace.offsetLeft;}
function posFromMouse(e, liberal) {
var offW = eltOffset(scroller, true), x, y;
// Fails unpredictably on IE[67] when mouse is dragged around quickly.
try { x = e.clientX; y = e.clientY; } catch (e) { return null; }
// This is a mess of a heuristic to try and determine whether a
// scroll-bar was clicked or not, and to return null if one was
// (and !liberal).
if (!liberal && (x - offW.left > scroller.clientWidth || y - offW.top > scroller.clientHeight))
return null;
var offL = eltOffset(lineSpace, true);
return coordsChar(x - offL.left, y - offL.top);
}
function onContextMenu(e) {
var pos = posFromMouse(e), scrollPos = scroller.scrollTop;
if (!pos || window.opera) return; // Opera is difficult.
if (posEq(sel.from, sel.to) || posLess(pos, sel.from) || !posLess(pos, sel.to))
operation(setCursor)(pos.line, pos.ch);
var oldCSS = input.style.cssText;
inputDiv.style.position = "absolute";
input.style.cssText = "position: fixed; width: 30px; height: 30px; top: " + (e.clientY - 5) +
"px; left: " + (e.clientX - 5) + "px; z-index: 1000; background: white; " +
"border-width: 0; outline: none; overflow: hidden; opacity: .05; filter: alpha(opacity=5);";
leaveInputAlone = true;
var val = input.value = getSelection();
focusInput();
selectInput(input);
function rehide() {
var newVal = splitLines(input.value).join("\n");
if (newVal != val) operation(replaceSelection)(newVal, "end");
inputDiv.style.position = "relative";
input.style.cssText = oldCSS;
if (ie_lt9) scroller.scrollTop = scrollPos;
leaveInputAlone = false;
resetInput(true);
slowPoll();
}
if (gecko) {
e_stop(e);
var mouseup = connect(window, "mouseup", function() {
mouseup();
setTimeout(rehide, 20);
}, true);
} else {
setTimeout(rehide, 50);
}
}
// Cursor-blinking
function restartBlink() {
clearInterval(blinker);
var on = true;
cursor.style.visibility = "";
blinker = setInterval(function() {
cursor.style.visibility = (on = !on) ? "" : "hidden";
}, 650);
}
var matching = {"(": ")>", ")": "(<", "[": "]>", "]": "[<", "{": "}>", "}": "{<"};
function matchBrackets(autoclear) {
var head = sel.inverted ? sel.from : sel.to, line = getLine(head.line), pos = head.ch - 1;
var match = (pos >= 0 && matching[line.text.charAt(pos)]) || matching[line.text.charAt(++pos)];
if (!match) return;
var ch = match.charAt(0), forward = match.charAt(1) == ">", d = forward ? 1 : -1, st = line.styles;
for (var off = pos + 1, i = 0, e = st.length; i < e; i+=2)
if ((off -= st[i].length) <= 0) {var style = st[i+1]; break;}
var stack = [line.text.charAt(pos)], re = /[(){}[\]]/;
function scan(line, from, to) {
if (!line.text) return;
var st = line.styles, pos = forward ? 0 : line.text.length - 1, cur;
for (var i = forward ? 0 : st.length - 2, e = forward ? st.length : -2; i != e; i += 2*d) {
var text = st[i];
if (st[i+1] != null && st[i+1] != style) {pos += d * text.length; continue;}
for (var j = forward ? 0 : text.length - 1, te = forward ? text.length : -1; j != te; j += d, pos+=d) {
if (pos >= from && pos < to && re.test(cur = text.charAt(j))) {
var match = matching[cur];
if (match.charAt(1) == ">" == forward) stack.push(cur);
else if (stack.pop() != match.charAt(0)) return {pos: pos, match: false};
else if (!stack.length) return {pos: pos, match: true};
}
}
}
}
for (var i = head.line, e = forward ? Math.min(i + 100, doc.size) : Math.max(-1, i - 100); i != e; i+=d) {
var line = getLine(i), first = i == head.line;
var found = scan(line, first && forward ? pos + 1 : 0, first && !forward ? pos : line.text.length);
if (found) break;
}
if (!found) found = {pos: null, match: false};
var style = found.match ? "CodeMirror-matchingbracket" : "CodeMirror-nonmatchingbracket";
var one = markText({line: head.line, ch: pos}, {line: head.line, ch: pos+1}, style),
two = found.pos != null && markText({line: i, ch: found.pos}, {line: i, ch: found.pos + 1}, style);
var clear = operation(function(){one.clear(); two && two.clear();});
if (autoclear) setTimeout(clear, 800);
else bracketHighlighted = clear;
}
// Finds the line to start with when starting a parse. Tries to
// find a line with a stateAfter, so that it can start with a
// valid state. If that fails, it returns the line with the
// smallest indentation, which tends to need the least context to
// parse correctly.
function findStartLine(n) {
var minindent, minline;
for (var search = n, lim = n - 40; search > lim; --search) {
if (search == 0) return 0;
var line = getLine(search-1);
if (line.stateAfter) return search;
var indented = line.indentation(options.tabSize);
if (minline == null || minindent > indented) {
minline = search - 1;
minindent = indented;
}
}
return minline;
}
function getStateBefore(n) {
var start = findStartLine(n), state = start && getLine(start-1).stateAfter;
if (!state) state = startState(mode);
else state = copyState(mode, state);
doc.iter(start, n, function(line) {
line.highlight(mode, state, options.tabSize);
line.stateAfter = copyState(mode, state);
});
if (start < n) changes.push({from: start, to: n});
if (n < doc.size && !getLine(n).stateAfter) work.push(n);
return state;
}
function highlightLines(start, end) {
var state = getStateBefore(start);
doc.iter(start, end, function(line) {
line.highlight(mode, state, options.tabSize);
line.stateAfter = copyState(mode, state);
});
}
function highlightWorker() {
var end = +new Date + options.workTime;
var foundWork = work.length;
while (work.length) {
if (!getLine(showingFrom).stateAfter) var task = showingFrom;
else var task = work.pop();
if (task >= doc.size) continue;
var start = findStartLine(task), state = start && getLine(start-1).stateAfter;
if (state) state = copyState(mode, state);
else state = startState(mode);
var unchanged = 0, compare = mode.compareStates, realChange = false,
i = start, bail = false;
doc.iter(i, doc.size, function(line) {
var hadState = line.stateAfter;
if (+new Date > end) {
work.push(i);
startWorker(options.workDelay);
if (realChange) changes.push({from: task, to: i + 1});
return (bail = true);
}
var changed = line.highlight(mode, state, options.tabSize);
if (changed) realChange = true;
line.stateAfter = copyState(mode, state);
if (compare) {
if (hadState && compare(hadState, state)) return true;
} else {
if (changed !== false || !hadState) unchanged = 0;
else if (++unchanged > 3 && (!mode.indent || mode.indent(hadState, "") == mode.indent(state, "")))
return true;
}
++i;
});
if (bail) return;
if (realChange) changes.push({from: task, to: i + 1});
}
if (foundWork && options.onHighlightComplete)
options.onHighlightComplete(instance);
}
function startWorker(time) {
if (!work.length) return;
highlight.set(time, operation(highlightWorker));
}
// Operations are used to wrap changes in such a way that each
// change won't have to update the cursor and display (which would
// be awkward, slow, and error-prone), but instead updates are
// batched and then all combined and executed at once.
function startOperation() {
updateInput = userSelChange = textChanged = null;
changes = []; selectionChanged = false; callbacks = [];
}
function endOperation() {
var reScroll = false, updated;
if (selectionChanged) reScroll = !scrollCursorIntoView();
if (changes.length) updated = updateDisplay(changes, true);
else {
if (selectionChanged) updateSelection();
if (gutterDirty) updateGutter();
}
if (reScroll) scrollCursorIntoView();
if (selectionChanged) {scrollEditorIntoView(); restartBlink();}
if (focused && !leaveInputAlone &&
(updateInput === true || (updateInput !== false && selectionChanged)))
resetInput(userSelChange);
if (selectionChanged && options.matchBrackets)
setTimeout(operation(function() {
if (bracketHighlighted) {bracketHighlighted(); bracketHighlighted = null;}
if (posEq(sel.from, sel.to)) matchBrackets(false);
}), 20);
var tc = textChanged, cbs = callbacks; // these can be reset by callbacks
if (selectionChanged && options.onCursorActivity)
options.onCursorActivity(instance);
if (tc && options.onChange && instance)
options.onChange(instance, tc);
for (var i = 0; i < cbs.length; ++i) cbs[i](instance);
if (updated && options.onUpdate) options.onUpdate(instance);
}
var nestedOperation = 0;
function operation(f) {
return function() {
if (!nestedOperation++) startOperation();
try {var result = f.apply(this, arguments);}
finally {if (!--nestedOperation) endOperation();}
return result;
};
}
for (var ext in extensions)
if (extensions.propertyIsEnumerable(ext) &&
!instance.propertyIsEnumerable(ext))
instance[ext] = extensions[ext];
return instance;
} // (end of function CodeMirror)
// The default configuration options.
CodeMirror.defaults = {
value: "",
mode: null,
theme: "default",
indentUnit: 2,
indentWithTabs: false,
smartIndent: true,
tabSize: 4,
keyMap: "default",
extraKeys: null,
electricChars: true,
autoClearEmptyLines: false,
onKeyEvent: null,
lineWrapping: false,
lineNumbers: false,
gutter: false,
fixedGutter: false,
firstLineNumber: 1,
readOnly: false,
onChange: null,
onCursorActivity: null,
onGutterClick: null,
onHighlightComplete: null,
onUpdate: null,
onFocus: null, onBlur: null, onScroll: null,
matchBrackets: false,
workTime: 100,
workDelay: 200,
pollInterval: 100,
undoDepth: 40,
tabindex: null,
autofocus: null
};
var ios = /AppleWebKit/.test(navigator.userAgent) && /Mobile\/\w+/.test(navigator.userAgent);
var mac = ios || /Mac/.test(navigator.platform);
var win = /Win/.test(navigator.platform);
// Known modes, by name and by MIME
var modes = CodeMirror.modes = {}, mimeModes = CodeMirror.mimeModes = {};
CodeMirror.defineMode = function(name, mode) {
if (!CodeMirror.defaults.mode && name != "null") CodeMirror.defaults.mode = name;
modes[name] = mode;
};
CodeMirror.defineMIME = function(mime, spec) {
mimeModes[mime] = spec;
};
CodeMirror.resolveMode = function(spec) {
if (typeof spec == "string" && mimeModes.hasOwnProperty(spec))
spec = mimeModes[spec];
else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+xml$/.test(spec))
return CodeMirror.resolveMode("application/xml");
if (typeof spec == "string") return {name: spec};
else return spec || {name: "null"};
};
CodeMirror.getMode = function(options, spec) {
var spec = CodeMirror.resolveMode(spec);
var mfactory = modes[spec.name];
if (!mfactory) {
if (window.console) console.warn("No mode " + spec.name + " found, falling back to plain text.");
return CodeMirror.getMode(options, "text/plain");
}
return mfactory(options, spec);
};
CodeMirror.listModes = function() {
var list = [];
for (var m in modes)
if (modes.propertyIsEnumerable(m)) list.push(m);
return list;
};
CodeMirror.listMIMEs = function() {
var list = [];
for (var m in mimeModes)
if (mimeModes.propertyIsEnumerable(m)) list.push({mime: m, mode: mimeModes[m]});
return list;
};
var extensions = CodeMirror.extensions = {};
CodeMirror.defineExtension = function(name, func) {
extensions[name] = func;
};
var commands = CodeMirror.commands = {
selectAll: function(cm) {cm.setSelection({line: 0, ch: 0}, {line: cm.lineCount() - 1});},
killLine: function(cm) {
var from = cm.getCursor(true), to = cm.getCursor(false), sel = !posEq(from, to);
if (!sel && cm.getLine(from.line).length == from.ch) cm.replaceRange("", from, {line: from.line + 1, ch: 0});
else cm.replaceRange("", from, sel ? to : {line: from.line});
},
deleteLine: function(cm) {var l = cm.getCursor().line; cm.replaceRange("", {line: l, ch: 0}, {line: l});},
undo: function(cm) {cm.undo();},
redo: function(cm) {cm.redo();},
goDocStart: function(cm) {cm.setCursor(0, 0, true);},
goDocEnd: function(cm) {cm.setSelection({line: cm.lineCount() - 1}, null, true);},
goLineStart: function(cm) {cm.setCursor(cm.getCursor().line, 0, true);},
goLineStartSmart: function(cm) {
var cur = cm.getCursor();
var text = cm.getLine(cur.line), firstNonWS = Math.max(0, text.search(/\S/));
cm.setCursor(cur.line, cur.ch <= firstNonWS && cur.ch ? 0 : firstNonWS, true);
},
goLineEnd: function(cm) {cm.setSelection({line: cm.getCursor().line}, null, true);},
goLineUp: function(cm) {cm.moveV(-1, "line");},
goLineDown: function(cm) {cm.moveV(1, "line");},
goPageUp: function(cm) {cm.moveV(-1, "page");},
goPageDown: function(cm) {cm.moveV(1, "page");},
goCharLeft: function(cm) {cm.moveH(-1, "char");},
goCharRight: function(cm) {cm.moveH(1, "char");},
goColumnLeft: function(cm) {cm.moveH(-1, "column");},
goColumnRight: function(cm) {cm.moveH(1, "column");},
goWordLeft: function(cm) {cm.moveH(-1, "word");},
goWordRight: function(cm) {cm.moveH(1, "word");},
delCharLeft: function(cm) {cm.deleteH(-1, "char");},
delCharRight: function(cm) {cm.deleteH(1, "char");},
delWordLeft: function(cm) {cm.deleteH(-1, "word");},
delWordRight: function(cm) {cm.deleteH(1, "word");},
indentAuto: function(cm) {cm.indentSelection("smart");},
indentMore: function(cm) {cm.indentSelection("add");},
indentLess: function(cm) {cm.indentSelection("subtract");},
insertTab: function(cm) {cm.replaceSelection("\t", "end");},
transposeChars: function(cm) {
var cur = cm.getCursor(), line = cm.getLine(cur.line);
if (cur.ch > 0 && cur.ch < line.length - 1)
cm.replaceRange(line.charAt(cur.ch) + line.charAt(cur.ch - 1),
{line: cur.line, ch: cur.ch - 1}, {line: cur.line, ch: cur.ch + 1});
},
newlineAndIndent: function(cm) {
cm.replaceSelection("\n", "end");
cm.indentLine(cm.getCursor().line);
},
toggleOverwrite: function(cm) {cm.toggleOverwrite();}
};
var keyMap = CodeMirror.keyMap = {};
keyMap.basic = {
"Left": "goCharLeft", "Right": "goCharRight", "Up": "goLineUp", "Down": "goLineDown",
"End": "goLineEnd", "Home": "goLineStartSmart", "PageUp": "goPageUp", "PageDown": "goPageDown",
"Delete": "delCharRight", "Backspace": "delCharLeft", "Tab": "insertTab", "Shift-Tab": "indentAuto",
"Enter": "newlineAndIndent", "Insert": "toggleOverwrite"
};
// Note that the save and find-related commands aren't defined by
// default. Unknown commands are simply ignored.
keyMap.pcDefault = {
"Ctrl-A": "selectAll", "Ctrl-D": "deleteLine", "Ctrl-Z": "undo", "Shift-Ctrl-Z": "redo", "Ctrl-Y": "redo",
"Ctrl-Home": "goDocStart", "Alt-Up": "goDocStart", "Ctrl-End": "goDocEnd", "Ctrl-Down": "goDocEnd",
"Ctrl-Left": "goWordLeft", "Ctrl-Right": "goWordRight", "Alt-Left": "goLineStart", "Alt-Right": "goLineEnd",
"Ctrl-Backspace": "delWordLeft", "Ctrl-Delete": "delWordRight", "Ctrl-S": "save", "Ctrl-F": "find",
"Ctrl-G": "findNext", "Shift-Ctrl-G": "findPrev", "Shift-Ctrl-F": "replace", "Shift-Ctrl-R": "replaceAll",
"Ctrl-[": "indentLess", "Ctrl-]": "indentMore",
fallthrough: "basic"
};
keyMap.macDefault = {
"Cmd-A": "selectAll", "Cmd-D": "deleteLine", "Cmd-Z": "undo", "Shift-Cmd-Z": "redo", "Cmd-Y": "redo",
"Cmd-Up": "goDocStart", "Cmd-End": "goDocEnd", "Cmd-Down": "goDocEnd", "Alt-Left": "goWordLeft",
"Alt-Right": "goWordRight", "Cmd-Left": "goLineStart", "Cmd-Right": "goLineEnd", "Alt-Backspace": "delWordLeft",
"Ctrl-Alt-Backspace": "delWordRight", "Alt-Delete": "delWordRight", "Cmd-S": "save", "Cmd-F": "find",
"Cmd-G": "findNext", "Shift-Cmd-G": "findPrev", "Cmd-Alt-F": "replace", "Shift-Cmd-Alt-F": "replaceAll",
"Cmd-[": "indentLess", "Cmd-]": "indentMore",
fallthrough: ["basic", "emacsy"]
};
keyMap["default"] = mac ? keyMap.macDefault : keyMap.pcDefault;
keyMap.emacsy = {
"Ctrl-F": "goCharRight", "Ctrl-B": "goCharLeft", "Ctrl-P": "goLineUp", "Ctrl-N": "goLineDown",
"Alt-F": "goWordRight", "Alt-B": "goWordLeft", "Ctrl-A": "goLineStart", "Ctrl-E": "goLineEnd",
"Ctrl-V": "goPageUp", "Shift-Ctrl-V": "goPageDown", "Ctrl-D": "delCharRight", "Ctrl-H": "delCharLeft",
"Alt-D": "delWordRight", "Alt-Backspace": "delWordLeft", "Ctrl-K": "killLine", "Ctrl-T": "transposeChars"
};
function getKeyMap(val) {
if (typeof val == "string") return keyMap[val];
else return val;
}
function lookupKey(name, extraMap, map, handle) {
function lookup(map) {
map = getKeyMap(map);
var found = map[name];
if (found != null && handle(found)) return true;
if (map.catchall) return handle(map.catchall);
var fallthrough = map.fallthrough;
if (fallthrough == null) return false;
if (Object.prototype.toString.call(fallthrough) != "[object Array]")
return lookup(fallthrough);
for (var i = 0, e = fallthrough.length; i < e; ++i) {
if (lookup(fallthrough[i])) return true;
}
return false;
}
if (extraMap && lookup(extraMap)) return true;
return lookup(map);
}
function isModifierKey(event) {
var name = keyNames[e_prop(event, "keyCode")];
return name == "Ctrl" || name == "Alt" || name == "Shift" || name == "Mod";
}
CodeMirror.fromTextArea = function(textarea, options) {
if (!options) options = {};
options.value = textarea.value;
if (!options.tabindex && textarea.tabindex)
options.tabindex = textarea.tabindex;
if (options.autofocus == null && textarea.getAttribute("autofocus") != null)
options.autofocus = true;
function save() {textarea.value = instance.getValue();}
if (textarea.form) {
// Deplorable hack to make the submit method do the right thing.
var rmSubmit = connect(textarea.form, "submit", save, true);
if (typeof textarea.form.submit == "function") {
var realSubmit = textarea.form.submit;
function wrappedSubmit() {
save();
textarea.form.submit = realSubmit;
textarea.form.submit();
textarea.form.submit = wrappedSubmit;
}
textarea.form.submit = wrappedSubmit;
}
}
textarea.style.display = "none";
var instance = CodeMirror(function(node) {
textarea.parentNode.insertBefore(node, textarea.nextSibling);
}, options);
instance.save = save;
instance.getTextArea = function() { return textarea; };
instance.toTextArea = function() {
save();
textarea.parentNode.removeChild(instance.getWrapperElement());
textarea.style.display = "";
if (textarea.form) {
rmSubmit();
if (typeof textarea.form.submit == "function")
textarea.form.submit = realSubmit;
}
};
return instance;
};
// Utility functions for working with state. Exported because modes
// sometimes need to do this.
function copyState(mode, state) {
if (state === true) return state;
if (mode.copyState) return mode.copyState(state);
var nstate = {};
for (var n in state) {
var val = state[n];
if (val instanceof Array) val = val.concat([]);
nstate[n] = val;
}
return nstate;
}
CodeMirror.copyState = copyState;
function startState(mode, a1, a2) {
return mode.startState ? mode.startState(a1, a2) : true;
}
CodeMirror.startState = startState;
// The character stream used by a mode's parser.
function StringStream(string, tabSize) {
this.pos = this.start = 0;
this.string = string;
this.tabSize = tabSize || 8;
}
StringStream.prototype = {
eol: function() {return this.pos >= this.string.length;},
sol: function() {return this.pos == 0;},
peek: function() {return this.string.charAt(this.pos);},
next: function() {
if (this.pos < this.string.length)
return this.string.charAt(this.pos++);
},
eat: function(match) {
var ch = this.string.charAt(this.pos);
if (typeof match == "string") var ok = ch == match;
else var ok = ch && (match.test ? match.test(ch) : match(ch));
if (ok) {++this.pos; return ch;}
},
eatWhile: function(match) {
var start = this.pos;
while (this.eat(match)){}
return this.pos > start;
},
eatSpace: function() {
var start = this.pos;
while (/[\s\u00a0]/.test(this.string.charAt(this.pos))) ++this.pos;
return this.pos > start;
},
skipToEnd: function() {this.pos = this.string.length;},
skipTo: function(ch) {
var found = this.string.indexOf(ch, this.pos);
if (found > -1) {this.pos = found; return true;}
},
backUp: function(n) {this.pos -= n;},
column: function() {return countColumn(this.string, this.start, this.tabSize);},
indentation: function() {return countColumn(this.string, null, this.tabSize);},
match: function(pattern, consume, caseInsensitive) {
if (typeof pattern == "string") {
function cased(str) {return caseInsensitive ? str.toLowerCase() : str;}
if (cased(this.string).indexOf(cased(pattern), this.pos) == this.pos) {
if (consume !== false) this.pos += pattern.length;
return true;
}
}
else {
var match = this.string.slice(this.pos).match(pattern);
if (match && consume !== false) this.pos += match[0].length;
return match;
}
},
current: function(){return this.string.slice(this.start, this.pos);}
};
CodeMirror.StringStream = StringStream;
function MarkedText(from, to, className, marker) {
this.from = from; this.to = to; this.style = className; this.marker = marker;
}
MarkedText.prototype = {
attach: function(line) { this.marker.set.push(line); },
detach: function(line) {
var ix = indexOf(this.marker.set, line);
if (ix > -1) this.marker.set.splice(ix, 1);
},
split: function(pos, lenBefore) {
if (this.to <= pos && this.to != null) return null;
var from = this.from < pos || this.from == null ? null : this.from - pos + lenBefore;
var to = this.to == null ? null : this.to - pos + lenBefore;
return new MarkedText(from, to, this.style, this.marker);
},
dup: function() { return new MarkedText(null, null, this.style, this.marker); },
clipTo: function(fromOpen, from, toOpen, to, diff) {
if (fromOpen && to > this.from && (to < this.to || this.to == null))
this.from = null;
else if (this.from != null && this.from >= from)
this.from = Math.max(to, this.from) + diff;
if (toOpen && (from < this.to || this.to == null) && (from > this.from || this.from == null))
this.to = null;
else if (this.to != null && this.to > from)
this.to = to < this.to ? this.to + diff : from;
},
isDead: function() { return this.from != null && this.to != null && this.from >= this.to; },
sameSet: function(x) { return this.marker == x.marker; }
};
function Bookmark(pos) {
this.from = pos; this.to = pos; this.line = null;
}
Bookmark.prototype = {
attach: function(line) { this.line = line; },
detach: function(line) { if (this.line == line) this.line = null; },
split: function(pos, lenBefore) {
if (pos < this.from) {
this.from = this.to = (this.from - pos) + lenBefore;
return this;
}
},
isDead: function() { return this.from > this.to; },
clipTo: function(fromOpen, from, toOpen, to, diff) {
if ((fromOpen || from < this.from) && (toOpen || to > this.to)) {
this.from = 0; this.to = -1;
} else if (this.from > from) {
this.from = this.to = Math.max(to, this.from) + diff;
}
},
sameSet: function(x) { return false; },
find: function() {
if (!this.line || !this.line.parent) return null;
return {line: lineNo(this.line), ch: this.from};
},
clear: function() {
if (this.line) {
var found = indexOf(this.line.marked, this);
if (found != -1) this.line.marked.splice(found, 1);
this.line = null;
}
}
};
// Line objects. These hold state related to a line, including
// highlighting info (the styles array).
function Line(text, styles) {
this.styles = styles || [text, null];
this.text = text;
this.height = 1;
this.marked = this.gutterMarker = this.className = this.bgClassName = this.handlers = null;
this.stateAfter = this.parent = this.hidden = null;
}
Line.inheritMarks = function(text, orig) {
var ln = new Line(text), mk = orig && orig.marked;
if (mk) {
for (var i = 0; i < mk.length; ++i) {
if (mk[i].to == null && mk[i].style) {
var newmk = ln.marked || (ln.marked = []), mark = mk[i];
var nmark = mark.dup(); newmk.push(nmark); nmark.attach(ln);
}
}
}
return ln;
}
Line.prototype = {
// Replace a piece of a line, keeping the styles around it intact.
replace: function(from, to_, text) {
var st = [], mk = this.marked, to = to_ == null ? this.text.length : to_;
copyStyles(0, from, this.styles, st);
if (text) st.push(text, null);
copyStyles(to, this.text.length, this.styles, st);
this.styles = st;
this.text = this.text.slice(0, from) + text + this.text.slice(to);
this.stateAfter = null;
if (mk) {
var diff = text.length - (to - from);
for (var i = 0; i < mk.length; ++i) {
var mark = mk[i];
mark.clipTo(from == null, from || 0, to_ == null, to, diff);
if (mark.isDead()) {mark.detach(this); mk.splice(i--, 1);}
}
}
},
// Split a part off a line, keeping styles and markers intact.
split: function(pos, textBefore) {
var st = [textBefore, null], mk = this.marked;
copyStyles(pos, this.text.length, this.styles, st);
var taken = new Line(textBefore + this.text.slice(pos), st);
if (mk) {
for (var i = 0; i < mk.length; ++i) {
var mark = mk[i];
var newmark = mark.split(pos, textBefore.length);
if (newmark) {
if (!taken.marked) taken.marked = [];
taken.marked.push(newmark); newmark.attach(taken);
if (newmark == mark) mk.splice(i--, 1);
}
}
}
return taken;
},
append: function(line) {
var mylen = this.text.length, mk = line.marked, mymk = this.marked;
this.text += line.text;
copyStyles(0, line.text.length, line.styles, this.styles);
if (mymk) {
for (var i = 0; i < mymk.length; ++i)
if (mymk[i].to == null) mymk[i].to = mylen;
}
if (mk && mk.length) {
if (!mymk) this.marked = mymk = [];
outer: for (var i = 0; i < mk.length; ++i) {
var mark = mk[i];
if (!mark.from) {
for (var j = 0; j < mymk.length; ++j) {
var mymark = mymk[j];
if (mymark.to == mylen && mymark.sameSet(mark)) {
mymark.to = mark.to == null ? null : mark.to + mylen;
if (mymark.isDead()) {
mymark.detach(this);
mk.splice(i--, 1);
}
continue outer;
}
}
}
mymk.push(mark);
mark.attach(this);
mark.from += mylen;
if (mark.to != null) mark.to += mylen;
}
}
},
fixMarkEnds: function(other) {
var mk = this.marked, omk = other.marked;
if (!mk) return;
for (var i = 0; i < mk.length; ++i) {
var mark = mk[i], close = mark.to == null;
if (close && omk) {
for (var j = 0; j < omk.length; ++j)
if (omk[j].sameSet(mark)) {close = false; break;}
}
if (close) mark.to = this.text.length;
}
},
fixMarkStarts: function() {
var mk = this.marked;
if (!mk) return;
for (var i = 0; i < mk.length; ++i)
if (mk[i].from == null) mk[i].from = 0;
},
addMark: function(mark) {
mark.attach(this);
if (this.marked == null) this.marked = [];
this.marked.push(mark);
this.marked.sort(function(a, b){return (a.from || 0) - (b.from || 0);});
},
// Run the given mode's parser over a line, update the styles
// array, which contains alternating fragments of text and CSS
// classes.
highlight: function(mode, state, tabSize) {
var stream = new StringStream(this.text, tabSize), st = this.styles, pos = 0;
var changed = false, curWord = st[0], prevWord;
if (this.text == "" && mode.blankLine) mode.blankLine(state);
while (!stream.eol()) {
var style = mode.token(stream, state);
var substr = this.text.slice(stream.start, stream.pos);
stream.start = stream.pos;
if (pos && st[pos-1] == style)
st[pos-2] += substr;
else if (substr) {
if (!changed && (st[pos+1] != style || (pos && st[pos-2] != prevWord))) changed = true;
st[pos++] = substr; st[pos++] = style;
prevWord = curWord; curWord = st[pos];
}
// Give up when line is ridiculously long
if (stream.pos > 5000) {
st[pos++] = this.text.slice(stream.pos); st[pos++] = null;
break;
}
}
if (st.length != pos) {st.length = pos; changed = true;}
if (pos && st[pos-2] != prevWord) changed = true;
// Short lines with simple highlights return null, and are
// counted as changed by the driver because they are likely to
// highlight the same way in various contexts.
return changed || (st.length < 5 && this.text.length < 10 ? null : false);
},
// Fetch the parser token for a given character. Useful for hacks
// that want to inspect the mode state (say, for completion).
getTokenAt: function(mode, state, ch) {
var txt = this.text, stream = new StringStream(txt);
while (stream.pos < ch && !stream.eol()) {
stream.start = stream.pos;
var style = mode.token(stream, state);
}
return {start: stream.start,
end: stream.pos,
string: stream.current(),
className: style || null,
state: state};
},
indentation: function(tabSize) {return countColumn(this.text, null, tabSize);},
// Produces an HTML fragment for the line, taking selection,
// marking, and highlighting into account.
getHTML: function(makeTab, endAt) {
var html = [], first = true, col = 0;
function span(text, style) {
if (!text) return;
// Work around a bug where, in some compat modes, IE ignores leading spaces
if (first && ie && text.charAt(0) == " ") text = "\u00a0" + text.slice(1);
first = false;
if (text.indexOf("\t") == -1) {
col += text.length;
var escaped = htmlEscape(text);
} else {
var escaped = "";
for (var pos = 0;;) {
var idx = text.indexOf("\t", pos);
if (idx == -1) {
escaped += htmlEscape(text.slice(pos));
col += text.length - pos;
break;
} else {
col += idx - pos;
var tab = makeTab(col);
escaped += htmlEscape(text.slice(pos, idx)) + tab.html;
col += tab.width;
pos = idx + 1;
}
}
}
if (style) html.push('<span class="', style, '">', escaped, "</span>");
else html.push(escaped);
}
var st = this.styles, allText = this.text, marked = this.marked;
var len = allText.length;
if (endAt != null) len = Math.min(endAt, len);
function styleToClass(style) {
if (!style) return null;
return "cm-" + style.replace(/ +/g, " cm-");
}
if (!allText && endAt == null)
span(" ");
else if (!marked || !marked.length)
for (var i = 0, ch = 0; ch < len; i+=2) {
var str = st[i], style = st[i+1], l = str.length;
if (ch + l > len) str = str.slice(0, len - ch);
ch += l;
span(str, styleToClass(style));
}
else {
var pos = 0, i = 0, text = "", style, sg = 0;
var nextChange = marked[0].from || 0, marks = [], markpos = 0;
function advanceMarks() {
var m;
while (markpos < marked.length &&
((m = marked[markpos]).from == pos || m.from == null)) {
if (m.style != null) marks.push(m);
++markpos;
}
nextChange = markpos < marked.length ? marked[markpos].from : Infinity;
for (var i = 0; i < marks.length; ++i) {
var to = marks[i].to || Infinity;
if (to == pos) marks.splice(i--, 1);
else nextChange = Math.min(to, nextChange);
}
}
var m = 0;
while (pos < len) {
if (nextChange == pos) advanceMarks();
var upto = Math.min(len, nextChange);
while (true) {
if (text) {
var end = pos + text.length;
var appliedStyle = style;
for (var j = 0; j < marks.length; ++j)
appliedStyle = (appliedStyle ? appliedStyle + " " : "") + marks[j].style;
span(end > upto ? text.slice(0, upto - pos) : text, appliedStyle);
if (end >= upto) {text = text.slice(upto - pos); pos = upto; break;}
pos = end;
}
text = st[i++]; style = styleToClass(st[i++]);
}
}
}
return html.join("");
},
cleanUp: function() {
this.parent = null;
if (this.marked)
for (var i = 0, e = this.marked.length; i < e; ++i) this.marked[i].detach(this);
}
};
// Utility used by replace and split above
function copyStyles(from, to, source, dest) {
for (var i = 0, pos = 0, state = 0; pos < to; i+=2) {
var part = source[i], end = pos + part.length;
if (state == 0) {
if (end > from) dest.push(part.slice(from - pos, Math.min(part.length, to - pos)), source[i+1]);
if (end >= from) state = 1;
}
else if (state == 1) {
if (end > to) dest.push(part.slice(0, to - pos), source[i+1]);
else dest.push(part, source[i+1]);
}
pos = end;
}
}
// Data structure that holds the sequence of lines.
function LeafChunk(lines) {
this.lines = lines;
this.parent = null;
for (var i = 0, e = lines.length, height = 0; i < e; ++i) {
lines[i].parent = this;
height += lines[i].height;
}
this.height = height;
}
LeafChunk.prototype = {
chunkSize: function() { return this.lines.length; },
remove: function(at, n, callbacks) {
for (var i = at, e = at + n; i < e; ++i) {
var line = this.lines[i];
this.height -= line.height;
line.cleanUp();
if (line.handlers)
for (var j = 0; j < line.handlers.length; ++j) callbacks.push(line.handlers[j]);
}
this.lines.splice(at, n);
},
collapse: function(lines) {
lines.splice.apply(lines, [lines.length, 0].concat(this.lines));
},
insertHeight: function(at, lines, height) {
this.height += height;
this.lines.splice.apply(this.lines, [at, 0].concat(lines));
for (var i = 0, e = lines.length; i < e; ++i) lines[i].parent = this;
},
iterN: function(at, n, op) {
for (var e = at + n; at < e; ++at)
if (op(this.lines[at])) return true;
}
};
function BranchChunk(children) {
this.children = children;
var size = 0, height = 0;
for (var i = 0, e = children.length; i < e; ++i) {
var ch = children[i];
size += ch.chunkSize(); height += ch.height;
ch.parent = this;
}
this.size = size;
this.height = height;
this.parent = null;
}
BranchChunk.prototype = {
chunkSize: function() { return this.size; },
remove: function(at, n, callbacks) {
this.size -= n;
for (var i = 0; i < this.children.length; ++i) {
var child = this.children[i], sz = child.chunkSize();
if (at < sz) {
var rm = Math.min(n, sz - at), oldHeight = child.height;
child.remove(at, rm, callbacks);
this.height -= oldHeight - child.height;
if (sz == rm) { this.children.splice(i--, 1); child.parent = null; }
if ((n -= rm) == 0) break;
at = 0;
} else at -= sz;
}
if (this.size - n < 25) {
var lines = [];
this.collapse(lines);
this.children = [new LeafChunk(lines)];
this.children[0].parent = this;
}
},
collapse: function(lines) {
for (var i = 0, e = this.children.length; i < e; ++i) this.children[i].collapse(lines);
},
insert: function(at, lines) {
var height = 0;
for (var i = 0, e = lines.length; i < e; ++i) height += lines[i].height;
this.insertHeight(at, lines, height);
},
insertHeight: function(at, lines, height) {
this.size += lines.length;
this.height += height;
for (var i = 0, e = this.children.length; i < e; ++i) {
var child = this.children[i], sz = child.chunkSize();
if (at <= sz) {
child.insertHeight(at, lines, height);
if (child.lines && child.lines.length > 50) {
while (child.lines.length > 50) {
var spilled = child.lines.splice(child.lines.length - 25, 25);
var newleaf = new LeafChunk(spilled);
child.height -= newleaf.height;
this.children.splice(i + 1, 0, newleaf);
newleaf.parent = this;
}
this.maybeSpill();
}
break;
}
at -= sz;
}
},
maybeSpill: function() {
if (this.children.length <= 10) return;
var me = this;
do {
var spilled = me.children.splice(me.children.length - 5, 5);
var sibling = new BranchChunk(spilled);
if (!me.parent) { // Become the parent node
var copy = new BranchChunk(me.children);
copy.parent = me;
me.children = [copy, sibling];
me = copy;
} else {
me.size -= sibling.size;
me.height -= sibling.height;
var myIndex = indexOf(me.parent.children, me);
me.parent.children.splice(myIndex + 1, 0, sibling);
}
sibling.parent = me.parent;
} while (me.children.length > 10);
me.parent.maybeSpill();
},
iter: function(from, to, op) { this.iterN(from, to - from, op); },
iterN: function(at, n, op) {
for (var i = 0, e = this.children.length; i < e; ++i) {
var child = this.children[i], sz = child.chunkSize();
if (at < sz) {
var used = Math.min(n, sz - at);
if (child.iterN(at, used, op)) return true;
if ((n -= used) == 0) break;
at = 0;
} else at -= sz;
}
}
};
function getLineAt(chunk, n) {
while (!chunk.lines) {
for (var i = 0;; ++i) {
var child = chunk.children[i], sz = child.chunkSize();
if (n < sz) { chunk = child; break; }
n -= sz;
}
}
return chunk.lines[n];
}
function lineNo(line) {
if (line.parent == null) return null;
var cur = line.parent, no = indexOf(cur.lines, line);
for (var chunk = cur.parent; chunk; cur = chunk, chunk = chunk.parent) {
for (var i = 0, e = chunk.children.length; ; ++i) {
if (chunk.children[i] == cur) break;
no += chunk.children[i].chunkSize();
}
}
return no;
}
function lineAtHeight(chunk, h) {
var n = 0;
outer: do {
for (var i = 0, e = chunk.children.length; i < e; ++i) {
var child = chunk.children[i], ch = child.height;
if (h < ch) { chunk = child; continue outer; }
h -= ch;
n += child.chunkSize();
}
return n;
} while (!chunk.lines);
for (var i = 0, e = chunk.lines.length; i < e; ++i) {
var line = chunk.lines[i], lh = line.height;
if (h < lh) break;
h -= lh;
}
return n + i;
}
function heightAtLine(chunk, n) {
var h = 0;
outer: do {
for (var i = 0, e = chunk.children.length; i < e; ++i) {
var child = chunk.children[i], sz = child.chunkSize();
if (n < sz) { chunk = child; continue outer; }
n -= sz;
h += child.height;
}
return h;
} while (!chunk.lines);
for (var i = 0; i < n; ++i) h += chunk.lines[i].height;
return h;
}
// The history object 'chunks' changes that are made close together
// and at almost the same time into bigger undoable units.
function History() {
this.time = 0;
this.done = []; this.undone = [];
}
History.prototype = {
addChange: function(start, added, old) {
this.undone.length = 0;
var time = +new Date, cur = this.done[this.done.length - 1], last = cur && cur[cur.length - 1];
var dtime = time - this.time;
if (dtime > 400 || !last) {
this.done.push([{start: start, added: added, old: old}]);
} else if (last.start > start + old.length || last.start + last.added < start - last.added + last.old.length) {
cur.push({start: start, added: added, old: old});
} else {
var oldoff = 0;
if (start < last.start) {
for (var i = last.start - start - 1; i >= 0; --i)
last.old.unshift(old[i]);
oldoff = Math.min(0, added - old.length);
last.added += last.start - start + oldoff;
last.start = start;
} else if (last.start < start) {
oldoff = start - last.start;
added += oldoff;
}
for (var i = last.added - oldoff, e = old.length; i < e; ++i)
last.old.push(old[i]);
if (last.added < added) last.added = added;
}
this.time = time;
}
};
function stopMethod() {e_stop(this);}
// Ensure an event has a stop method.
function addStop(event) {
if (!event.stop) event.stop = stopMethod;
return event;
}
function e_preventDefault(e) {
if (e.preventDefault) e.preventDefault();
else e.returnValue = false;
}
function e_stopPropagation(e) {
if (e.stopPropagation) e.stopPropagation();
else e.cancelBubble = true;
}
function e_stop(e) {e_preventDefault(e); e_stopPropagation(e);}
CodeMirror.e_stop = e_stop;
CodeMirror.e_preventDefault = e_preventDefault;
CodeMirror.e_stopPropagation = e_stopPropagation;
function e_target(e) {return e.target || e.srcElement;}
function e_button(e) {
if (e.which) return e.which;
else if (e.button & 1) return 1;
else if (e.button & 2) return 3;
else if (e.button & 4) return 2;
}
// Allow 3rd-party code to override event properties by adding an override
// object to an event object.
function e_prop(e, prop) {
var overridden = e.override && e.override.hasOwnProperty(prop);
return overridden ? e.override[prop] : e[prop];
}
// Event handler registration. If disconnect is true, it'll return a
// function that unregisters the handler.
function connect(node, type, handler, disconnect) {
if (typeof node.addEventListener == "function") {
node.addEventListener(type, handler, false);
if (disconnect) return function() {node.removeEventListener(type, handler, false);};
}
else {
var wrapHandler = function(event) {handler(event || window.event);};
node.attachEvent("on" + type, wrapHandler);
if (disconnect) return function() {node.detachEvent("on" + type, wrapHandler);};
}
}
CodeMirror.connect = connect;
function Delayed() {this.id = null;}
Delayed.prototype = {set: function(ms, f) {clearTimeout(this.id); this.id = setTimeout(f, ms);}};
var Pass = CodeMirror.Pass = {toString: function(){return "CodeMirror.Pass";}};
var gecko = /gecko\/\d{7}/i.test(navigator.userAgent);
var ie = /MSIE \d/.test(navigator.userAgent);
var ie_lt9 = /MSIE [1-8]\b/.test(navigator.userAgent);
var webkit = /WebKit\//.test(navigator.userAgent);
var chrome = /Chrome\//.test(navigator.userAgent);
var khtml = /KHTML\//.test(navigator.userAgent);
// Detect drag-and-drop
var dragAndDrop = function() {
// There is *some* kind of drag-and-drop support in IE6-8, but I
// couldn't get it to work yet.
if (ie_lt9) return false;
var div = document.createElement('div');
return "draggable" in div || "dragDrop" in div;
}();
var lineSep = "\n";
// Feature-detect whether newlines in textareas are converted to \r\n
(function () {
var te = document.createElement("textarea");
te.value = "foo\nbar";
if (te.value.indexOf("\r") > -1) lineSep = "\r\n";
}());
// Counts the column offset in a string, taking tabs into account.
// Used mostly to find indentation.
function countColumn(string, end, tabSize) {
if (end == null) {
end = string.search(/[^\s\u00a0]/);
if (end == -1) end = string.length;
}
for (var i = 0, n = 0; i < end; ++i) {
if (string.charAt(i) == "\t") n += tabSize - (n % tabSize);
else ++n;
}
return n;
}
function computedStyle(elt) {
if (elt.currentStyle) return elt.currentStyle;
return window.getComputedStyle(elt, null);
}
// Find the position of an element by following the offsetParent chain.
// If screen==true, it returns screen (rather than page) coordinates.
function eltOffset(node, screen) {
var bod = node.ownerDocument.body;
var x = 0, y = 0, skipBody = false;
for (var n = node; n; n = n.offsetParent) {
var ol = n.offsetLeft, ot = n.offsetTop;
// Firefox reports weird inverted offsets when the body has a border.
if (n == bod) { x += Math.abs(ol); y += Math.abs(ot); }
else { x += ol, y += ot; }
if (screen && computedStyle(n).position == "fixed")
skipBody = true;
}
var e = screen && !skipBody ? null : bod;
for (var n = node.parentNode; n != e; n = n.parentNode)
if (n.scrollLeft != null) { x -= n.scrollLeft; y -= n.scrollTop;}
return {left: x, top: y};
}
// Use the faster and saner getBoundingClientRect method when possible.
if (document.documentElement.getBoundingClientRect != null) eltOffset = function(node, screen) {
// Take the parts of bounding client rect that we are interested in so we are able to edit if need be,
// since the returned value cannot be changed externally (they are kept in sync as the element moves within the page)
try { var box = node.getBoundingClientRect(); box = { top: box.top, left: box.left }; }
catch(e) { box = {top: 0, left: 0}; }
if (!screen) {
// Get the toplevel scroll, working around browser differences.
if (window.pageYOffset == null) {
var t = document.documentElement || document.body.parentNode;
if (t.scrollTop == null) t = document.body;
box.top += t.scrollTop; box.left += t.scrollLeft;
} else {
box.top += window.pageYOffset; box.left += window.pageXOffset;
}
}
return box;
};
// Get a node's text content.
function eltText(node) {
return node.textContent || node.innerText || node.nodeValue || "";
}
function selectInput(node) {
if (ios) { // Mobile Safari apparently has a bug where select() is broken.
node.selectionStart = 0;
node.selectionEnd = node.value.length;
} else node.select();
}
// Operations on {line, ch} objects.
function posEq(a, b) {return a.line == b.line && a.ch == b.ch;}
function posLess(a, b) {return a.line < b.line || (a.line == b.line && a.ch < b.ch);}
function copyPos(x) {return {line: x.line, ch: x.ch};}
var escapeElement = document.createElement("pre");
function htmlEscape(str) {
escapeElement.textContent = str;
return escapeElement.innerHTML;
}
// Recent (late 2011) Opera betas insert bogus newlines at the start
// of the textContent, so we strip those.
if (htmlEscape("a") == "\na")
htmlEscape = function(str) {
escapeElement.textContent = str;
return escapeElement.innerHTML.slice(1);
};
// Some IEs don't preserve tabs through innerHTML
else if (htmlEscape("\t") != "\t")
htmlEscape = function(str) {
escapeElement.innerHTML = "";
escapeElement.appendChild(document.createTextNode(str));
return escapeElement.innerHTML;
};
CodeMirror.htmlEscape = htmlEscape;
// Used to position the cursor after an undo/redo by finding the
// last edited character.
function editEnd(from, to) {
if (!to) return 0;
if (!from) return to.length;
for (var i = from.length, j = to.length; i >= 0 && j >= 0; --i, --j)
if (from.charAt(i) != to.charAt(j)) break;
return j + 1;
}
function indexOf(collection, elt) {
if (collection.indexOf) return collection.indexOf(elt);
for (var i = 0, e = collection.length; i < e; ++i)
if (collection[i] == elt) return i;
return -1;
}
function isWordChar(ch) {
return /\w/.test(ch) || ch.toUpperCase() != ch.toLowerCase();
}
// See if "".split is the broken IE version, if so, provide an
// alternative way to split lines.
var splitLines = "\n\nb".split(/\n/).length != 3 ? function(string) {
var pos = 0, nl, result = [];
while ((nl = string.indexOf("\n", pos)) > -1) {
result.push(string.slice(pos, string.charAt(nl-1) == "\r" ? nl - 1 : nl));
pos = nl + 1;
}
result.push(string.slice(pos));
return result;
} : function(string){return string.split(/\r?\n/);};
CodeMirror.splitLines = splitLines;
var hasSelection = window.getSelection ? function(te) {
try { return te.selectionStart != te.selectionEnd; }
catch(e) { return false; }
} : function(te) {
try {var range = te.ownerDocument.selection.createRange();}
catch(e) {}
if (!range || range.parentElement() != te) return false;
return range.compareEndPoints("StartToEnd", range) != 0;
};
CodeMirror.defineMode("null", function() {
return {token: function(stream) {stream.skipToEnd();}};
});
CodeMirror.defineMIME("text/plain", "null");
var keyNames = {3: "Enter", 8: "Backspace", 9: "Tab", 13: "Enter", 16: "Shift", 17: "Ctrl", 18: "Alt",
19: "Pause", 20: "CapsLock", 27: "Esc", 32: "Space", 33: "PageUp", 34: "PageDown", 35: "End",
36: "Home", 37: "Left", 38: "Up", 39: "Right", 40: "Down", 44: "PrintScrn", 45: "Insert",
46: "Delete", 59: ";", 91: "Mod", 92: "Mod", 93: "Mod", 127: "Delete", 186: ";", 187: "=", 188: ",",
189: "-", 190: ".", 191: "/", 192: "`", 219: "[", 220: "\\", 221: "]", 222: "'", 63276: "PageUp",
63277: "PageDown", 63275: "End", 63273: "Home", 63234: "Left", 63232: "Up", 63235: "Right",
63233: "Down", 63302: "Insert", 63272: "Delete"};
CodeMirror.keyNames = keyNames;
(function() {
// Number keys
for (var i = 0; i < 10; i++) keyNames[i + 48] = String(i);
// Alphabetic keys
for (var i = 65; i <= 90; i++) keyNames[i] = String.fromCharCode(i);
// Function keys
for (var i = 1; i <= 12; i++) keyNames[i + 111] = keyNames[i + 63235] = "F" + i;
})();
return CodeMirror;
})(); | PypiClean |
# Autologging - easier logging and tracing for Python classes
http://ninthtest.info/python-autologging/
[](https://pypi.python.org/pypi/Autologging)
[](https://pypi.python.org/pypi/Autologging)
[](https://pypi.python.org/pypi/Autologging)
[](https://github.com/mzipay/Autologging/blob/master/LICENSE.txt)
[](https://pypi.python.org/pypi/Autologging)
## Introduction
Autologging eliminates boilerplate logging setup code and tracing code,
and provides a means to separate application logging from program flow
and data tracing.
Python modules that make use of Autologging are cleaner, leaner, and
more resilient to changes that would otherwise require updating tracing
statements.
Autologging allows for tracing to be configured (and controlled)
independently from application logging. Toggle tracing on/off, write
trace log records to a separate log, and use different formatting for
trace log entries - all via standard Python logging facilities, and
without affecting your application logging.
### What's in the `autologging` namespace?
Autologging exposes two decorators and a custom log level:
**`logged`**
Decorate a class to create a `__log` member. The logger is named by
default to match the dotted-name of the containing class. A function
may also be decorated, creating a `_log` attribute on the function
object whose default name matches the containing module.
A specifically-named logger may also be passed to the decorator (i.e.
`logged(my_logger)`).
**`traced`**
Decorate a class to provide **automatic** method call/return tracing. By
default, all class, static, and instance methods are traced (excluding
"__special__" methods, with the exception of `__init__` and `__call__`).
As with the `logged` decorator, the default name of the tracing logger
matches the dotted-name of the containing class and may be overridden by
passing a specifically-named logger to the decorator.
Additionally, this decorator accepts multiple string arguments that
explicitly name the methods to be traced (and may even name
"__special__" methods).
Module-level functions may also be traced using this decorator.
*New in version 1.2.0:* automatic yield/stop tracing of Python
[generator iterators](https://docs.python.org/3/glossary.html#term-generator-iterator)
(if the [generator](https://docs.python.org/3/glossary.html#term-generator)
function is traced).
**`TRACE`**
The `autologging.TRACE` (level 1) log level is registered with the
Python `logging` module when Autologging is imported so that tracing
can be configured and controlled independently of application logging.
Tracing may be disabled entirely by setting the
`AUTOLOGGING_TRACED_NOOP` environment variable or by calling the
`autologging.install_traced_noop()` function.
## A brief example
A simple logged and traced class:
```python
1 import logging
2 import sys
3
4 from autologging import logged, TRACE, traced
5
6 @traced
7 @logged
8 class Example:
9
10 def __init__(self):
11 self.__log.info("initialized")
12
13 def backwards(self, *words):
14 for word in words:
15 yield "".join(reversed(word))
16
17
18 if __name__ == "__main__":
19 logging.basicConfig(
20 level=TRACE, stream=sys.stderr,
21 format="%(levelname)s:%(filename)s,%(lineno)d:%(name)s.%(funcName)s:%(message)s")
22 example = Example()
23 for result in example.backwards("spam", "eggs"):
24 print(result)
```
Logging and tracing output:
```bash
$ python example.py
TRACE:example.py,10:__main__.Example.__init__:CALL *() **{}
INFO:example.py,11:__main__.Example.__init__:initialized
TRACE:example.py,11:__main__.Example.__init__:RETURN None
TRACE:example.py,13:__main__.Example.backwards:CALL *('spam', 'eggs') **{}
TRACE:example.py,15:__main__.Example.backwards:RETURN <generator object backwards at 0x7fa534d61eb0>
TRACE:example.py,15:__main__.Example.backwards:YIELD 'maps'
maps
TRACE:example.py,15:__main__.Example.backwards:YIELD 'sgge'
sgge
TRACE:example.py,15:__main__.Example.backwards:STOP
```
## Installation
The easiest way to install Autologging is to use
[pip](https://pip.pypa.io/):
```bash
$ pip install Autologging
```
### Source installation
Clone or fork the repository:
```bash
$ git clone https://github.com/mzipay/Autologging.git
```
Alternatively, download and extract a source .zip or .tar.gz archive
from https://github.com/mzipay/Autologging/releases,
https://pypi.python.org/pypi/Autologging or
https://sourceforge.net/projects/autologging/files/.
Make sure you have [setuptools](https://pypi.python.org/pypi/setuptools)
installed, then run the test suite and install the `autologging` module:
```bash
$ cd Autologging
$ python setup.py test
$ python setup.py install
```
### Binary installation
Download the Python wheel (.whl) or a Windows installer from
https://pypi.python.org/pypi/Autologging or
https://sourceforge.net/projects/autologging/files/.
(Use [pip](https://pip.pypa.io/) or
[wheel](https://pypi.python.org/pypi/wheel) to install the .whl.)
| PypiClean |
/MultiPyDown-0.0.2-py3-none-any.whl/pydown/main.py | from concurrent.futures.thread import ThreadPoolExecutor
import threading
import time
from pySmartDL import SmartDL
# Sample URLs used for manually exercising the downloader (soft98.ir mirror links).
download_list = ["http://dl2.soft98.ir/soft/m/MKVToolnix.43.0.0.x64.zip?1580074028",
                 "http://dl2.soft98.ir/soft/m/MKVToolnix.43.0.0.x86.zip?1580074028",
                 "http://dl2.soft98.ir/soft/m/MKVToolnix.42.0.0.Portable.exe?1580074028", ]
#
# def download(url):
# dest = "C:\\Downloads\\junkpy" # or '~/Downloads/' on linux
#
# obj = SmartDL(url, progress_bar=False, dest=dest)
# obj.start()
#
#
# with ThreadPoolExecutor(max_workers=3) as executor:
# future = executor.submit(download, download_list[0])
# future = executor.submit(download, download_list[1])
# future = executor.submit(download, download_list[2])
# print("All tasks complete")
# def task(n):
# print("Processing {}".format(n))
#
#
# def main():
# print("Starting ThreadPoolExecutor")
# with ThreadPoolExecutor(max_workers=3) as executor:
# future = executor.submit(task, (2))
# future = executor.submit(task, (3))
# future = executor.submit(task, (4))
# print("All tasks complete")
#
class Counter:
    """Tracks per-worker download progress and prints the overall average."""

    def __init__(self, workers_count: int):
        """Create one progress slot (initialized to 0) per worker."""
        self.workers_count = workers_count
        self.pos = [0] * workers_count

    def update(self, which: int, amount: int):
        """Record the progress of worker *which* and print the mean progress."""
        self.pos[which] = amount
        print(self.pos)
        print("now = > " + str(sum(self.pos) / self.workers_count))
class Worker(threading.Thread):
    """Thread that downloads one URL with pySmartDL while reporting its
    progress to a shared Counter.

    Args:
        url (str): URL to download.
        counter (Counter): shared progress tracker.
        which (int): this worker's slot index in the counter.
    """

    # NOTE(review): thread-local storage is never read by run(); kept only
    # for backward compatibility (Worker.ns) -- candidate for removal.
    ns = threading.local()

    def __init__(self, url, counter: Counter, which: int):
        super().__init__()
        self.url = url
        self.counter = counter
        self.which = which

    def run(self):
        """Download the URL, polling and reporting progress once per second."""
        dest = "C:\\Users\\Iman\\Desktop\\python\\PyDown\\downloaded\\"  # or '~/Downloads/' on linux

        obj = SmartDL(self.url, progress_bar=False, dest=dest)
        obj.start(blocking=False)

        # poll until the download finishes, reporting percent complete
        while not obj.isFinished():
            self.counter.update(self.which, obj.get_progress() * 100)
            time.sleep(1)

        if obj.isSuccessful():
            # fix: dropped the unused `path = obj.get_dest()` local
            print("downloaded file to '%s'" % obj.get_dest())
            print("download task took %ss" % obj.get_dl_time(human=True))
            print("File hashes:")
            print(" * MD5: %s" % obj.get_data_hash('md5'))
            print(" * SHA1: %s" % obj.get_data_hash('sha1'))
            print(" * SHA256: %s" % obj.get_data_hash('sha256'))
        else:
            print("There were some errors:")
            for e in obj.get_errors():
                print(str(e))
if __name__ == '__main__':
    # Download the first two URLs concurrently, sharing one progress counter.
    progress = Counter(2)
    workers = [
        Worker(download_list[0], progress, 0),
        Worker(download_list[1], progress, 1),
    ]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
/NucleoATAC-0.3.4.tar.gz/NucleoATAC-0.3.4/nucleoatac/run_nfr.py | import multiprocessing as mp
import numpy as np
import os
import traceback
import itertools
import pysam
from pyatac.utils import shell_command, read_chrom_sizes_from_fasta, read_chrom_sizes_from_bam
from pyatac.chunk import ChunkList
from nucleoatac.NFRCalling import NFRParameters, NFRChunk
from pyatac.bias import PWM
def _nfrHelper(arg):
    """Process one genomic chunk: run NFR calling and return its results.

    Args:
        arg (tuple): (chunk, params) pair -- a region chunk and an
            NFRParameters instance.

    Returns:
        tuple or list: (nfrs, ins) when no insertion track was supplied in
        the parameters, otherwise just the nfrs.
    """
    chunk, params = arg
    try:
        worker = NFRChunk(chunk)
        worker.process(params)
        if params.ins_track is None:
            result = (worker.nfrs, worker.ins)
        else:
            result = worker.nfrs
        worker.removeData()
    except Exception as e:
        # report which region failed before propagating to the pool
        print('Caught exception when processing:\n'+ chunk.asBed()+"\n")
        traceback.print_exc()
        print()
        raise e
    return result
def _writeNFR(pos_queue, out):
out_handle = open(out + '.nfrpos.bed','a')
try:
for poslist in iter(pos_queue.get, 'STOP'):
for pos in poslist:
pos.write(out_handle)
pos_queue.task_done()
except Exception, e:
print('Caught exception when writing occupancy track\n')
traceback.print_exc()
print()
raise e
out_handle.close()
return True
def _writeIns(track_queue, out):
out_handle = open(out + '.ins.bedgraph','a')
try:
for track in iter(track_queue.get, 'STOP'):
track.write_track(out_handle)
track_queue.task_done()
except Exception, e:
print('Caught exception when writing insertion track\n')
traceback.print_exc()
print()
raise e
out_handle.close()
return True
def run_nfr(args):
    """Run NFR (nucleosome-free region) calling over a set of regions.

    Splits the input regions into chunks, processes them in a worker pool
    and streams the results to writer processes, then bgzips and
    tabix-indexes the outputs.

    Args:
        args: argparse namespace with (at least) the attributes
            bam, ins_track, occ_track, calls, bed, fasta, pwm, out,
            cores, max_occ and max_occ_upper.

    Raises:
        Exception: if neither a BAM file nor an insertion track is supplied.
    """
    if args.bam is None and args.ins_track is None:
        raise Exception("Must supply either bam file or insertion track")
    if not args.out:
        # derive the output prefix from the calls file name
        args.out = '.'.join(os.path.basename(args.calls).split('.')[0:-3])
    if args.fasta is not None:
        # the PWM footprint dictates the minimum offset around each chunk
        chrs_fasta = read_chrom_sizes_from_fasta(args.fasta)
        pwm = PWM.open(args.pwm)
        chunks = ChunkList.read(args.bed, chromDict = chrs_fasta, min_offset = max(pwm.up, pwm.down))
    else:
        chunks = ChunkList.read(args.bed)
    if args.bam is not None:
        chrs_bam = read_chrom_sizes_from_bam(args.bam)
        chunks.checkChroms(chrs_bam, chrom_source = "BAM file")
    chunks.merge()
    maxQueueSize = args.cores * 10  # NOTE(review): computed but never used below
    params = NFRParameters(args.occ_track, args.calls, args.ins_track, args.bam, max_occ = args.max_occ, max_occ_upper = args.max_occ_upper,
                           fasta = args.fasta, pwm = args.pwm)
    sets = chunks.split(items = args.cores * 5)
    # one core is reserved for the writer processes
    pool1 = mp.Pool(processes = max(1,args.cores-1))
    # truncate the output file, then let the writer process append to it
    nfr_handle = open(args.out + '.nfrpos.bed','w')
    nfr_handle.close()
    nfr_queue = mp.JoinableQueue()
    nfr_process = mp.Process(target = _writeNFR, args=(nfr_queue, args.out))
    nfr_process.start()
    if params.ins_track is None:
        # no insertion track supplied: the workers also produce one
        ins_handle = open(args.out + '.ins.bedgraph','w')
        ins_handle.close()
        ins_queue = mp.JoinableQueue()
        ins_process = mp.Process(target = _writeIns, args=(ins_queue, args.out))
        ins_process.start()
    for j in sets:
        tmp = pool1.map(_nfrHelper, zip(j,itertools.repeat(params)))
        for result in tmp:
            if params.ins_track is None:
                # _nfrHelper returned a (nfrs, ins) tuple in this mode
                nfr_queue.put(result[0])
                ins_queue.put(result[1])
            else:
                nfr_queue.put(result)
    pool1.close()
    pool1.join()
    # signal the writer processes to finish
    nfr_queue.put('STOP')
    nfr_process.join()
    if params.ins_track is None:
        ins_queue.put('STOP')
        ins_process.join()
    # bgzip + tabix-index the outputs and drop the plain-text versions
    pysam.tabix_compress(args.out + '.nfrpos.bed', args.out + '.nfrpos.bed.gz',force = True)
    shell_command('rm ' + args.out + '.nfrpos.bed')
    pysam.tabix_index(args.out + '.nfrpos.bed.gz', preset = "bed", force = True)
    if params.ins_track is None:
        pysam.tabix_compress(args.out + '.ins.bedgraph', args.out + '.ins.bedgraph.gz', force = True)
        shell_command('rm ' + args.out + '.ins.bedgraph')
        pysam.tabix_index(args.out + '.ins.bedgraph.gz', preset = "bed", force = True)
/DeepRank-GNN-0.1.22.tar.gz/DeepRank-GNN-0.1.22/deeprank_gnn/tools/StructureSimilarity.py | import numpy as np
import pdb2sql
import os
import pickle
def _printif(string, cond): return print(string) if cond else None
class StructureSimilarity(object):
def __init__(self, decoy, ref, verbose=False):
"""Compute the structure similarity between different molecules.
This class allows to compute the i-RMSD, L-RMSD, Fnat and DockQ score of a given conformation.
This can be a replacement for ProFIT. Note that the calculation of the zones are done by the class itself
and does not require any extra input. This local class could be replaced by pdb2sql.StructureSimilarity.
(https://github.com/DeepRank/pdb2sql)
Args:
decoy (str): file name of the decoy
ref (str): file name of the reference
verbose (bool, optional): print debug information
Example :
>>> from deeprank.tools import StructureSimilarity
>>> decoy = '1AK4_1w.pdb'
>>> ref = '1AK4.pdb'
>>> sim = StructureSimilarity(decoy,ref)
>>> irmsd_fast = sim.compute_irmsd_fast(method='svd',izone='1AK4.izone')
>>> irmsd = sim.compute_irmsd_pdb2sql(method='svd',izone='1AK4.izone')
>>> lrmsd_fast = sim.compute_lrmsd_fast(method='svd',lzone='1AK4.lzone',check=True)
>>> lrmsd = sim.compute_lrmsd_pdb2sql(exportpath=None,method='svd')
>>> Fnat = sim.compute_Fnat_pdb2sql()
>>> Fnat_fast = sim.compute_Fnat_fast(ref_pairs='1AK4.ref_pairs')
>>> dockQ = sim.compute_DockQScore(Fnat_fast,lrmsd_fast,irmsd_fast)
"""
self.decoy = decoy
self.ref = ref
self.verbose = verbose
    def compute_lrmsd_fast(self, lzone=None, method='svd', check=True):
        """Fast routine to compute the L-RMSD.

        This routine parses the PDB directly without using pdb2sql.
        L-RMSD is computed by aligning the longest chain of the decoy to
        the one of the reference and computing the RMSD of the shortest
        chain between decoy and reference.

        Ref: DockQ: A Quality Measure for Protein-Protein Docking Models
        http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0161879

        Args:
            lzone (None, optional): name of the file containing the zone
                definition. If None the zone is computed first.
            method (str, optional): method to align the fragments
                ('svd', 'quaternion')
            check (bool, optional): check and fix the atom ordering of
                decoy/reference. Should be True.

        Returns:
            float: L-RMSD value of the conformation
        """
        # create/read the lzone: dict mapping chainID -> list of resSeq
        if lzone is None:
            resData = self.compute_lzone(save_file=False)
        elif not os.path.isfile(lzone):
            self.compute_lzone(save_file=True, filename=lzone)
            resData = self.read_zone(lzone)
        else:
            resData = self.read_zone(lzone)

        # The check makes sure that all the atoms are in the same order in
        # decoy and reference. I STRONGLY discourage turning the check off:
        # it actually reorders the xyz data of the native/decoy to match.
        if check:

            data_decoy_long, data_decoy_short = self.read_data_zone(
                self.decoy, resData, return_not_in_zone=True)
            data_ref_long, data_ref_short = self.read_data_zone(
                self.ref, resData, return_not_in_zone=True)

            # match each long-chain decoy atom with its counterpart in the
            # reference (atoms missing on either side are silently dropped)
            atom_decoy_long = [data[:3] for data in data_decoy_long]
            atom_ref_long = [data[:3] for data in data_ref_long]

            xyz_decoy_long, xyz_ref_long = [], []
            for ind_decoy, at in enumerate(atom_decoy_long):
                try:
                    ind_ref = atom_ref_long.index(at)
                    xyz_decoy_long.append(data_decoy_long[ind_decoy][3:])
                    xyz_ref_long.append(data_ref_long[ind_ref][3:])
                except ValueError:
                    pass

            # same matching for the short-chain atoms
            atom_decoy_short = [data[:3] for data in data_decoy_short]
            atom_ref_short = [data[:3] for data in data_ref_short]

            xyz_decoy_short, xyz_ref_short = [], []
            for ind_decoy, at in enumerate(atom_decoy_short):
                try:
                    ind_ref = atom_ref_short.index(at)
                    xyz_decoy_short.append(data_decoy_short[ind_decoy][3:])
                    xyz_ref_short.append(data_ref_short[ind_ref][3:])
                except ValueError:
                    pass

        # extract the xyz without any ordering check
        else:
            print('WARNING : The atom order have not been checked. Switch to check=True or continue at your own risk')
            xyz_decoy_long, xyz_decoy_short = self.read_xyz_zone(
                self.decoy, resData, return_not_in_zone=True)
            xyz_ref_long, xyz_ref_short = self.read_xyz_zone(
                self.ref, resData, return_not_in_zone=True)

        # get the translation so that both long chains are centered
        tr_decoy = self.get_trans_vect(xyz_decoy_long)
        tr_ref = self.get_trans_vect(xyz_ref_long)

        # translate everything for the decoy
        xyz_decoy_short = self.translation(xyz_decoy_short, tr_decoy)
        xyz_decoy_long = self.translation(xyz_decoy_long, tr_decoy)

        # translate everything for the reference
        xyz_ref_short = self.translation(xyz_ref_short, tr_ref)
        xyz_ref_long = self.translation(xyz_ref_long, tr_ref)

        # get the ideal rotation matrix to superimpose the long chains
        U = self.get_rotation_matrix(
            xyz_decoy_long, xyz_ref_long, method=method)

        # rotate the short chain of the decoy accordingly
        xyz_decoy_short = self.rotation_matrix(
            xyz_decoy_short, U, center=False)

        # compute the RMSD of the short chains
        return self.get_rmsd(xyz_decoy_short, xyz_ref_short)
def compute_lzone(self, save_file=True, filename=None):
"""Compute the zone for L-RMSD calculation
Args:
save_file (bool, optional): save the zone file
filename (str, optional): name of the file
Returns:
dict: definition of the zone
"""
sql_ref = pdb2sql(self.ref)
nA = len(sql_ref.get('x,y,z', chainID='A'))
nB = len(sql_ref.get('x,y,z', chainID='B'))
# detect which chain is the longest
long_chain = 'A'
if nA < nB:
long_chain = 'B'
# extract data about the residue
data_test = [tuple(data) for data in sql_ref.get(
'chainID,resSeq', chainID=long_chain)]
data_test = sorted(set(data_test))
# close the sql
sql_ref.close()
if save_file:
if filename is None:
f = open(self.ref.split('.')[0]+'.lzone', 'w')
else:
f = open(filename, 'w')
for res in data_test:
chain = res[0]
num = res[1]
f.write('zone %s%d-%s%d\n' % (chain, num, chain, num))
f.close()
return
else:
resData = {}
for res in data_test:
chain = res[0]
num = res[1]
if chain not in resData.keys():
resData[chain] = []
resData[chain].append(num)
return resData
    def compute_irmsd_fast(self, izone=None, method='svd', cutoff=10.0, check=True):
        """Fast method to compute the i-RMSD.

        Requires the izone; a dedicated routine is implemented to compute
        it, and if izone is not given in argument the routine computes it
        automatically.

        i-RMSD is computed by selecting the backbone atoms of the contact
        residues (within a 10 A cutoff), aligning them as best as possible
        with their counterparts in the reference and computing the RMSD.

        Ref: DockQ: A Quality Measure for Protein-Protein Docking Models
        http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0161879

        Args:
            izone (None, optional): file name of the zone. If None the
                zone is computed first.
            method (str, optional): method to align the fragments
                ('svd', 'quaternion')
            cutoff (float, optional): cutoff for the contact atoms
            check (bool, optional): check and fix the atom ordering.
                Should be True.

        Returns:
            float: i-RMSD value of the conformation
        """
        # create/read the izone: dict mapping chainID -> list of resSeq
        if izone is None:
            resData = self.compute_izone(cutoff, save_file=False)
        elif not os.path.isfile(izone):
            self.compute_izone(cutoff, save_file=True, filename=izone)
            resData = self.read_zone(izone)
        else:
            resData = self.read_zone(izone)

        # The check makes sure that all the atoms are in the same order in
        # decoy and reference. I STRONGLY discourage turning the check off:
        # it actually fixes the order.
        if check:

            data_decoy = self.read_data_zone(
                self.decoy, resData, return_not_in_zone=False)
            data_ref = self.read_data_zone(
                self.ref, resData, return_not_in_zone=False)

            # match each decoy atom with its counterpart in the reference
            # (atoms missing on either side are silently dropped)
            atom_decoy = [data[:3] for data in data_decoy]
            atom_ref = [data[:3] for data in data_ref]

            xyz_contact_decoy, xyz_contact_ref = [], []
            for ind_decoy, at in enumerate(atom_decoy):
                try:
                    ind_ref = atom_ref.index(at)
                    xyz_contact_decoy.append(data_decoy[ind_decoy][3:])
                    xyz_contact_ref.append(data_ref[ind_ref][3:])
                except ValueError:
                    pass

        # extract the xyz without any ordering check
        else:
            print('WARNING : The atom order have not been checked. Switch to check=True or continue at your own risk')
            xyz_contact_decoy = self.read_xyz_zone(self.decoy, resData)
            xyz_contact_ref = self.read_xyz_zone(self.ref, resData)

        # get the translation so that both interfaces are centered
        tr_decoy = self.get_trans_vect(xyz_contact_decoy)
        tr_ref = self.get_trans_vect(xyz_contact_ref)

        # translate everything
        xyz_contact_decoy = self.translation(
            xyz_contact_decoy, tr_decoy)
        xyz_contact_ref = self.translation(xyz_contact_ref, tr_ref)

        # get the ideal rotation matrix to superimpose the interfaces
        U = self.get_rotation_matrix(
            xyz_contact_decoy, xyz_contact_ref, method=method)

        # rotate the decoy interface
        xyz_contact_decoy = self.rotation_matrix(
            xyz_contact_decoy, U, center=False)

        # return the RMSD
        return self.get_rmsd(xyz_contact_decoy, xyz_contact_ref)
def compute_izone(self, cutoff=5.0, save_file=True, filename=None):
"""Compute the zones for i-rmsd calculationss
Args:
cutoff (float, optional): cutoff for the contact atoms
save_file (bool, optional): svae file containing the zone
filename (str, optional): filename
Returns:
dict: i-zone definition
"""
sql_ref = pdb2sql(self.ref)
contact_ref = sql_ref.get_contact_atoms(
cutoff=cutoff, extend_to_residue=True, return_only_backbone_atoms=True)
index_contact_ref = contact_ref[0]+contact_ref[1]
# get the xyz and atom identifier of the decoy contact atoms
#xyz_contact_ref = sql_ref.get('x,y,z',rowID=index_contact_ref)
data_test = [tuple(data) for data in sql_ref.get(
'chainID,resSeq', rowID=index_contact_ref)]
data_test = sorted(set(data_test))
# close the sql
sql_ref.close()
if save_file:
if filename is None:
f = open(self.ref.split('.')[0]+'.izone', 'w')
else:
f = open(filename, 'w')
for res in data_test:
chain = res[0]
num = res[1]
f.write('zone %s%d-%s%d\n' % (chain, num, chain, num))
f.close()
return
else:
resData = {}
for res in data_test:
chain = res[0]
num = res[1]
if chain not in resData.keys():
resData[chain] = []
resData[chain].append(num)
return resData
    def compute_Fnat_fast(self, ref_pairs=None, cutoff=5):
        """Compute the Fnat (fraction of native contacts) of the conformation.

        Args:
            ref_pairs (str, optional): file name of the pickled reference
                residue pairs. If None they are computed first.
            cutoff (int, optional): cutoff for the contact atoms

        Returns:
            float: Fnat value

        Raises:
            ValueError: if the decoy file is not found
        """
        # read/compute the reference residue pairs
        if ref_pairs is None:
            residue_pairs_ref = self.compute_residue_pairs_ref(
                cutoff, save_file=False)
        elif not os.path.isfile(ref_pairs):
            self.compute_residue_pairs_ref(
                cutoff, save_file=True, filename=ref_pairs)
            f = open(ref_pairs, 'rb')
            residue_pairs_ref = pickle.load(f)
            f.close()
        else:
            f = open(ref_pairs, 'rb')
            residue_pairs_ref = pickle.load(f)
            f.close()

        # get the decoy data, either from a file name or from an array of
        # byte-encoded pdb lines
        if isinstance(self.decoy, str) and os.path.isfile(self.decoy):
            with open(self.decoy, 'r') as f:
                data_decoy = f.readlines()
            decoy_name = os.path.basename(self.decoy)
        elif isinstance(self.decoy, np.ndarray):
            data_decoy = [l.decode('utf-8') for l in self.decoy]
            decoy_name = 'decoy'
        else:
            raise ValueError('Decoy not found')

        # parse the decoy data
        atom_decoy, xyz_decoy = [], []  # NOTE(review): never populated; kept for compatibility
        residue_xyz = {}   # (chainID, resSeq, resName) -> list of heavy-atom xyz
        residue_name = {}  # (chainID, resSeq, resName) -> list of heavy-atom names

        # go through all the lines that start with ATOM
        for line in data_decoy:
            if line.startswith('ATOM'):

                # chain ID: column 22, falling back to column 73 when blank
                chainID = line[21]
                if chainID == ' ':
                    chainID = line[72]

                # atom info
                resSeq = int(line[22:26])
                resName = line[17:20].strip()
                name = line[12:16].strip()

                # position
                x, y, z = float(line[30:38]), float(
                    line[38:46]), float(line[46:54])

                # dict entry
                key = (chainID, resSeq, resName)

                # create the dict entry if necessary
                if key not in residue_xyz.keys():
                    residue_xyz[key] = []
                    residue_name[key] = []

                # we exclude the hydrogens from the search
                if name[0] != 'H':
                    residue_xyz[key].append([x, y, z])
                    residue_name[key].append(name)

        # loop over the residue pairs of the reference and count a common
        # contact when the closest atom pair is within the cutoff
        nCommon, nTotal = 0, 0
        for resA, resB_list in residue_pairs_ref.items():
            if resA in residue_xyz:
                xyzA = residue_xyz[resA]
                for resB in resB_list:
                    if resB in residue_xyz.keys():
                        xyzB = residue_xyz[resB]
                        dist_min = np.min(np.array([np.sqrt(np.sum(
                            (np.array(p1)-np.array(p2))**2)) for p1 in xyzA for p2 in xyzB]))
                        if dist_min <= cutoff:
                            nCommon += 1
                    nTotal += 1
            else:
                msg = '\t FNAT Warning could not find residue: ', resA, ' in: ', decoy_name
                _printif(msg, self.verbose)

        # normalize by the number of reference pairs
        return nCommon/nTotal
def compute_residue_pairs_ref(self, cutoff=5.0, save_file=True, filename=None):
"""Compute the residue pair on the reference conformation
Args:
cutoff (float, optional): cutoff for the contact atoms
save_file (bool, optional): save the file containing the residue pairs
filename (None, optional): filename
Returns:
dict: defintition of the residue pairs
"""
sql_ref = pdb2sql(self.ref, sqlfile='mol2.db')
residue_pairs_ref = sql_ref.get_contact_residue(cutoff=cutoff, return_contact_pairs=True,
excludeH=True)
sql_ref.close()
if save_file:
if filename is None:
f = open(self.ref.split('.')[
0]+'residue_contact_pairs.pckl', 'wb')
else:
f = open(filename, 'wb')
# save as pickle
pickle.dump(residue_pairs_ref, f)
f.close()
return
else:
return residue_pairs_ref
    def compute_lrmsd_pdb2sql(self, exportpath=None, method='svd'):
        """Slow routine to compute the L-RMSD, parsing the PDB via pdb2sql.

        L-RMSD is computed by aligning the longest chain of the decoy to
        the one of the reference and computing the RMSD of the shortest
        chain between decoy and reference.

        Ref: DockQ: A Quality Measure for Protein-Protein Docking Models
        http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0161879

        Args:
            exportpath (str, optional): directory where the aligned pdbs
                are exported
            method (str, optional): method to align the fragments
                ('svd', 'quaternion')

        Returns:
            float: L-RMSD value of the conformation
        """
        # create the sql databases
        sql_decoy = pdb2sql(self.decoy, sqlfile='decoy.db')
        sql_ref = pdb2sql(self.ref, sqlfile='ref.db')

        # extract the positions of chains A
        xyz_decoy_A = np.array(sql_decoy.get('x,y,z', chainID='A'))
        xyz_ref_A = np.array(sql_ref.get('x,y,z', chainID='A'))

        # extract the positions of chains B
        xyz_decoy_B = np.array(sql_decoy.get('x,y,z', chainID='B'))
        xyz_ref_B = np.array(sql_ref.get('x,y,z', chainID='B'))

        # if the lengths differ, keep only the atoms present in both
        # decoy and reference
        if len(xyz_decoy_A) != len(xyz_ref_A):
            xyz_decoy_A, xyz_ref_A = self.get_identical_atoms(
                sql_decoy, sql_ref, 'A')

        if len(xyz_decoy_B) != len(xyz_ref_B):
            xyz_decoy_B, xyz_ref_B = self.get_identical_atoms(
                sql_decoy, sql_ref, 'B')

        # detect which chain is the longest
        nA, nB = len(xyz_decoy_A), len(xyz_decoy_B)
        if nA > nB:
            xyz_decoy_long = xyz_decoy_A
            xyz_ref_long = xyz_ref_A
            xyz_decoy_short = xyz_decoy_B
            xyz_ref_short = xyz_ref_B
        else:
            xyz_decoy_long = xyz_decoy_B
            xyz_ref_long = xyz_ref_B
            xyz_decoy_short = xyz_decoy_A
            xyz_ref_short = xyz_ref_A

        # get the translation so that both long chains are centered
        tr_decoy = self.get_trans_vect(xyz_decoy_long)
        tr_ref = self.get_trans_vect(xyz_ref_long)

        # translate everything for the decoy
        xyz_decoy_short = self.translation(xyz_decoy_short, tr_decoy)
        xyz_decoy_long = self.translation(xyz_decoy_long, tr_decoy)

        # translate everything for the reference
        xyz_ref_short = self.translation(xyz_ref_short, tr_ref)
        xyz_ref_long = self.translation(xyz_ref_long, tr_ref)

        # get the ideal rotation matrix to superimpose the long chains
        U = self.get_rotation_matrix(
            xyz_decoy_long, xyz_ref_long, method=method)

        # rotate the short chain of the decoy
        xyz_decoy_short = self.rotation_matrix(
            xyz_decoy_short, U, center=False)

        # compute the RMSD
        lrmsd = self.get_rmsd(xyz_decoy_short, xyz_ref_short)

        # export the aligned pdbs for verification
        if exportpath is not None:

            # extract the positions of the dimer
            xyz_decoy = np.array(sql_decoy.get('x,y,z'))
            xyz_ref = np.array(sql_ref.get('x,y,z'))

            # translate
            xyz_ref = self.translation(xyz_ref, tr_ref)
            xyz_decoy = self.translation(xyz_decoy, tr_decoy)

            # rotate the decoy
            xyz_decoy = self.rotation_matrix(
                xyz_decoy, U, center=False)

            # update the sql databases
            sql_decoy.update_xyz(xyz_decoy)
            sql_ref.update_xyz(xyz_ref)

            # export
            sql_decoy.exportpdb(exportpath+'/lrmsd_decoy.pdb')
            sql_ref.exportpdb(exportpath+'/lrmsd_aligned.pdb')

        # close the databases
        sql_decoy.close()
        sql_ref.close()

        return lrmsd
@staticmethod
def get_identical_atoms(db1, db2, chain):
"""Return that atoms shared by both databse for a specific chain
Args:
db1 (TYPE): pdb2sql database of the first conformation
db2 (TYPE): pdb2sql database of the 2nd conformation
chain (str): chain name
Returns:
list, list: list of xyz for both database
"""
# get data
data1 = db1.get('chainID,resSeq,name', chainID=chain)
data2 = db2.get('chainID,resSeq,name', chainID=chain)
# tuplify
data1 = [tuple(d1) for d1 in data1]
data2 = [tuple(d2) for d2 in data2]
# get the intersection
shared_data = list(set(data1).intersection(data2))
# get the xyz
xyz1, xyz2 = [], []
for data in shared_data:
query = 'SELECT x,y,z from ATOM WHERE chainID=? AND resSeq=? and name=?'
xyz1.append(list(list(db1.c.execute(query, data))[0]))
xyz2.append(list(list(db2.c.execute(query, data))[0]))
return xyz1, xyz2
    def compute_irmsd_pdb2sql(self, cutoff=10, method='svd', izone=None, exportpath=None):
        """Slow method to compute the i-RMSD, parsing the PDB via pdb2sql.

        Requires the izone; a dedicated routine is implemented to compute
        it, and if izone is not given in argument the routine computes it
        automatically.

        i-RMSD is computed by selecting the contact residues (within a
        10 A cutoff) in the reference, aligning their atoms as best as
        possible with their counterparts in the decoy and computing the
        RMSD.

        Ref: DockQ: A Quality Measure for Protein-Protein Docking Models
        http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0161879

        Args:
            cutoff (float, optional): cutoff for the contact atoms
            method (str, optional): method to align the fragments
                ('svd', 'quaternion')
            izone (None, optional): file name of the zone. If None the
                zone is computed first.
            exportpath (str, optional): directory where the aligned pdbs
                are exported

        Returns:
            float: i-RMSD value of the conformation

        Raises:
            ValueError: if a chain is no longer represented after matching
        """
        # create the sql databases
        sql_decoy = pdb2sql(self.decoy)
        sql_ref = pdb2sql(self.ref)

        # get the contact atoms of the reference
        if izone is None:
            contact_ref = sql_ref.get_contact_atoms(cutoff=cutoff, extend_to_residue=True,
                                                    return_only_backbone_atoms=False)
            index_contact_ref = contact_ref[0]+contact_ref[1]
        else:
            index_contact_ref = self.get_izone_rowID(
                sql_ref, izone, return_only_backbone_atoms=False)

        # get the xyz and atom identifiers of the reference contact atoms
        xyz_contact_ref = sql_ref.get(
            'x,y,z', rowID=index_contact_ref)
        data_contact_ref = sql_ref.get(
            'chainID,resSeq,resName,name', rowID=index_contact_ref)

        # get the xyz and atom identifiers of the whole decoy
        xyz_decoy = sql_decoy.get('x,y,z')
        data_decoy = sql_decoy.get('chainID,resSeq,resName,name')

        # loop through the ref labels:
        # if the atom is in the decoy     -> add its xyz to xyz_contact_decoy
        # if the atom is not in the decoy -> mark the ref entry for removal
        xyz_contact_decoy = []
        index_contact_decoy = []
        clean_ref = False
        for iat, atom in enumerate(data_contact_ref):
            try:
                index = data_decoy.index(atom)
                index_contact_decoy.append(index)
                xyz_contact_decoy.append(xyz_decoy[index])
            except Exception:
                xyz_contact_ref[iat] = None
                index_contact_ref[iat] = None
                clean_ref = True

        # remove the entries marked above
        if clean_ref:
            xyz_contact_ref = [
                xyz for xyz in xyz_contact_ref if xyz is not None]
            index_contact_ref = [
                ind for ind in index_contact_ref if ind is not None]

        # check that we still have atoms in both chains
        chain_decoy = list(
            set(sql_decoy.get('chainID', rowID=index_contact_decoy)))
        chain_ref = list(
            set(sql_ref.get('chainID', rowID=index_contact_ref)))

        if len(chain_decoy) < 1 or len(chain_ref) < 1:
            raise ValueError(
                'Error in i-rmsd: only one chain represented in one chain')

        # get the translation so that both interfaces are centered
        tr_decoy = self.get_trans_vect(xyz_contact_decoy)
        tr_ref = self.get_trans_vect(xyz_contact_ref)

        # translate everything
        xyz_contact_decoy = self.translation(
            xyz_contact_decoy, tr_decoy)
        xyz_contact_ref = self.translation(xyz_contact_ref, tr_ref)

        # get the ideal rotation matrix to superimpose the interfaces
        U = self.get_rotation_matrix(
            xyz_contact_decoy, xyz_contact_ref, method=method)

        # rotate the decoy interface
        xyz_contact_decoy = self.rotation_matrix(
            xyz_contact_decoy, U, center=False)

        # compute the RMSD
        irmsd = self.get_rmsd(xyz_contact_decoy, xyz_contact_ref)

        # export the aligned interfaces for verification
        if exportpath is not None:

            # update the sql databases
            sql_decoy.update_xyz(
                xyz_contact_decoy, index=index_contact_decoy)
            sql_ref.update_xyz(
                xyz_contact_ref, index=index_contact_ref)

            sql_decoy.exportpdb(
                exportpath+'/irmsd_decoy.pdb', index=index_contact_decoy)
            sql_ref.exportpdb(
                exportpath+'/irmsd_ref.pdb', index=index_contact_ref)

        # close the databases
        sql_decoy.close()
        sql_ref.close()

        return irmsd
@staticmethod
def get_izone_rowID(sql, izone, return_only_backbone_atoms=True):
"""Compute the index of the izone atoms
Args:
sql (pdb2sql): database of the conformation
izone (str): filename to store the zone
return_only_backbone_atoms (bool, optional): Returns only the backbone atoms
Returns:
lis(int): index of the atoms in the zone
Raises:
FileNotFoundError: if the izone file is not found
"""
# read the file
if not os.path.isfile(izone):
raise FileNotFoundError('i-zone file not found', izone)
with open(izone, 'r') as f:
data = f.readlines()
# get the data out of it
resData = {}
for line in data:
res = line.split()[1].split('-')[0]
chainID, resSeq = res[0], int(res[1:])
if chainID not in resData.keys():
resData[chainID] = []
resData[chainID].append(resSeq)
# get the rowID
index_contact = []
for chainID, resSeq in resData.items():
if return_only_backbone_atoms:
index_contact += sql.get('rowID', chainID=chainID, resSeq=resSeq,
name=['C', 'CA', 'N', 'O'])
else:
index_contact += sql.get('rowID',
chainID=chainID, resSeq=resSeq)
return index_contact
def compute_Fnat_pdb2sql(self, cutoff=5.0):
"""Slow method to compute the FNAT usign pdb2sql
Args:
cutoff (float, optional): cutoff for the contact atoms
Returns:
float: Fnat value for the conformation
"""
# create the sql
sql_decoy = pdb2sql(self.decoy)
sql_ref = pdb2sql(self.ref)
# get the contact atoms of the decoy
residue_pairs_decoy = sql_decoy.get_contact_residue(cutoff=cutoff,
return_contact_pairs=True,
excludeH=True)
# get the contact atoms of the ref
residue_pairs_ref = sql_ref.get_contact_residue(cutoff=cutoff,
return_contact_pairs=True,
excludeH=True)
# form the pair data
data_pair_decoy = []
for resA, resB_list in residue_pairs_decoy.items():
data_pair_decoy += [(resA, resB) for resB in resB_list]
# form the pair data
data_pair_ref = []
for resA, resB_list in residue_pairs_ref.items():
data_pair_ref += [(resA, resB) for resB in resB_list]
# find the umber of residue that ref and decoys hace in common
nCommon = len(
set(data_pair_ref).intersection(data_pair_decoy))
# normalize
Fnat = nCommon/len(data_pair_ref)
sql_decoy.close()
sql_ref.close()
return Fnat
################################################################################################
#
# HELPER ROUTINES TO HANDLE THE ZONE FILES
#
#################################################################################################
@staticmethod
def read_xyz_zone(pdb_file, resData, return_not_in_zone=False):
"""Read the xyz of the zone atoms
Args:
pdb_file (str): filename containing the pdb of the molecule
resData (dict): information about the residues
return_not_in_zone (bool, optional): Do we return the atoms not in the zone
Returns:
list(float): XYZ of the atoms in the zone
"""
# read the ref file
with open(pdb_file, 'r') as f:
data = f.readlines()
# get the xyz of the
xyz_in_zone = []
xyz_not_in_zone = []
for line in data:
if line.startswith('ATOM'):
chainID = line[21]
if chainID == ' ':
chainID = line[72]
resSeq = int(line[22:26])
name = line[12:16].strip()
x = float(line[30:38])
y = float(line[38:46])
z = float(line[46:54])
if chainID in resData.keys():
if resSeq in resData[chainID] and name in ['C', 'CA', 'N', 'O']:
xyz_in_zone.append([x, y, z])
elif resSeq not in resData[chainID] and name in ['C', 'CA', 'N', 'O']:
xyz_not_in_zone.append([x, y, z])
else:
if name in ['C', 'CA', 'N', 'O']:
xyz_not_in_zone.append([x, y, z])
if return_not_in_zone:
return xyz_in_zone, xyz_not_in_zone
else:
return xyz_in_zone
@staticmethod
def read_data_zone(pdb_file, resData, return_not_in_zone=False):
"""Read the data of the atoms in the zone.
Args:
pdb_file (str): filename containing the pdb of the molecule
resData (dict): information about the residues
return_not_in_zone (bool, optional): Do we return the atoms not in the zone
Returns:
list(float): data of the atoms in the zone
"""
# read the ref file
if isinstance(pdb_file, str) and os.path.isfile(pdb_file):
with open(pdb_file, 'r') as f:
data = f.readlines()
elif isinstance(pdb_file, np.ndarray):
data = [l.decode('utf-8') for l in pdb_file]
# get the xyz of the
data_in_zone = []
data_not_in_zone = []
for line in data:
if line.startswith('ATOM'):
chainID = line[21]
if chainID == ' ':
chainID = line[72]
resSeq = int(line[22:26])
name = line[12:16].strip()
x = float(line[30:38])
y = float(line[38:46])
z = float(line[46:54])
if chainID in resData.keys():
if resSeq in resData[chainID] and name in ['C', 'CA', 'N', 'O']:
data_in_zone.append(
[chainID, resSeq, name, x, y, z])
elif resSeq not in resData[chainID] and name in ['C', 'CA', 'N', 'O']:
data_not_in_zone.append(
[chainID, resSeq, name, x, y, z])
else:
if name in ['C', 'CA', 'N', 'O']:
data_not_in_zone.append(
[chainID, resSeq, name, x, y, z])
if return_not_in_zone:
return data_in_zone, data_not_in_zone
else:
return data_in_zone
@staticmethod
def read_zone(zone_file):
"""Read the zone file.
Args:
zone_file (str): name of the file
Returns:
dict: Info aboyt the residues in the zone
Raises:
FileNotFoundError: if the zone file is not found
"""
# read the izone file
if not os.path.isfile(zone_file):
raise FileNotFoundError('zone file not found', zone_file)
with open(zone_file, 'r') as f:
data = f.readlines()
# get the data out of it
resData = {}
for line in data:
# line = zone A4-A4 for positive resNum
# or line = zone A-4-A-4 for negative resNum
# that happens for example in 2OUL
# split the line
res = line.split()[1].split('-')
# if the resnum was positive
# we have e.g res = [A4,A4]
if len(res) == 2:
res = res[0]
chainID, resSeq = res[0], int(res[1:])
# if the resnum was negative was negtive
# we have e.g res = [A,4,A,4]
elif len(res) == 4:
chainID, resSeq = res[0], -int(res[1])
if chainID not in resData.keys():
resData[chainID] = []
resData[chainID].append(resSeq)
return resData
###################################################################
#
# ROUTINES TO ACTUALY ALIGN THE MOLECULES
#
###################################################################
@staticmethod
def compute_DockQScore(Fnat, lrmsd, irmsd, d1=8.5, d2=1.5):
"""Compute the DockQ Score
Args:
Fnat (float): Fnat value
lrmsd (float): lrmsd value
irmsd (float): irmsd value
d1 (float, optional): first coefficient for the DockQ calculations
d2 (float, optional): second coefficient for the DockQ calculations
"""
def scale_rms(rms, d):
return(1./(1+(rms/d)**2))
return 1./3 * (Fnat + scale_rms(lrmsd, d1) + scale_rms(irmsd, d2))
@staticmethod
def get_rmsd(P, Q):
"""compute the RMSD
Args:
P (np.array(nx3)): position of the points in the first molecule
Q (np.array(nx3)): position of the points in the second molecule
Returns:
float: RMSD value
"""
n = len(P)
return np.sqrt(1./n*np.sum((P-Q)**2))
@staticmethod
def get_trans_vect(P):
"""Get the translationv vector to the origin
Args:
P (np.array(nx3)): position of the points in the molecule
Returns:
float: minus mean value of the xyz columns
"""
return -np.mean(P, 0)
# main switch for the rotation matrix
# add new methods here if necessary
def get_rotation_matrix(self, P, Q, method='svd'):
    """Dispatch the rotation-matrix computation to the requested method.

    Args:
        P (np.array): xyz of the first point cloud
        Q (np.array): xyz of the second point cloud
        method (str, optional): 'svd' (Kabsch) or 'quaternion'

    Returns:
        np.array: rotation matrix

    Raises:
        ValueError: if the method is not supported
    """
    key = method.lower()

    # Kabsch (SVD-based) method
    if key == 'svd':
        return self.get_rotation_matrix_Kabsh(P, Q)

    # quaternion method
    if key == 'quaternion':
        return self.get_rotation_matrix_quaternion(P, Q)

    raise ValueError(
        '%s is not a valid method for rmsd alignement.\n Options are svd or quaternions' % method)
@staticmethod
def get_rotation_matrix_Kabsh(P, Q):
'''Get the rotation matrix to aligh two point clouds.
The method is based on th Kabsh approach
https://cnx.org/contents/HV-RsdwL@23/Molecular-Distance-Measures
Args:
P (np.array): xyz of the first point cloud
Q (np.array): xyz of the second point cloud
Returns:
np.array: rotation matrix
Raises:
ValueError: matrix have different sizes
'''
pshape = P.shape
qshape = Q.shape
if pshape[0] == qshape[0]:
npts = pshape[0]
else:
raise ValueError(
"Matrix don't have the same number of points", P.shape, Q.shape)
p0, q0 = np.abs(np.mean(P, 0)), np.abs(np.mean(Q, 0))
eps = 1E-6
if any(p0 > eps) or any(q0 > eps):
raise ValueError(
'You must center the fragment first', p0, q0)
# form the covariance matrix
A = np.dot(P.T, Q)/npts
# SVD the matrix
V, S, W = np.linalg.svd(A)
# the W matrix returned here is
# already its transpose
# https://docs.scipy.org/doc/numpy-1.13.0/reference/generated/numpy.linalg.svd.html
W = W.T
# determinant
d = np.linalg.det(np.dot(W, V.T))
# form the U matrix
Id = np.eye(3)
if d < 0:
Id[2, 2] = -1
U = np.dot(W, np.dot(Id, V.T))
return U
@staticmethod
def get_rotation_matrix_quaternion(P, Q):
'''Get the rotation matrix to aligh two point clouds
The method is based on the quaternion approach
http://www.ams.stonybrook.edu/~coutsias/papers/rmsd17.pdf
Args:
P (np.array): xyz of the first point cloud
Q (np.array): xyz of the second point cloud
Returns:
np.array: rotation matrix
Raises:
ValueError: matrix have different sizes
'''
pshape = P.shape
qshape = Q.shape
if pshape[0] != qshape[0]:
raise ValueError(
"Matrix don't have the same number of points", P.shape, Q.shape)
p0, q0 = np.abs(np.mean(P, 0)), np.abs(np.mean(Q, 0))
eps = 1E-6
if any(p0 > eps) or any(q0 > eps):
raise ValueError(
'You must center the fragment first', p0, q0)
# form the correlation matrix
R = np.dot(P.T, Q)
# form the F matrix (eq. 10 of ref[1])
F = np.zeros((4, 4))
F[0, 0] = np.trace(R)
F[0, 1] = R[1, 2]-R[2, 1]
F[0, 2] = R[2, 0]-R[0, 2]
F[0, 3] = R[0, 1]-R[1, 0]
F[1, 0] = R[1, 2]-R[2, 1]
F[1, 1] = R[0, 0]-R[1, 1]-R[2, 2]
F[1, 2] = R[0, 1]+R[1, 0]
F[1, 3] = R[0, 2]+R[2, 0]
F[2, 0] = R[2, 0]-R[0, 2]
F[2, 1] = R[0, 1]+R[1, 0]
F[2, 2] = -R[0, 0]+R[1, 1]-R[2, 2]
F[2, 3] = R[1, 2]+R[2, 1]
F[3, 0] = R[0, 1]-R[1, 0]
F[3, 1] = R[0, 2]+R[2, 0]
F[3, 2] = R[1, 2]+R[2, 1]
F[3, 3] = -R[0, 0]-R[1, 1]+R[2, 2]
# diagonalize it
l, U = np.linalg.eig(F)
# extract the eigenvect of the highest eigenvalues
indmax = np.argmax(l)
q0, q1, q2, q3 = U[:, indmax]
# form the rotation matrix (eq. 33 ref[1])
U = np.zeros((3, 3))
U[0, 0] = q0**2+q1**2-q2**2-q3**2
U[0, 1] = 2*(q1*q2-q0*q3)
U[0, 2] = 2*(q1*q3+q0*q2)
U[1, 1] = 2*(q1*q2+q0*q3)
U[1, 2] = q0**2-q1**2+q2*2-q3**2
U[1, 2] = 2*(q2*q3-q0*q1)
U[2, 0] = 2*(q1*q3-q0*q2)
U[2, 1] = 2*(q2*q3+q0*q1)
U[2, 2] = q0**2-q1**2-q2**2+q3**2
return U
@staticmethod
def translation(xyz, vect):
"""Translate a fragment
Args:
xyz (np.array): position of the fragment
vect (np.array): translation vector
Returns:
np.array: translated positions
"""
return xyz + vect
@staticmethod
def rotation_around_axis(xyz, axis, angle):
"""Rotate a fragment around an axis.
Args:
xyz (np.array): original positions
axis (np.array): axis of rotation
angle (float): angle of rotation (radians)
Returns:
np.array: Rotated positions
"""
# get the data
ct, st = np.cos(angle), np.sin(angle)
ux, uy, uz = axis
# get the center of the molecule
xyz0 = np.mean(xyz, 0)
# definition of the rotation matrix
# see https://en.wikipedia.org/wiki/Rotation_matrix
rot_mat = np.array([
[ct + ux**2*(1-ct), ux*uy*(1-ct) -
uz*st, ux*uz*(1-ct) + uy*st],
[uy*ux*(1-ct) + uz*st, ct + uy**2 *
(1-ct), uy*uz*(1-ct) - ux*st],
[uz*ux*(1-ct) - uy*st, uz*uy*(1-ct) + ux*st, ct + uz**2*(1-ct)]])
# apply the rotation
return np.dot(rot_mat, (xyz-xyz0).T).T + xyz0
@staticmethod
def rotation_euler(xyz, alpha, beta, gamma):
"""Rotate a fragment from Euler rotation angle
Args:
xyz (np.array): original positions
alpha (float): rotation angle around the x axis
beta (float): rotation angle around the x axis
gamma (float): rotation angle around the x axis
Returns:
np.array: Rotated positions
"""
# precompute the trig
ca, sa = np.cos(alpha), np.sin(alpha)
cb, sb = np.cos(beta), np.sin(beta)
cg, sg = np.cos(gamma), np.sin(gamma)
# get the center of the molecule
xyz0 = np.mean(xyz, 0)
# rotation matrices
rx = np.array([[1, 0, 0], [0, ca, -sa], [0, sa, ca]])
ry = np.array([[cb, 0, sb], [0, 1, 0], [-sb, 0, cb]])
rz = np.array([[cg, -sg, 0], [sg, cs, 0], [0, 0, 1]])
rot_mat = np.dot(rx, np.dot(ry, rz))
# apply the rotation
return np.dot(rot_mat, (xyz-xyz0).T).T + xyz0
@staticmethod
def rotation_matrix(xyz, rot_mat, center=True):
"""Rotate a fragment from a roation matrix
Args:
xyz (np.array): original positions
rot_mat (np.array): rotation matrix
center (bool, optional): Center the fragment before rotation
Returns:
np.array: rotated positions
"""
if center:
xyz0 = np.mean(xyz)
return np.dot(rot_mat, (xyz-xyz0).T).T + xyz0
else:
return np.dot(rot_mat, (xyz).T).T
# if __name__ == '__main__':
# import time
# BM4 = '/home/nico/Documents/projects/deeprank/data/HADDOCK/BM4_dimers/'
# decoy = BM4 + 'decoys_pdbFLs/1AK4/water/1AK4_1w.pdb'
# ref = BM4 + 'BM4_dimers_bound/pdbFLs_ori/1AK4.pdb'
# sim = StructureSimilarity(decoy,ref)
# #----------------------------------------------------------------------
# t0 = time.time()
# irmsd_fast = sim.compute_irmsd_fast(method='svd',izone='1AK4.izone')
# t1 = time.time()-t0
# print('\nIRMSD TIME FAST %f in %f sec' %(irmsd_fast,t1))
# t0 = time.time()
# irmsd = sim.compute_irmsd_pdb2sql(method='svd',izone='1AK4.izone')
# t1 = time.time()-t0
# print('IRMSD TIME SQL %f in %f sec' %(irmsd,t1))
# #----------------------------------------------------------------------
# t0 = time.time()
# lrmsd_fast = sim.compute_lrmsd_fast(method='svd',lzone='1AK4.lzone',check=True)
# t1 = time.time()-t0
# print('\nLRMSD TIME FAST %f in %f sec' %(lrmsd_fast,t1))
# t0 = time.time()
# lrmsd = sim.compute_lrmsd_pdb2sql(exportpath=None,method='svd')
# t1 = time.time()-t0
# print('LRMSD TIME SQL %f in %f sec' %(lrmsd,t1))
# #----------------------------------------------------------------------
# t0 = time.time()
# Fnat = sim.compute_Fnat_pdb2sql()
# t1 = time.time()-t0
# print('\nFNAT TIME SQL %f in %f sec' %(Fnat,t1))
# t0 = time.time()
# Fnat_fast = sim.compute_Fnat_fast(ref_pairs='1AK4.ref_pairs')
# t1 = time.time()-t0
# print('LRMSD TIME FAST %f in %f sec' %(Fnat_fast,t1))
# #----------------------------------------------------------------------
# dockQ = sim.compute_DockQScore(Fnat_fast,lrmsd_fast,irmsd_fast)
# print('\nDockQ %f' %dockQ ) | PypiClean |
/Kiosk_Client-0.8.4.tar.gz/Kiosk_Client-0.8.4/kiosk_client/manager.py | """Manager class used to create and manage jobs"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import logging
import os
import timeit
import uuid
import requests
from google.cloud import storage as google_storage
from twisted.internet import defer, reactor
from twisted.web.client import HTTPConnectionPool
from kiosk_client.job import Job
from kiosk_client.utils import iter_image_files
from kiosk_client.utils import sleep
from kiosk_client.utils import strip_bucket_prefix
from kiosk_client.utils import get_download_path
from kiosk_client import settings
from kiosk_client.cost import CostGetter
class JobManager(object):
"""Manages many DeepCell Kiosk jobs.
Args:
host (str): public IP address of the DeepCell Kiosk cluster.
job_type (str): DeepCell Kiosk job type (e.g. "segmentation").
upload_prefix (str): upload all files to this folder in the bucket.
refresh_rate (int): seconds between each manager status check.
update_interval (int): seconds between each job status refresh.
expire_time (int): seconds until finished jobs are expired.
start_delay (int): delay between each job, in seconds.
"""
def __init__(self, host, job_type, **kwargs):
self.logger = logging.getLogger(str(self.__class__.__name__))
self.created_at = timeit.default_timer()
self.all_jobs = []
self.host = self._get_host(host)
self.job_type = job_type
model = kwargs.get('model', '')
if model:
try:
model_name, model_version = str(model).split(':')
model_version = int(model_version)
except Exception as err:
self.logger.error('Invalid model name, must be of the form '
'"ModelName:Version", for example "model:0".')
raise err
else:
model_name, model_version = '', ''
self.model_name = model_name
self.model_version = model_version
data_scale = str(kwargs.get('data_scale', ''))
if data_scale:
try:
data_scale = float(data_scale)
except ValueError:
raise ValueError('data_scale must be a number.')
self.data_scale = data_scale
data_label = str(kwargs.get('data_label', ''))
if data_label:
try:
data_label = int(data_label)
except ValueError:
raise ValueError('data_label must be an integer.')
self.data_label = data_label
self.preprocess = kwargs.get('preprocess', '')
self.postprocess = kwargs.get('postprocess', '')
self.upload_prefix = kwargs.get('upload_prefix', 'uploads')
self.upload_prefix = strip_bucket_prefix(self.upload_prefix)
self.refresh_rate = int(kwargs.get('refresh_rate', 10))
self.update_interval = kwargs.get('update_interval', 10)
self.expire_time = kwargs.get('expire_time', 3600)
self.start_delay = kwargs.get('start_delay', 0.1)
self.bucket = kwargs.get('storage_bucket')
self.upload_results = kwargs.get('upload_results', False)
self.download_results = kwargs.get('download_results', True)
self.calculate_cost = kwargs.get('calculate_cost', False)
self.output_dir = kwargs.get('output_dir', get_download_path())
if not os.path.isdir(self.output_dir):
raise ValueError('Invalid value for output_dir,'
' %s is not a directory.' % self.output_dir)
if not os.access(self.output_dir, os.W_OK):
raise ValueError('Invalid value for output_dir,'
' %s is not writable.' % self.output_dir)
# initializing cost estimation workflow
self.cost_getter = CostGetter()
self.sleep = sleep # allow monkey-patch
# twisted configuration
self.pool = HTTPConnectionPool(reactor, persistent=True)
self.pool.maxPersistentPerHost = settings.CONCURRENT_REQUESTS_PER_HOST
self.pool.retryAutomatically = False
def _get_host(self, host):
"""Send a GET request to the provided host. Check for redirects.
Twisted does not allow POST requests to follow redirects. Use requests
to send a single GET request to the host and follow any redirects.
Args:
host (str): The user-provided hostname.
Returns:
str: The hostname after all redirects.
"""
host = str(host).lower()
if not any(host.startswith(x) for x in ('http://', 'https://')):
host = 'http://{}'.format(host)
try:
url = str(requests.get(host).url)
url = url[:-1] if url.endswith('/') else url
return url
except:
raise RuntimeError('Could not connect to host: %s' % host)
def upload_file(self, filepath, acl='publicRead',
hash_filename=True, prefix=None):
prefix = self.upload_prefix if prefix is None else prefix
start = timeit.default_timer()
storage_client = google_storage.Client()
self.logger.debug('Uploading %s.', filepath)
if hash_filename:
_, ext = os.path.splitext(filepath)
dest = '{}{}'.format(uuid.uuid4().hex, ext)
else:
dest = os.path.basename(filepath)
bucket = storage_client.get_bucket(self.bucket)
blob = bucket.blob(os.path.join(prefix, dest))
blob.upload_from_filename(filepath, predefined_acl=acl)
self.logger.debug('Uploaded %s to %s in %s seconds.',
filepath, dest, timeit.default_timer() - start)
return dest
def make_job(self, filepath):
return Job(filepath=filepath,
host=self.host,
model_name=self.model_name,
model_version=self.model_version,
job_type=self.job_type,
data_scale=self.data_scale,
data_label=self.data_label,
postprocess=self.postprocess,
upload_prefix=self.upload_prefix,
update_interval=self.update_interval,
download_results=self.download_results,
expire_time=self.expire_time,
pool=self.pool,
output_dir=self.output_dir)
def get_completed_job_count(self):
created, complete, failed, expired = 0, 0, 0, 0
statuses = {}
for j in self.all_jobs:
expired += int(j.is_expired) # true mark of being done
complete += int(j.is_summarized)
created += int(j.job_id is not None)
if j.status is not None:
if j.status not in statuses:
statuses[j.status] = 1
else:
statuses[j.status] += 1
if j.failed:
j.restart(delay=self.start_delay * failed)
# # TODO: patched! "done" jobs can get stranded before summarization
# if j.status == 'done' and not j.is_summarized:
# j.summarize()
#
# # TODO: patched! sometimes jobs don't get expired?
# elif j.status == 'done' and j.is_summarized and not j.is_expired:
# j.expire()
self.logger.info('%s created; %s finished; %s summarized; '
'%s; %s jobs total', created, expired, complete,
'; '.join('%s %s' % (v, k)
for k, v in statuses.items()),
len(self.all_jobs))
if len(self.all_jobs) - expired <= 25:
for j in self.all_jobs:
if not j.is_expired:
self.logger.info('Waiting on key `%s` with status %s',
j.job_id, j.status)
return expired
@defer.inlineCallbacks
def _stop(self):
yield reactor.stop() # pylint: disable=no-member
@defer.inlineCallbacks
def check_job_status(self):
complete = -1 # initialize comparison value
while complete != len(self.all_jobs):
yield self.sleep(self.refresh_rate)
complete = self.get_completed_job_count() # synchronous
self.summarize() # synchronous
yield self._stop()
def summarize(self):
time_elapsed = timeit.default_timer() - self.created_at
self.logger.info('Finished %s jobs in %s seconds.',
len(self.all_jobs), time_elapsed)
# add cost and timing data to json output
cpu_cost, gpu_cost, total_cost = '', '', ''
if self.calculate_cost:
try:
cpu_cost, gpu_cost, total_cost = self.cost_getter.finish()
except Exception as err: # pylint: disable=broad-except
self.logger.error('Encountered %s while getting cost data: %s',
type(err).__name__, err)
jsondata = {
'cpu_node_cost': cpu_cost,
'gpu_node_cost': gpu_cost,
'total_node_and_networking_costs': total_cost,
'start_delay': self.start_delay,
'num_jobs': len(self.all_jobs),
'time_elapsed': time_elapsed,
'job_data': [j.json() for j in self.all_jobs]
}
output_filepath = '{}{}jobs_{}delay_{}.json'.format(
'{}gpu_'.format(settings.NUM_GPUS) if settings.NUM_GPUS else '',
len(self.all_jobs), self.start_delay, uuid.uuid4().hex)
output_filepath = os.path.join(self.output_dir, output_filepath)
with open(output_filepath, 'w') as jsonfile:
json.dump(jsondata, jsonfile, indent=4)
self.logger.info('Wrote job data as JSON to %s.', output_filepath)
if self.upload_results:
try:
_ = self.upload_file(output_filepath,
hash_filename=False,
prefix='output')
except Exception as err: # pylint: disable=broad-except
self.logger.error(err)
self.logger.error('Could not upload output file to bucket. '
'Copy this file from the docker container to '
'keep the data.')
def run(self, *args, **kwargs):
raise NotImplementedError
class BenchmarkingJobManager(JobManager):
# pylint: disable=arguments-differ
@defer.inlineCallbacks
def run(self, filepath, count, upload=False):
self.logger.info('Benchmarking %s jobs of file `%s`', count, filepath)
for i in range(count):
job = self.make_job(filepath)
self.all_jobs.append(job)
# stagger the delay seconds; if upload it will be staggered already
job.start(delay=self.start_delay * i * int(not upload),
upload=upload)
yield self.sleep(self.start_delay * upload)
if upload:
self.get_completed_job_count() # log during uploading
yield self.check_job_status()
class BatchProcessingJobManager(JobManager):
# pylint: disable=arguments-differ
@defer.inlineCallbacks
def run(self, filepath):
self.logger.info('Benchmarking all image/zip files in `%s`', filepath)
for f in iter_image_files(filepath):
_ = timeit.default_timer()
job = self.make_job(f)
self.all_jobs.append(job)
self.logger.info('Uploading file "%s".', f)
uploaded_path = yield job.upload_file()
self.logger.info('Uploaded file "%s" in %s seconds.',
f, timeit.default_timer() - _)
job.filepath = os.path.relpath(uploaded_path, self.upload_prefix)
job.start(delay=self.start_delay)
yield self.check_job_status() | PypiClean |
/Marl-Factory-Grid-0.1.2.tar.gz/Marl-Factory-Grid-0.1.2/marl_factory_grid/algorithms/static/TSP_item_agent.py | import numpy as np
from marl_factory_grid.algorithms.static.TSP_base_agent import TSPBaseAgent
from marl_factory_grid.modules.items import constants as i
future_planning = 7
inventory_size = 3
MODE_GET = 'Mode_Get'
MODE_BRING = 'Mode_Bring'
class TSPItemAgent(TSPBaseAgent):
def __init__(self, *args, mode=MODE_GET, **kwargs):
super(TSPItemAgent, self).__init__(*args, **kwargs)
self.mode = mode
def predict(self, *_, **__):
if self._env.state[i.ITEM].by_pos(self.state.pos) is not None:
# Translate the action_object to an integer to have the same output as any other model
action = i.ITEM_ACTION
elif self._env.state[i.DROP_OFF].by_pos(self.state.pos) is not None:
# Translate the action_object to an integer to have the same output as any other model
action = i.ITEM_ACTION
elif door := self._door_is_close():
action = self._use_door_or_move(door, i.DROP_OFF if self.mode == MODE_BRING else i.ITEM)
else:
action = self._choose()
# Translate the action_object to an integer to have the same output as any other model
try:
action_obj = next(action_i for action_i, a in enumerate(self.state.actions) if a.name == action)
except (StopIteration, UnboundLocalError):
print('Will not happen')
raise EnvironmentError
# noinspection PyUnboundLocalVariable
if self.mode == MODE_BRING and len(self._env[i.INVENTORY].by_entity(self.state)):
pass
elif self.mode == MODE_BRING and not len(self._env[i.INVENTORY].by_entity(self.state)):
self.mode = MODE_GET
elif self.mode == MODE_GET and len(self._env[i.INVENTORY].by_entity(self.state)) > inventory_size:
self.mode = MODE_BRING
else:
pass
return action_obj
def _choose(self):
target = i.DROP_OFF if self.mode == MODE_BRING else i.ITEM
if len(self._env.state[i.ITEM]) >= 1:
action = self._predict_move(target)
elif len(self._env[i.INVENTORY].by_entity(self.state)):
self.mode = MODE_BRING
action = self._predict_move(target)
else:
action = int(np.random.randint(self._env.action_space.n))
# noinspection PyUnboundLocalVariable
return action | PypiClean |
/DepartmnetHelper-1.0.0.tar.gz/DepartmnetHelper-1.0.0/application/services.py | from application import db
from application.models import Department
from application.models import User
from application.models import Employee
def get_department():
return Department.query.all()
def create_department(name):
department = Department(name=name)
db.session.add(department)
db.session.commit()
def edit_department(id, name):
Department.query.filter_by(id=id).update(dict(name=name))
db.session.commit()
def delete_department(dep_id):
Department.query.filter_by(id=dep_id).delete()
db.session.commit()
def find_user_by_name(name, password):
return User.query.filter_by(name=name, password=password).first()
def find_user_by_id(user_id):
return User.query.get(int(user_id))
def create_employee(name, surname, salary, date_of_birth, department):
employee = Employee(
name=name,
surname=surname,
salary=salary,
date_of_birth=date_of_birth,
department_ref=department
)
db.session.add(employee)
db.session.commit()
def find_employees_by_dep_ref(dep_id):
return Employee.query.filter_by(department_ref=int(dep_id))
def find_employee_by_id(id):
return Employee.query.get(int(id))
def update_employee(id, name, surname, salary, date_of_birth, department):
Employee.query.filter_by(id=id).update(dict(
name=name,
surname=surname,
salary=salary,
date_of_birth=date_of_birth,
department_ref=department
)
)
db.session.commit()
def delete_employee(id):
Employee.query.filter_by(id=id).delete()
db.session.commit()
def create_user(name, password, access_level):
user = User(
name=name,
password=password,
access_level=access_level
)
db.session.add(user)
db.session.commit()
def update_user(id, name, password):
User.query.filter_by(id=id).update(dict(name=name, password=password))
db.session.commit()
def get_user_by_name(name):
return User.query.filter_by(name=name).all() | PypiClean |
/Dts-OpenFisca-Core-34.8.0.tar.gz/Dts-OpenFisca-Core-34.8.0/openfisca_core/scripts/migrations/v24_to_25.py |
import argparse
import os
import glob
from ruamel.yaml.comments import CommentedSeq
from openfisca_core.scripts import add_tax_benefit_system_arguments, build_tax_benefit_system
from ruamel.yaml import YAML
yaml = YAML()
yaml.default_flow_style = False
yaml.width = 4096
TEST_METADATA = {'period', 'name', 'reforms', 'only_variables', 'ignore_variables', 'absolute_error_margin', 'relative_error_margin', 'description', 'keywords'}
def build_parser():
parser = argparse.ArgumentParser()
parser.add_argument('path', help = "paths (files or directories) of tests to execute", nargs = '+')
parser = add_tax_benefit_system_arguments(parser)
return parser
class Migrator(object):
def __init__(self, tax_benefit_system):
self.tax_benefit_system = tax_benefit_system
self.entities_by_plural = {entity.plural: entity for entity in self.tax_benefit_system.entities}
def migrate(self, path):
if isinstance(path, list):
for item in path:
self.migrate(item)
return
if os.path.isdir(path):
yaml_paths = glob.glob(os.path.join(path, "*.yaml"))
subdirectories = glob.glob(os.path.join(path, "*/"))
for yaml_path in yaml_paths:
self.migrate(yaml_path)
for subdirectory in subdirectories:
self.migrate(subdirectory)
return
print('Migrating {}.'.format(path))
with open(path) as yaml_file:
tests = yaml.safe_load(yaml_file)
if isinstance(tests, CommentedSeq):
migrated_tests = [self.convert_test(test) for test in tests]
else:
migrated_tests = self.convert_test(tests)
with open(path, 'w') as yaml_file:
yaml.dump(migrated_tests, yaml_file)
def convert_test(self, test):
if test.get('output'):
# This test is already converted, ignoring it
return test
result = {}
outputs = test.pop('output_variables')
inputs = test.pop('input_variables', {})
for key, value in test.items():
if key in TEST_METADATA:
result[key] = value
else:
inputs[key] = value
result['input'] = self.convert_inputs(inputs)
result['output'] = outputs
return result
def convert_inputs(self, inputs):
first_key = next(iter(inputs.keys()), None)
if first_key not in self.entities_by_plural:
return inputs
results = {}
for entity_plural, entities_description in inputs.items():
entity = self.entities_by_plural[entity_plural]
if not isinstance(entities_description, (CommentedSeq, list)):
entities_description = [entities_description]
if not entity.is_person and len(entities_description) == 1:
results[entity.key] = remove_id(entities_description[0])
continue
results[entity_plural] = self.convert_entities(entity, entities_description)
results = self.generate_missing_entities(results)
return results
def convert_entities(self, entity, entities_description):
return {
entity_description.get('id', "{}_{}".format(entity.key, index)): remove_id(entity_description)
for index, entity_description in enumerate(entities_description)
}
def generate_missing_entities(self, inputs):
for entity in self.tax_benefit_system.entities:
if entity.plural in inputs or entity.key in inputs:
continue
persons = inputs[self.tax_benefit_system.person_entity.plural]
if len(persons) == 1:
person_id = next(iter(persons))
inputs[entity.key] = {entity.roles[0].plural or entity.roles[0].key: [person_id]}
else:
inputs[entity.plural] = {
'{}_{}'.format(entity.key, index): {entity.roles[0].plural or entity.roles[0].key: [person_id]}
for index, person_id in enumerate(persons.keys())
}
return inputs
def remove_id(input_dict):
return {
key: value
for (key, value) in input_dict.items()
if key != "id"
}
def main():
parser = build_parser()
args = parser.parse_args()
paths = [os.path.abspath(path) for path in args.path]
tax_benefit_system = build_tax_benefit_system(args.country_package, args.extensions, args.reforms)
Migrator(tax_benefit_system).migrate(paths)
if __name__ == "__main__":
main() | PypiClean |
/Faker-19.3.1.tar.gz/Faker-19.3.1/faker/proxy.py | import copy
import functools
import re
from collections import OrderedDict
from random import Random
from typing import Any, Callable, Dict, List, Optional, Pattern, Sequence, Tuple, TypeVar, Union
from .config import DEFAULT_LOCALE
from .exceptions import UniquenessException
from .factory import Factory
from .generator import Generator, random
from .typing import SeedType
from .utils.distribution import choices_distribution
_UNIQUE_ATTEMPTS = 1000
RetType = TypeVar("RetType")
class Faker:
"""Proxy class capable of supporting multiple locales"""
cache_pattern: Pattern = re.compile(r"^_cached_\w*_mapping$")
generator_attrs = [
attr for attr in dir(Generator) if not attr.startswith("__") and attr not in ["seed", "seed_instance", "random"]
]
def __init__(
self,
locale: Optional[Union[str, Sequence[str], Dict[str, Union[int, float]]]] = None,
providers: Optional[List[str]] = None,
generator: Optional[Generator] = None,
includes: Optional[List[str]] = None,
use_weighting: bool = True,
**config: Any,
) -> None:
self._factory_map = OrderedDict()
self._weights = None
self._unique_proxy = UniqueProxy(self)
self._optional_proxy = OptionalProxy(self)
if isinstance(locale, str):
locales = [locale.replace("-", "_")]
# This guarantees a FIFO ordering of elements in `locales` based on the final
# locale string while discarding duplicates after processing
elif isinstance(locale, (list, tuple, set)):
locales = []
for code in locale:
if not isinstance(code, str):
raise TypeError(f'The locale "{str(code)}" must be a string.')
final_locale = code.replace("-", "_")
if final_locale not in locales:
locales.append(final_locale)
elif isinstance(locale, OrderedDict):
assert all(isinstance(v, (int, float)) for v in locale.values())
odict = OrderedDict()
for k, v in locale.items():
key = k.replace("-", "_")
odict[key] = v
locales = list(odict.keys())
self._weights = list(odict.values())
else:
locales = [DEFAULT_LOCALE]
for locale in locales:
self._factory_map[locale] = Factory.create(
locale,
providers,
generator,
includes,
use_weighting=use_weighting,
**config,
)
self._locales = locales
self._factories = list(self._factory_map.values())
def __dir__(self):
attributes = set(super(Faker, self).__dir__())
for factory in self.factories:
attributes |= {attr for attr in dir(factory) if not attr.startswith("_")}
return sorted(attributes)
def __getitem__(self, locale: str) -> Generator:
return self._factory_map[locale.replace("-", "_")]
def __getattribute__(self, attr: str) -> Any:
"""
Handles the "attribute resolution" behavior for declared members of this proxy class
The class method `seed` cannot be called from an instance.
:param attr: attribute name
:return: the appropriate attribute
"""
if attr == "seed":
msg = "Calling `.seed()` on instances is deprecated. " "Use the class method `Faker.seed()` instead."
raise TypeError(msg)
else:
return super().__getattribute__(attr)
def __getattr__(self, attr: str) -> Any:
"""
Handles cache access and proxying behavior
:param attr: attribute name
:return: the appropriate attribute
"""
if len(self._factories) == 1:
return getattr(self._factories[0], attr)
elif attr in self.generator_attrs:
msg = "Proxying calls to `%s` is not implemented in multiple locale mode." % attr
raise NotImplementedError(msg)
elif self.cache_pattern.match(attr):
msg = "Cached attribute `%s` does not exist" % attr
raise AttributeError(msg)
else:
factory = self._select_factory(attr)
return getattr(factory, attr)
def __deepcopy__(self, memodict: Optional[Dict] = None) -> "Faker":
    """
    Create a deep copy of this proxy.

    Locales, factories, the factory map, and weights are deep-copied; the
    unique/optional proxies are rebuilt (they only hold references), and the
    copy's "seen" sets are reset so uniqueness tracking starts fresh.

    :param memodict: memo dict supplied by ``copy.deepcopy`` (unused here;
        default changed from a mutable ``{}`` to ``None`` — the argument is
        never read, so behavior is unchanged)
    :return: the copied ``Faker`` instance
    """
    cls = self.__class__
    result = cls.__new__(cls)
    result._locales = copy.deepcopy(self._locales)
    result._factories = copy.deepcopy(self._factories)
    result._factory_map = copy.deepcopy(self._factory_map)
    result._weights = copy.deepcopy(self._weights)
    # NOTE(review): `UniqueProxy(self)` binds the copy's unique proxy to the
    # ORIGINAL instance, mirroring the upstream code — kept as-is to preserve
    # behavior, but worth confirming `result` was not intended here.
    result._unique_proxy = UniqueProxy(self)
    result._unique_proxy._seen = {k: {result._unique_proxy._sentinel} for k in self._unique_proxy._seen.keys()}
    # Bug fix: `cls.__new__` bypasses `__init__`, so without this line the
    # copy has no `_optional_proxy` and `copy.optional` raises AttributeError.
    result._optional_proxy = OptionalProxy(result)
    return result
def __setstate__(self, state: Any) -> None:
    # Restore pickled state by rehydrating the instance dict directly.
    self.__dict__.update(state)
@property
def unique(self) -> "UniqueProxy":
    """Accessor for the uniqueness-enforcing proxy built for this instance."""
    return self._unique_proxy
@property
def optional(self) -> "OptionalProxy":
    """Accessor for the proxy that returns either a fake value or ``None``."""
    return self._optional_proxy
def _select_factory(self, method_name: str) -> Factory:
    """
    Returns a random factory that supports the provider method

    :param method_name: Name of provider method
    :return: A factory that supports the provider method
    """
    candidates, weights = self._map_provider_method(method_name)
    if not candidates:
        raise AttributeError(f"No generator object has attribute {method_name!r}")
    if len(candidates) == 1:
        return candidates[0]
    # Weighted pick when weights were configured, uniform pick otherwise.
    if weights:
        return self._select_factory_distribution(candidates, weights)
    return self._select_factory_choice(candidates)
def _select_factory_distribution(self, factories, weights):
    # Weighted pick: delegates to choices_distribution with the shared
    # module-level `random` instance; length=1 yields a 1-element list, hence [0].
    return choices_distribution(factories, weights, random, length=1)[0]
def _select_factory_choice(self, factories):
    # Unweighted pick: uniform choice via the shared module-level `random` instance.
    return random.choice(factories)
def _map_provider_method(self, method_name: str) -> Tuple[List[Factory], Optional[List[float]]]:
    """
    Creates a 2-tuple of factories and weights for the given provider method name

    The first element of the tuple contains a list of compatible factories.
    The second element of the tuple contains a list of distribution weights.

    :param method_name: Name of provider method
    :return: 2-tuple (factories, weights)
    """
    cache_attr = f"_cached_{method_name}_mapping"
    # Serve from the per-method cache when it has been built already.
    if hasattr(self, cache_attr):
        return getattr(self, cache_attr)
    if self._weights:
        pairs = [
            (factory, weight)
            for factory, weight in zip(self.factories, self._weights)
            if hasattr(factory, method_name)
        ]
        supported, supported_weights = zip(*pairs)
        mapping = list(supported), list(supported_weights)
    else:
        supported = [factory for factory in self.factories if hasattr(factory, method_name)]  # type: ignore
        mapping = supported, None  # type: ignore
    # Cache the result so later lookups for this method are O(1).
    setattr(self, cache_attr, mapping)
    return mapping
@classmethod
def seed(cls, seed: Optional[SeedType] = None) -> None:
    """
    Seeds the shared `random.Random` object across all factories

    :param seed: seed value
    """
    Generator.seed(seed)
def seed_instance(self, seed: Optional[SeedType] = None) -> None:
    """
    Creates and seeds a new `random.Random` object for each factory

    :param seed: seed value
    """
    for generator in self._factories:
        generator.seed_instance(seed)
def seed_locale(self, locale: str, seed: Optional[SeedType] = None) -> None:
    """
    Creates and seeds a new `random.Random` object for the factory of the specified locale

    :param locale: locale string
    :param seed: seed value
    """
    normalized = locale.replace("-", "_")
    self._factory_map[normalized].seed_instance(seed)
@property
def random(self) -> Random:
    """
    Proxies `random` getter calls

    In single locale mode, this will be proxied to the `random` getter
    of the only internal `Generator` object. Subclasses will have to
    implement desired behavior in multiple locale mode.
    """
    if len(self._factories) != 1:
        raise NotImplementedError("Proxying `random` getter calls is not implemented in multiple locale mode.")
    return self._factories[0].random
@random.setter
def random(self, value: Random) -> None:
    """
    Proxies `random` setter calls

    In single locale mode, this will be proxied to the `random` setter
    of the only internal `Generator` object. Subclasses will have to
    implement desired behavior in multiple locale mode.
    """
    if len(self._factories) != 1:
        raise NotImplementedError("Proxying `random` setter calls is not implemented in multiple locale mode.")
    self._factories[0].random = value
@property
def locales(self) -> List[str]:
    """Locale strings configured on this proxy, returned as a defensive copy."""
    return [*self._locales]
@property
def weights(self) -> Optional[List[Union[int, float]]]:
    """Locale distribution weights, or ``None`` when no weights were configured."""
    return self._weights
@property
def factories(self) -> List[Generator]:
    """The internal factory objects, one per configured locale."""
    return self._factories
def items(self) -> List[Tuple[str, Generator]]:
    """Return (locale, factory) pairs for every configured locale."""
    return [(locale, factory) for locale, factory in self._factory_map.items()]
class UniqueProxy:
    """Wraps a proxy so each provider call yields a value not returned before.

    Results are tracked per (method name, args, kwargs) key; a call that keeps
    producing already-seen values raises ``UniquenessException`` once
    ``_UNIQUE_ATTEMPTS`` tries are exhausted.
    """

    def __init__(self, proxy: Faker):
        self._proxy = proxy
        self._seen: Dict = {}
        # Sentinel marks "no value yet" so that None stays a legal return value.
        self._sentinel = object()

    def clear(self) -> None:
        """Forget all previously returned values."""
        self._seen = {}

    def __getattr__(self, name: str) -> Any:
        obj = getattr(self._proxy, name)
        if not callable(obj):
            raise TypeError("Accessing non-functions through .unique is not supported.")
        return self._wrap(name, obj)

    def __getstate__(self):
        # Copy the instance dict so unpickling cannot alias our live state.
        return self.__dict__.copy()

    def __setstate__(self, state):
        self.__dict__.update(state)

    def _wrap(self, name: str, function: Callable) -> Callable:
        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            key = (name, args, tuple(sorted(kwargs.items())))
            # Seeding the per-key set with the sentinel guarantees the first
            # loop iteration always invokes the wrapped function.
            seen_values = self._seen.setdefault(key, {self._sentinel})
            candidate = self._sentinel
            for _attempt in range(_UNIQUE_ATTEMPTS):
                if candidate not in seen_values:
                    break
                candidate = function(*args, **kwargs)
            else:
                raise UniquenessException(f"Got duplicated values after {_UNIQUE_ATTEMPTS:,} iterations.")
            seen_values.add(candidate)
            return candidate

        return wrapper
class OptionalProxy:
    """
    Return either a fake value or None, with a customizable probability.
    """

    def __init__(self, proxy: Faker):
        self._proxy = proxy

    def __getattr__(self, name: str) -> Any:
        obj = getattr(self._proxy, name)
        if not callable(obj):
            raise TypeError("Accessing non-functions through .optional is not supported.")
        return self._wrap(name, obj)

    def __getstate__(self):
        # Copy the instance dict so unpickling cannot alias our live state.
        return self.__dict__.copy()

    def __setstate__(self, state):
        self.__dict__.update(state)

    def _wrap(self, name: str, function: Callable[..., RetType]) -> Callable[..., Optional[RetType]]:
        @functools.wraps(function)
        def wrapper(*args: Any, prob: float = 0.5, **kwargs: Any) -> Optional[RetType]:
            # `prob` is the chance of returning a real value; must lie in (0, 1].
            if not 0 < prob <= 1.0:
                raise ValueError("prob must be between 0 and 1")
            coin = self._proxy.boolean(chance_of_getting_true=int(prob * 100))
            return function(*args, **kwargs) if coin else None

        return wrapper
/HBT_IP_Test-1.0.1-py3-none-any.whl/HBT_IP_Test/libs/IsomDevices_pb2.py |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import IsomStdDef_pb2 as IsomStdDef__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='IsomDevices.proto',
package='Honeywell.Security.ISOM.Devices',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x11IsomDevices.proto\x12\x1fHoneywell.Security.ISOM.Devices\x1a\x10IsomStdDef.proto\"[\n\x10\x44\x65viceOperations\x12=\n\tresources\x18\x0b \x03(\x0e\x32*.Honeywell.Security.ISOM.Devices.Resources*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"c\n\x18\x44\x65viceSupportedRelations\x12=\n\trelations\x18\x0b \x03(\x0e\x32*.Honeywell.Security.ISOM.Devices.Relations*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"Q\n\x0c\x44\x65viceEvents\x12\x37\n\x06\x65vents\x18\x0b \x03(\x0e\x32\'.Honeywell.Security.ISOM.Devices.Events*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xad\x01\n\x0f\x44\x65viceOmitState\x12\n\n\x02id\x18\x0b \x01(\t\x12>\n\x05state\x18\x0c \x01(\x0e\x32/.Honeywell.Security.ISOM.Devices.DeviceOmitType\x12\x44\n\x0flastUpdatedTime\x18\r \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"d\n\x13\x44\x65viceOmitStateList\x12\x43\n\tomitState\x18\x0b \x03(\x0b\x32\x30.Honeywell.Security.ISOM.Devices.DeviceOmitState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xff\x03\n\x0fReleaseTemplate\x12?\n\nstartDelay\x18\x0b \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDurationB\x04\x90\xb5\x18\x11\x12@\n\x0bpulsePeriod\x18\x0c \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDurationB\x04\x90\xb5\x18\x11\x12?\n\npulseWidth\x18\r \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDurationB\x04\x90\xb5\x18\x11\x12\x12\n\npulseCount\x18\x0e \x01(\x04\x12\x15\n\rinvertedPulse\x18\x10 \x01(\x08\x12G\n\x12\x64\x65layBetweenTrains\x18\x11 \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDurationB\x04\x90\xb5\x18\x11\x12\x1d\n\x15pulseTrainRepeatCount\x18\x12 \x01(\x04\x12\x1a\n\x12holdStateOnRelease\x18\x13 \x01(\x08\x12\x18\n\x10startDelayInSecs\x18\x14 \x01(\x04\x12\x19\n\x11pulsePeriodInSecs\x18\x15 \x01(\x04\x12\x18\n\x10pulseWidthInSecs\x18\x16 \x01(\x04\x12 \n\x18\x64\x65layBetweenTrainsInSecs\x18\x17 \x01(\x04*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\x92\x01\n\x0eReleasePattern\x12\x14\n\x0ctemplateName\x18\x0b \x01(\t\x12\x16\n\x0etemplateDetail\x18\x0c 
\x01(\t\x12H\n\x0e\x63ustomTemplate\x18\r \x01(\x0b\x32\x30.Honeywell.Security.ISOM.Devices.ReleaseTemplate*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xb1\x01\n\x11\x44\x65viceBypassState\x12\n\n\x02id\x18\x0b \x01(\t\x12@\n\x05state\x18\x0c \x01(\x0e\x32\x31.Honeywell.Security.ISOM.Devices.DeviceBypassType\x12\x44\n\x0flastUpdatedTime\x18\r \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"j\n\x15\x44\x65viceBypassStateList\x12G\n\x0b\x62ypassState\x18\x0b \x03(\x0b\x32\x32.Honeywell.Security.ISOM.Devices.DeviceBypassState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xd8\x01\n\x12\x44\x65viceReleaseState\x12\n\n\x02id\x18\x0b \x01(\t\x12\x41\n\x05state\x18\x0c \x01(\x0e\x32\x32.Honeywell.Security.ISOM.Devices.DeviceReleaseType\x12\x14\n\x0cpresentValue\x18\r \x01(\t\x12\r\n\x05units\x18\x0e \x01(\t\x12\x44\n\x0flastUpdatedTime\x18\x0f \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"m\n\x16\x44\x65viceReleaseStateList\x12I\n\x0creleaseState\x18\x0b \x03(\x0b\x32\x33.Honeywell.Security.ISOM.Devices.DeviceReleaseState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xb3\x01\n\x12\x44\x65viceTroubleState\x12\n\n\x02id\x18\x0b \x01(\t\x12\x41\n\x05state\x18\x0c \x01(\x0e\x32\x32.Honeywell.Security.ISOM.Devices.DeviceTroubleType\x12\x44\n\x0flastUpdatedTime\x18\r \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"m\n\x16\x44\x65viceTroubleStateList\x12I\n\x0ctroubleState\x18\x0b \x03(\x0b\x32\x33.Honeywell.Security.ISOM.Devices.DeviceTroubleState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\x9c\x02\n\x11\x44\x65viceTamperState\x12\n\n\x02id\x18\x0b \x01(\t\x12@\n\x05state\x18\x0c \x01(\x0e\x32\x31.Honeywell.Security.ISOM.Devices.DeviceTamperType\x12\x33\n\x0bopenCircuit\x18\r \x01(\x0e\x32\x1e.Honeywell.Security.ISOM.State\x12\x34\n\x0c\x63loseCircuit\x18\x0e 
\x01(\x0e\x32\x1e.Honeywell.Security.ISOM.State\x12\x44\n\x0flastUpdatedTime\x18\x0f \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"j\n\x15\x44\x65viceTamperStateList\x12G\n\x0btamperState\x18\x0b \x03(\x0b\x32\x32.Honeywell.Security.ISOM.Devices.DeviceTamperState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xb7\x01\n\x14\x44\x65viceDiscoveryState\x12\n\n\x02id\x18\x0b \x01(\t\x12\x43\n\x05state\x18\x0c \x01(\x0e\x32\x34.Honeywell.Security.ISOM.Devices.DeviceDiscoveryType\x12\x44\n\x0flastUpdatedTime\x18\r \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"s\n\x18\x44\x65viceDiscoveryStateList\x12M\n\x0e\x64iscoveryState\x18\x0b \x03(\x0b\x32\x35.Honeywell.Security.ISOM.Devices.DeviceDiscoveryState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xbb\x01\n\x16\x44\x65viceSupervisionState\x12\n\n\x02id\x18\x0b \x01(\t\x12\x45\n\x05state\x18\x0c \x01(\x0e\x32\x36.Honeywell.Security.ISOM.Devices.DeviceSuperVisionType\x12\x44\n\x0flastUpdatedTime\x18\r \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"t\n\x1a\x44\x65viceSupervisionStateList\x12L\n\x0bsupervision\x18\x0b \x03(\x0b\x32\x37.Honeywell.Security.ISOM.Devices.DeviceSupervisionState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\x9c\x01\n\x0f\x44\x65viceTestState\x12\n\n\x02ID\x18\x0b \x01(\t\x12-\n\x05state\x18\x0c \x01(\x0e\x32\x1e.Honeywell.Security.ISOM.State\x12\x44\n\x0flastUpdatedTime\x18\r \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDateTimeB\x04\x90\xb5\x18\r*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"d\n\x13\x44\x65viceTestStateList\x12\x43\n\ttestState\x18\x0b \x03(\x0b\x32\x30.Honeywell.Security.ISOM.Devices.DeviceTestState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xbc\x05\n\x0b\x44\x65viceState\x12\n\n\x02id\x18\x0b \x01(\t\x12I\n\x0ctroubleState\x18\r \x01(\x0b\x32\x33.Honeywell.Security.ISOM.Devices.DeviceTroubleState\x12I\n\x0creleaseState\x18\x0e 
\x01(\x0b\x32\x33.Honeywell.Security.ISOM.Devices.DeviceReleaseState\x12\x43\n\tomitState\x18\x0f \x01(\x0b\x32\x30.Honeywell.Security.ISOM.Devices.DeviceOmitState\x12G\n\x0b\x62ypassState\x18\x10 \x01(\x0b\x32\x32.Honeywell.Security.ISOM.Devices.DeviceBypassState\x12G\n\x0btamperState\x18\x11 \x01(\x0b\x32\x32.Honeywell.Security.ISOM.Devices.DeviceTamperState\x12Q\n\x10supervisionState\x18\x13 \x01(\x0b\x32\x37.Honeywell.Security.ISOM.Devices.DeviceSupervisionState\x12M\n\x0e\x64iscoveryState\x18\x15 \x01(\x0b\x32\x35.Honeywell.Security.ISOM.Devices.DeviceDiscoveryState\x12\x43\n\ttestState\x18\x16 \x01(\x0b\x32\x30.Honeywell.Security.ISOM.Devices.DeviceTestState\x12\x43\n\x0b\x63onfigState\x18\x17 \x01(\x0b\x32..Honeywell.Security.ISOM.IsomEntityConfigState*\x08\x08\x80\xea\x30\x10\xe0\x91\x43\"X\n\x0f\x44\x65viceStateList\x12;\n\x05state\x18\x0b \x03(\x0b\x32,.Honeywell.Security.ISOM.Devices.DeviceState*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\x92\x01\n\x11\x44\x65viceIdentifiers\x12\n\n\x02id\x18\x0b \x01(\t\x12\x0c\n\x04guid\x18\x0c \x01(\t\x12\x0c\n\x04name\x18\r \x01(\t\x12>\n\x0b\x64\x65scription\x18\x0e \x01(\x0b\x32#.Honeywell.Security.ISOM.IsomStringB\x04\x90\xb5\x18\x13\x12\x0b\n\x03tag\x18\x0f \x03(\t*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"j\n\x15\x44\x65viceIdentifiersList\x12G\n\x0bidentifiers\x18\x0b \x03(\x0b\x32\x32.Honeywell.Security.ISOM.Devices.DeviceIdentifiers*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"r\n\x0e\x44\x65viceRelation\x12\n\n\x02id\x18\x0b \x01(\t\x12\x38\n\x04name\x18\x0c \x01(\x0e\x32*.Honeywell.Security.ISOM.Devices.Relations\x12\x10\n\x08\x65ntityId\x18\x0e \x01(\t*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"a\n\x12\x44\x65viceRelationList\x12\x41\n\x08relation\x18\x0b \x03(\x0b\x32/.Honeywell.Security.ISOM.Devices.DeviceRelation*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\xb6\x03\n\x0c\x44\x65viceConfig\x12G\n\x0bidentifiers\x18\x0b \x01(\x0b\x32\x32.Honeywell.Security.ISOM.Devices.DeviceIdentifiers\x12\x41\n\x08relation\x18\n 
\x03(\x0b\x32/.Honeywell.Security.ISOM.Devices.DeviceRelation\x12\x39\n\x04type\x18\x14 \x01(\x0e\x32+.Honeywell.Security.ISOM.Devices.DeviceType\x12=\n\x04omit\x18\x15 \x01(\x0e\x32/.Honeywell.Security.ISOM.Devices.DeviceOmitType\x12:\n\x07subType\x18\x17 \x01(\x0b\x32#.Honeywell.Security.ISOM.IsomStringB\x04\x90\xb5\x18\x13\x12H\n\x13supervisionInterval\x18\x18 \x01(\x0b\x32%.Honeywell.Security.ISOM.IsomDurationB\x04\x90\xb5\x18\x11\x12\x10\n\x08\x62\x65\x65pMode\x18\x19 \x01(\x08*\x08\x08\x80\xea\x30\x10\xe0\x91\x43\"[\n\x10\x44\x65viceConfigList\x12=\n\x06\x63onfig\x18\x0c \x03(\x0b\x32-.Honeywell.Security.ISOM.Devices.DeviceConfig*\x08\x08\xc0\x84=\x10\xe0\x91\x43\"\x94\x01\n\x0c\x44\x65viceEntity\x12=\n\x06\x63onfig\x18\x15 \x01(\x0b\x32-.Honeywell.Security.ISOM.Devices.DeviceConfig\x12;\n\x05state\x18\x1f \x01(\x0b\x32,.Honeywell.Security.ISOM.Devices.DeviceState*\x08\x08\xa0\xf7\x36\x10\xe0\x91\x43\"[\n\x10\x44\x65viceEntityList\x12=\n\x06\x65ntity\x18\x0b \x03(\x0b\x32-.Honeywell.Security.ISOM.Devices.DeviceEntity*\x08\x08\xc0\x84=\x10\xe0\x91\x43*\xcb\x04\n\tResources\x12\x18\n\x13supportedOperations\x10\xf2\x07\x12\x17\n\x12supportedRelations\x10\xf3\x07\x12\x14\n\x0fsupportedEvents\x10\xf4\x07\x12\x1a\n\x15supportedCapabilities\x10\xf5\x07\x12\x0f\n\nfullEntity\x10\xc2N\x12\t\n\x04info\x10\xc3N\x12\n\n\x06\x63onfig\x10\x04\x12\x0f\n\x0bidentifiers\x10\x44\x12\x0e\n\trelations\x10\xffN\x12\x11\n\x0ctroubleState\x10\xbbO\x12\x11\n\x0creleaseState\x10\xcfO\x12\x19\n\x14releaseState_s_clear\x10\xd0O\x12\x1a\n\x15releaseState_s_normal\x10\xd1O\x12\x1b\n\x16releaseState_s_release\x10\xd2O\x12\x1a\n\x15releaseState_s_toggle\x10\xd3O\x12\x0e\n\tomitState\x10\xe3O\x12\x10\n\x0b\x62ypassState\x10\x8bP\x12\x16\n\x11\x64iscovery_s_state\x10\xb3P\x12\t\n\x05state\x10 
\x12\x15\n\x10omitState_s_omit\x10\x86R\x12\x17\n\x12omitState_s_unOmit\x10\x87R\x12\x19\n\x14\x62ypassState_s_normal\x10\x88R\x12\x19\n\x14\x62ypassState_s_bypass\x10\x89R\x12\x1a\n\x15omitState_s_timedOmit\x10\x8aR\x12\x1c\n\x17omitState_s_timedUnomit\x10\x8bR\x12\x15\n\rMax_Resources\x10\x80\x80\x80\x80\x04*\x98\x03\n\tRelations\x12\x1f\n\x1a\x44\x65viceAssignedToPeripheral\x10\xc3N\x12\x1f\n\x1a\x44\x65viceConnectedToInterface\x10\xc4N\x12%\n DeviceAssignedToDeviceCollection\x10\xc5N\x12\"\n\x1d\x44\x65viceAssignedToDetectorGroup\x10\xc6N\x12\x1d\n\x18\x44\x65viceAssignedCredential\x10\xc7N\x12\x19\n\x14\x44\x65viceOwnedByAccount\x10\xc8N\x12\x1a\n\x15\x44\x65viceAssignedToMacro\x10\xc9N\x12#\n\x1e\x44\x65viceAssignedCredentialHolder\x10\xcaN\x12\x1a\n\x15\x44\x65viceAssociatedInput\x10\xcbN\x12\x1b\n\x16\x44\x65viceAssociatedOutput\x10\xccN\x12\x1b\n\x16\x44\x65viceOwnedByPartition\x10\xcdN\x12\x16\n\x11\x44\x65viceOwnedBySite\x10\xceN\x12\x15\n\rMax_Relations\x10\x80\x80\x80\x80\x04*\xea\x02\n\x06\x45vents\x12\x11\n\x0c\x63onfig_p_add\x10\x9aN\x12\x14\n\x0f\x63onfig_p_modify\x10\x9bN\x12\x14\n\x0f\x63onfig_p_delete\x10\x9cN\x12\x17\n\x12omitState_p_unOmit\x10\xf4N\x12\x15\n\x10omitState_p_omit\x10\xf5N\x12\x19\n\x14\x62ypassState_p_normal\x10\xcdS\x12\x19\n\x14\x62ypassState_p_bypass\x10\xceS\x12\x19\n\x14releaseState_p_clear\x10\xb1T\x12\x1a\n\x15releaseState_p_normal\x10\xb2T\x12\x1b\n\x16releaseState_p_release\x10\xb3T\x12\x1a\n\x15releaseState_p_toggle\x10\xb4T\x12\x1b\n\x16troubleState_p_trouble\x10\x95U\x12\x1a\n\x15troubleState_p_normal\x10\x96U\x12\x12\n\nMax_Events\x10\x80\x80\x80\x80\x04*B\n\x0e\x44\x65viceOmitType\x12\n\n\x06unOmit\x10\x0b\x12\x08\n\x04omit\x10\x0c\x12\x1a\n\x12Max_DeviceOmitType\x10\x80\x80\x80\x80\x04*Y\n\x10\x44\x65viceBypassType\x12\x1b\n\x17\x44\x65viceBypassType_normal\x10\x0b\x12\n\n\x06\x62ypass\x10\x0c\x12\x1c\n\x14Max_DeviceBypassType\x10\x80\x80\x80\x80\x04*]\n\x11\x44\x65viceReleaseType\x12\x1c\n\x18\x44\x65viceReleaseTy
pe_normal\x10\x0b\x12\x0b\n\x07release\x10\x0c\x12\x1d\n\x15Max_DeviceReleaseType\x10\x80\x80\x80\x80\x04*]\n\x11\x44\x65viceTroubleType\x12\x1c\n\x18\x44\x65viceTroubleType_normal\x10\x0b\x12\x0b\n\x07trouble\x10\x0c\x12\x1d\n\x15Max_DeviceTroubleType\x10\x80\x80\x80\x80\x04*Y\n\x10\x44\x65viceTamperType\x12\x1b\n\x17\x44\x65viceTamperType_normal\x10\x0b\x12\n\n\x06tamper\x10\x0c\x12\x1c\n\x14Max_DeviceTamperType\x10\x80\x80\x80\x80\x04*Q\n\x13\x44\x65viceDiscoveryType\x12\x0b\n\x07\x65nabled\x10\x0b\x12\x0c\n\x08\x64isabled\x10\x0c\x12\x1f\n\x17Max_DeviceDiscoveryType\x10\x80\x80\x80\x80\x04*f\n\x15\x44\x65viceSuperVisionType\x12 \n\x1c\x44\x65viceSuperVisionType_normal\x10\x0b\x12\x08\n\x04\x66\x61il\x10\x0c\x12!\n\x19Max_DeviceSuperVisionType\x10\x80\x80\x80\x80\x04*\x95\x02\n\nDeviceType\x12\t\n\x05Input\x10\x0b\x12\x0c\n\x08\x43OSensor\x10\x0c\x12\x0e\n\nGlassBreak\x10(\x12\x07\n\x03PIR\x10\x0f\x12\x11\n\rSmokeDetector\x10\x10\x12\x15\n\x11TemperatureSensor\x10\x17\x12\r\n\tKeySwitch\x10\x1d\x12\n\n\x06Output\x10\x11\x12\x0b\n\x07\x46lasher\x10\x12\x12\t\n\x05Relay\x10\x13\x12\x07\n\x03LED\x10\x14\x12\x0b\n\x07Sounder\x10\x15\x12\n\n\x06KeyPad\x10\x0e\x12\n\n\x06KeyFob\x10\'\x12\x08\n\x04\x44oor\x10\x16\x12\n\n\x06\x43\x61mera\x10\x18\x12\x0c\n\x08Recorder\x10\x19\x12\x0e\n\nThermostat\x10)\x12\x16\n\x0eMax_DeviceType\x10\x80\x80\x80\x80\x04')
,
dependencies=[IsomStdDef__pb2.DESCRIPTOR,])
_RESOURCES = _descriptor.EnumDescriptor(
name='Resources',
full_name='Honeywell.Security.ISOM.Devices.Resources',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='supportedOperations', index=0, number=1010,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='supportedRelations', index=1, number=1011,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='supportedEvents', index=2, number=1012,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='supportedCapabilities', index=3, number=1013,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='fullEntity', index=4, number=10050,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='info', index=5, number=10051,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='config', index=6, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='identifiers', index=7, number=68,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='relations', index=8, number=10111,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='troubleState', index=9, number=10171,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState', index=10, number=10191,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_s_clear', index=11, number=10192,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_s_normal', index=12, number=10193,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_s_release', index=13, number=10194,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_s_toggle', index=14, number=10195,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omitState', index=15, number=10211,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='bypassState', index=16, number=10251,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='discovery_s_state', index=17, number=10291,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='state', index=18, number=32,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omitState_s_omit', index=19, number=10502,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omitState_s_unOmit', index=20, number=10503,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='bypassState_s_normal', index=21, number=10504,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='bypassState_s_bypass', index=22, number=10505,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omitState_s_timedOmit', index=23, number=10506,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omitState_s_timedUnomit', index=24, number=10507,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Max_Resources', index=25, number=1073741824,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=5512,
serialized_end=6099,
)
_sym_db.RegisterEnumDescriptor(_RESOURCES)
Resources = enum_type_wrapper.EnumTypeWrapper(_RESOURCES)
_RELATIONS = _descriptor.EnumDescriptor(
name='Relations',
full_name='Honeywell.Security.ISOM.Devices.Relations',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DeviceAssignedToPeripheral', index=0, number=10051,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceConnectedToInterface', index=1, number=10052,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceAssignedToDeviceCollection', index=2, number=10053,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceAssignedToDetectorGroup', index=3, number=10054,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceAssignedCredential', index=4, number=10055,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceOwnedByAccount', index=5, number=10056,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceAssignedToMacro', index=6, number=10057,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceAssignedCredentialHolder', index=7, number=10058,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceAssociatedInput', index=8, number=10059,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceAssociatedOutput', index=9, number=10060,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceOwnedByPartition', index=10, number=10061,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DeviceOwnedBySite', index=11, number=10062,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Max_Relations', index=12, number=1073741824,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=6102,
serialized_end=6510,
)
_sym_db.RegisterEnumDescriptor(_RELATIONS)
Relations = enum_type_wrapper.EnumTypeWrapper(_RELATIONS)
_EVENTS = _descriptor.EnumDescriptor(
name='Events',
full_name='Honeywell.Security.ISOM.Devices.Events',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='config_p_add', index=0, number=10010,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='config_p_modify', index=1, number=10011,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='config_p_delete', index=2, number=10012,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omitState_p_unOmit', index=3, number=10100,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omitState_p_omit', index=4, number=10101,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='bypassState_p_normal', index=5, number=10701,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='bypassState_p_bypass', index=6, number=10702,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_p_clear', index=7, number=10801,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_p_normal', index=8, number=10802,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_p_release', index=9, number=10803,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='releaseState_p_toggle', index=10, number=10804,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='troubleState_p_trouble', index=11, number=10901,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='troubleState_p_normal', index=12, number=10902,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Max_Events', index=13, number=1073741824,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=6513,
serialized_end=6875,
)
_sym_db.RegisterEnumDescriptor(_EVENTS)
Events = enum_type_wrapper.EnumTypeWrapper(_EVENTS)
_DEVICEOMITTYPE = _descriptor.EnumDescriptor(
name='DeviceOmitType',
full_name='Honeywell.Security.ISOM.Devices.DeviceOmitType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='unOmit', index=0, number=11,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='omit', index=1, number=12,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Max_DeviceOmitType', index=2, number=1073741824,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=6877,
serialized_end=6943,
)
_sym_db.RegisterEnumDescriptor(_DEVICEOMITTYPE)
DeviceOmitType = enum_type_wrapper.EnumTypeWrapper(_DEVICEOMITTYPE)
_DEVICEBYPASSTYPE = _descriptor.EnumDescriptor(
name='DeviceBypassType',
full_name='Honeywell.Security.ISOM.Devices.DeviceBypassType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DeviceBypassType_normal', index=0, number=11,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='bypass', index=1, number=12,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Max_DeviceBypassType', index=2, number=1073741824,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=6945,
serialized_end=7034,
)
_sym_db.RegisterEnumDescriptor(_DEVICEBYPASSTYPE)
DeviceBypassType = enum_type_wrapper.EnumTypeWrapper(_DEVICEBYPASSTYPE)
_DEVICERELEASETYPE = _descriptor.EnumDescriptor(
name='DeviceReleaseType',
full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DeviceReleaseType_normal', index=0, number=11,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='release', index=1, number=12,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Max_DeviceReleaseType', index=2, number=1073741824,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=7036,
serialized_end=7129,
)
# NOTE(review): this file appears to be protoc-generated protobuf code (descriptor
# construction + _sym_db registration pattern). Do not hand-edit; regenerate from
# the .proto definition. The serialized_start/serialized_end values are byte
# offsets into the serialized FileDescriptorProto and must stay consistent.
_sym_db.RegisterEnumDescriptor(_DEVICERELEASETYPE)
DeviceReleaseType = enum_type_wrapper.EnumTypeWrapper(_DEVICERELEASETYPE)
# DeviceTroubleType enum: DeviceTroubleType_normal(11) / trouble(12) / Max sentinel.
_DEVICETROUBLETYPE = _descriptor.EnumDescriptor(
  name='DeviceTroubleType',
  full_name='Honeywell.Security.ISOM.Devices.DeviceTroubleType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='DeviceTroubleType_normal', index=0, number=11,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='trouble', index=1, number=12,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Max_DeviceTroubleType', index=2, number=1073741824,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=7131,
  serialized_end=7224,
)
_sym_db.RegisterEnumDescriptor(_DEVICETROUBLETYPE)
DeviceTroubleType = enum_type_wrapper.EnumTypeWrapper(_DEVICETROUBLETYPE)
# DeviceTamperType enum: DeviceTamperType_normal(11) / tamper(12) / Max sentinel.
_DEVICETAMPERTYPE = _descriptor.EnumDescriptor(
  name='DeviceTamperType',
  full_name='Honeywell.Security.ISOM.Devices.DeviceTamperType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='DeviceTamperType_normal', index=0, number=11,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='tamper', index=1, number=12,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Max_DeviceTamperType', index=2, number=1073741824,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=7226,
  serialized_end=7315,
)
_sym_db.RegisterEnumDescriptor(_DEVICETAMPERTYPE)
DeviceTamperType = enum_type_wrapper.EnumTypeWrapper(_DEVICETAMPERTYPE)
# DeviceDiscoveryType enum: enabled(11) / disabled(12) / Max sentinel.
_DEVICEDISCOVERYTYPE = _descriptor.EnumDescriptor(
  name='DeviceDiscoveryType',
  full_name='Honeywell.Security.ISOM.Devices.DeviceDiscoveryType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='enabled', index=0, number=11,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='disabled', index=1, number=12,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Max_DeviceDiscoveryType', index=2, number=1073741824,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=7317,
  serialized_end=7398,
)
_sym_db.RegisterEnumDescriptor(_DEVICEDISCOVERYTYPE)
DeviceDiscoveryType = enum_type_wrapper.EnumTypeWrapper(_DEVICEDISCOVERYTYPE)
# DeviceSuperVisionType enum: DeviceSuperVisionType_normal(11) / fail(12) / Max sentinel.
_DEVICESUPERVISIONTYPE = _descriptor.EnumDescriptor(
  name='DeviceSuperVisionType',
  full_name='Honeywell.Security.ISOM.Devices.DeviceSuperVisionType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='DeviceSuperVisionType_normal', index=0, number=11,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='fail', index=1, number=12,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Max_DeviceSuperVisionType', index=2, number=1073741824,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=7400,
  serialized_end=7502,
)
_sym_db.RegisterEnumDescriptor(_DEVICESUPERVISIONTYPE)
DeviceSuperVisionType = enum_type_wrapper.EnumTypeWrapper(_DEVICESUPERVISIONTYPE)
# DeviceType enum: kinds of physical devices (inputs, outputs, sensors, etc.).
# Note the wire numbers are not contiguous and do not follow index order.
_DEVICETYPE = _descriptor.EnumDescriptor(
  name='DeviceType',
  full_name='Honeywell.Security.ISOM.Devices.DeviceType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='Input', index=0, number=11,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='COSensor', index=1, number=12,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='GlassBreak', index=2, number=40,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='PIR', index=3, number=15,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SmokeDetector', index=4, number=16,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TemperatureSensor', index=5, number=23,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='KeySwitch', index=6, number=29,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Output', index=7, number=17,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Flasher', index=8, number=18,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Relay', index=9, number=19,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LED', index=10, number=20,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Sounder', index=11, number=21,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='KeyPad', index=12, number=14,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='KeyFob', index=13, number=39,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Door', index=14, number=22,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Camera', index=15, number=24,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Recorder', index=16, number=25,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Thermostat', index=17, number=41,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='Max_DeviceType', index=18, number=1073741824,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=7505,
  serialized_end=7782,
)
_sym_db.RegisterEnumDescriptor(_DEVICETYPE)
DeviceType = enum_type_wrapper.EnumTypeWrapper(_DEVICETYPE)
# Module-level integer constants. These appear to be the enum values of the
# file's enums hoisted to module scope (protoc exports top-level enum values
# this way) — presumably resource IDs, relation IDs, event IDs, and the
# per-type values (11/12/Max) seen in the descriptors above. TODO(review):
# confirm against the source .proto; several names (e.g. 'info' = 10051 and
# 'DeviceAssignedToPeripheral' = 10051) share values across different enums.
supportedOperations = 1010
supportedRelations = 1011
supportedEvents = 1012
supportedCapabilities = 1013
fullEntity = 10050
info = 10051
config = 4
identifiers = 68
relations = 10111
troubleState = 10171
releaseState = 10191
releaseState_s_clear = 10192
releaseState_s_normal = 10193
releaseState_s_release = 10194
releaseState_s_toggle = 10195
omitState = 10211
bypassState = 10251
discovery_s_state = 10291
state = 32
omitState_s_omit = 10502
omitState_s_unOmit = 10503
bypassState_s_normal = 10504
bypassState_s_bypass = 10505
omitState_s_timedOmit = 10506
omitState_s_timedUnomit = 10507
Max_Resources = 1073741824
# Relation identifiers (Device-to-X associations).
DeviceAssignedToPeripheral = 10051
DeviceConnectedToInterface = 10052
DeviceAssignedToDeviceCollection = 10053
DeviceAssignedToDetectorGroup = 10054
DeviceAssignedCredential = 10055
DeviceOwnedByAccount = 10056
DeviceAssignedToMacro = 10057
DeviceAssignedCredentialHolder = 10058
DeviceAssociatedInput = 10059
DeviceAssociatedOutput = 10060
DeviceOwnedByPartition = 10061
DeviceOwnedBySite = 10062
Max_Relations = 1073741824
# Event identifiers (config and state-transition events).
config_p_add = 10010
config_p_modify = 10011
config_p_delete = 10012
omitState_p_unOmit = 10100
omitState_p_omit = 10101
bypassState_p_normal = 10701
bypassState_p_bypass = 10702
releaseState_p_clear = 10801
releaseState_p_normal = 10802
releaseState_p_release = 10803
releaseState_p_toggle = 10804
troubleState_p_trouble = 10901
troubleState_p_normal = 10902
Max_Events = 1073741824
# Flattened per-enum values (match the EnumValueDescriptor numbers above).
unOmit = 11
omit = 12
Max_DeviceOmitType = 1073741824
DeviceBypassType_normal = 11
bypass = 12
Max_DeviceBypassType = 1073741824
DeviceReleaseType_normal = 11
release = 12
Max_DeviceReleaseType = 1073741824
DeviceTroubleType_normal = 11
trouble = 12
Max_DeviceTroubleType = 1073741824
DeviceTamperType_normal = 11
tamper = 12
Max_DeviceTamperType = 1073741824
enabled = 11
disabled = 12
Max_DeviceDiscoveryType = 1073741824
DeviceSuperVisionType_normal = 11
fail = 12
Max_DeviceSuperVisionType = 1073741824
# DeviceType values.
Input = 11
COSensor = 12
GlassBreak = 40
PIR = 15
SmokeDetector = 16
TemperatureSensor = 23
KeySwitch = 29
Output = 17
Flasher = 18
Relay = 19
LED = 20
Sounder = 21
KeyPad = 14
KeyFob = 39
Door = 22
Camera = 24
Recorder = 25
Thermostat = 41
Max_DeviceType = 1073741824
# Message descriptors for capability listings. Each wraps a single repeated
# enum field (type=14) on field number 11; all are extendable proto2 messages
# with extension range (1000000, 1100000). Generated code — do not hand-edit.
# DeviceOperations: repeated enum 'resources'.
_DEVICEOPERATIONS = _descriptor.Descriptor(
  name='DeviceOperations',
  full_name='Honeywell.Security.ISOM.Devices.DeviceOperations',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='resources', full_name='Honeywell.Security.ISOM.Devices.DeviceOperations.resources', index=0,
      number=11, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=72,
  serialized_end=163,
)
# DeviceSupportedRelations: repeated enum 'relations'.
_DEVICESUPPORTEDRELATIONS = _descriptor.Descriptor(
  name='DeviceSupportedRelations',
  full_name='Honeywell.Security.ISOM.Devices.DeviceSupportedRelations',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='relations', full_name='Honeywell.Security.ISOM.Devices.DeviceSupportedRelations.relations', index=0,
      number=11, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=165,
  serialized_end=264,
)
# DeviceEvents: repeated enum 'events'.
_DEVICEEVENTS = _descriptor.Descriptor(
  name='DeviceEvents',
  full_name='Honeywell.Security.ISOM.Devices.DeviceEvents',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='events', full_name='Honeywell.Security.ISOM.Devices.DeviceEvents.events', index=0,
      number=11, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=266,
  serialized_end=347,
)
# DeviceOmitState: string 'id', enum 'state' (default 11 — the *_normal/unOmit
# value), and a message-typed 'lastUpdatedTime'. The options byte string
# '\220\265\030\r' is a serialized field option — do not alter. Generated code.
_DEVICEOMITSTATE = _descriptor.Descriptor(
  name='DeviceOmitState',
  full_name='Honeywell.Security.ISOM.Devices.DeviceOmitState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceOmitState.id', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceOmitState.state', index=1,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceOmitState.lastUpdatedTime', index=2,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=350,
  serialized_end=523,
)
# DeviceOmitStateList: repeated DeviceOmitState 'omitState'.
_DEVICEOMITSTATELIST = _descriptor.Descriptor(
  name='DeviceOmitStateList',
  full_name='Honeywell.Security.ISOM.Devices.DeviceOmitStateList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='omitState', full_name='Honeywell.Security.ISOM.Devices.DeviceOmitStateList.omitState', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=525,
  serialized_end=625,
)
# ReleaseTemplate: pulse-pattern parameters for an output release. Duration
# fields are message-typed (presumably a Duration-like message — the
# '\220\265\030\021' option bytes mark them; verify against the .proto); the
# *InSecs fields are plain uint64 equivalents. Generated code — do not hand-edit.
_RELEASETEMPLATE = _descriptor.Descriptor(
  name='ReleaseTemplate',
  full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='startDelay', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.startDelay', index=0,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\021'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pulsePeriod', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.pulsePeriod', index=1,
      number=12, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\021'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pulseWidth', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.pulseWidth', index=2,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\021'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pulseCount', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.pulseCount', index=3,
      number=14, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='invertedPulse', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.invertedPulse', index=4,
      number=16, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='delayBetweenTrains', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.delayBetweenTrains', index=5,
      number=17, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\021'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pulseTrainRepeatCount', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.pulseTrainRepeatCount', index=6,
      number=18, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='holdStateOnRelease', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.holdStateOnRelease', index=7,
      number=19, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='startDelayInSecs', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.startDelayInSecs', index=8,
      number=20, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pulsePeriodInSecs', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.pulsePeriodInSecs', index=9,
      number=21, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pulseWidthInSecs', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.pulseWidthInSecs', index=10,
      number=22, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='delayBetweenTrainsInSecs', full_name='Honeywell.Security.ISOM.Devices.ReleaseTemplate.delayBetweenTrainsInSecs', index=11,
      number=23, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=628,
  serialized_end=1139,
)
# ReleasePattern: named template reference plus optional inline customTemplate.
_RELEASEPATTERN = _descriptor.Descriptor(
  name='ReleasePattern',
  full_name='Honeywell.Security.ISOM.Devices.ReleasePattern',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='templateName', full_name='Honeywell.Security.ISOM.Devices.ReleasePattern.templateName', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='templateDetail', full_name='Honeywell.Security.ISOM.Devices.ReleasePattern.templateDetail', index=1,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='customTemplate', full_name='Honeywell.Security.ISOM.Devices.ReleasePattern.customTemplate', index=2,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=1142,
  serialized_end=1288,
)
# DeviceBypassState: id / enum state (default 11) / lastUpdatedTime message.
# Same shape as DeviceOmitState. Generated code — do not hand-edit.
_DEVICEBYPASSSTATE = _descriptor.Descriptor(
  name='DeviceBypassState',
  full_name='Honeywell.Security.ISOM.Devices.DeviceBypassState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceBypassState.id', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceBypassState.state', index=1,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceBypassState.lastUpdatedTime', index=2,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=1291,
  serialized_end=1468,
)
# DeviceBypassStateList: repeated DeviceBypassState 'bypassState'.
_DEVICEBYPASSSTATELIST = _descriptor.Descriptor(
  name='DeviceBypassStateList',
  full_name='Honeywell.Security.ISOM.Devices.DeviceBypassStateList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='bypassState', full_name='Honeywell.Security.ISOM.Devices.DeviceBypassStateList.bypassState', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=1470,
  serialized_end=1576,
)
# DeviceReleaseState: id / enum state / presentValue + units strings /
# lastUpdatedTime message. Generated code — do not hand-edit.
_DEVICERELEASESTATE = _descriptor.Descriptor(
  name='DeviceReleaseState',
  full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseState.id', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseState.state', index=1,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='presentValue', full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseState.presentValue', index=2,
      number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='units', full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseState.units', index=3,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseState.lastUpdatedTime', index=4,
      number=15, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=1579,
  serialized_end=1795,
)
# DeviceReleaseStateList: repeated DeviceReleaseState 'releaseState'.
_DEVICERELEASESTATELIST = _descriptor.Descriptor(
  name='DeviceReleaseStateList',
  full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseStateList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='releaseState', full_name='Honeywell.Security.ISOM.Devices.DeviceReleaseStateList.releaseState', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=1797,
  serialized_end=1906,
)
# DeviceTroubleState: id / enum state (default 11) / lastUpdatedTime message.
# Generated code — do not hand-edit.
_DEVICETROUBLESTATE = _descriptor.Descriptor(
  name='DeviceTroubleState',
  full_name='Honeywell.Security.ISOM.Devices.DeviceTroubleState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceTroubleState.id', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceTroubleState.state', index=1,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceTroubleState.lastUpdatedTime', index=2,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=1909,
  serialized_end=2088,
)
# DeviceTroubleStateList: repeated DeviceTroubleState 'troubleState'.
_DEVICETROUBLESTATELIST = _descriptor.Descriptor(
  name='DeviceTroubleStateList',
  full_name='Honeywell.Security.ISOM.Devices.DeviceTroubleStateList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='troubleState', full_name='Honeywell.Security.ISOM.Devices.DeviceTroubleStateList.troubleState', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=2090,
  serialized_end=2199,
)
# DeviceTamperState: id / enum state plus separate openCircuit / closeCircuit
# enum fields (all default 11) / lastUpdatedTime message. Generated code.
_DEVICETAMPERSTATE = _descriptor.Descriptor(
  name='DeviceTamperState',
  full_name='Honeywell.Security.ISOM.Devices.DeviceTamperState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceTamperState.id', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceTamperState.state', index=1,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='openCircuit', full_name='Honeywell.Security.ISOM.Devices.DeviceTamperState.openCircuit', index=2,
      number=13, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closeCircuit', full_name='Honeywell.Security.ISOM.Devices.DeviceTamperState.closeCircuit', index=3,
      number=14, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceTamperState.lastUpdatedTime', index=4,
      number=15, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=2202,
  serialized_end=2486,
)
# DeviceTamperStateList: repeated DeviceTamperState 'tamperState'.
_DEVICETAMPERSTATELIST = _descriptor.Descriptor(
  name='DeviceTamperStateList',
  full_name='Honeywell.Security.ISOM.Devices.DeviceTamperStateList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='tamperState', full_name='Honeywell.Security.ISOM.Devices.DeviceTamperStateList.tamperState', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=2488,
  serialized_end=2594,
)
# DeviceDiscoveryState: id / enum state (default 11 = enabled) /
# lastUpdatedTime message. Generated code — do not hand-edit.
_DEVICEDISCOVERYSTATE = _descriptor.Descriptor(
  name='DeviceDiscoveryState',
  full_name='Honeywell.Security.ISOM.Devices.DeviceDiscoveryState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceDiscoveryState.id', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceDiscoveryState.state', index=1,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceDiscoveryState.lastUpdatedTime', index=2,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=2597,
  serialized_end=2780,
)
# DeviceDiscoveryStateList: repeated DeviceDiscoveryState 'discoveryState'.
_DEVICEDISCOVERYSTATELIST = _descriptor.Descriptor(
  name='DeviceDiscoveryStateList',
  full_name='Honeywell.Security.ISOM.Devices.DeviceDiscoveryStateList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='discoveryState', full_name='Honeywell.Security.ISOM.Devices.DeviceDiscoveryStateList.discoveryState', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=2782,
  serialized_end=2897,
)
# DeviceSupervisionState: id / enum state (default 11) / lastUpdatedTime
# message. Generated code — do not hand-edit.
_DEVICESUPERVISIONSTATE = _descriptor.Descriptor(
  name='DeviceSupervisionState',
  full_name='Honeywell.Security.ISOM.Devices.DeviceSupervisionState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceSupervisionState.id', index=0,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceSupervisionState.state', index=1,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=11,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceSupervisionState.lastUpdatedTime', index=2,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=2900,
  serialized_end=3087,
)
# DeviceSupervisionStateList: repeated DeviceSupervisionState 'supervision'.
_DEVICESUPERVISIONSTATELIST = _descriptor.Descriptor(
  name='DeviceSupervisionStateList',
  full_name='Honeywell.Security.ISOM.Devices.DeviceSupervisionStateList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='supervision', full_name='Honeywell.Security.ISOM.Devices.DeviceSupervisionStateList.supervision', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=True,
  syntax='proto2',
  extension_ranges=[(1000000, 1100000), ],
  oneofs=[
  ],
  serialized_start=3089,
  serialized_end=3205,
)
_DEVICETESTSTATE = _descriptor.Descriptor(
name='DeviceTestState',
full_name='Honeywell.Security.ISOM.Devices.DeviceTestState',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ID', full_name='Honeywell.Security.ISOM.Devices.DeviceTestState.ID', index=0,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceTestState.state', index=1,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=11,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lastUpdatedTime', full_name='Honeywell.Security.ISOM.Devices.DeviceTestState.lastUpdatedTime', index=2,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\220\265\030\r'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=3208,
serialized_end=3364,
)
_DEVICETESTSTATELIST = _descriptor.Descriptor(
name='DeviceTestStateList',
full_name='Honeywell.Security.ISOM.Devices.DeviceTestStateList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='testState', full_name='Honeywell.Security.ISOM.Devices.DeviceTestStateList.testState', index=0,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=3366,
serialized_end=3466,
)
_DEVICESTATE = _descriptor.Descriptor(
name='DeviceState',
full_name='Honeywell.Security.ISOM.Devices.DeviceState',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceState.id', index=0,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='troubleState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.troubleState', index=1,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='releaseState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.releaseState', index=2,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='omitState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.omitState', index=3,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bypassState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.bypassState', index=4,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tamperState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.tamperState', index=5,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supervisionState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.supervisionState', index=6,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='discoveryState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.discoveryState', index=7,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='testState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.testState', index=8,
number=22, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configState', full_name='Honeywell.Security.ISOM.Devices.DeviceState.configState', index=9,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(800000, 1100000), ],
oneofs=[
],
serialized_start=3469,
serialized_end=4169,
)
_DEVICESTATELIST = _descriptor.Descriptor(
name='DeviceStateList',
full_name='Honeywell.Security.ISOM.Devices.DeviceStateList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceStateList.state', index=0,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=4171,
serialized_end=4259,
)
_DEVICEIDENTIFIERS = _descriptor.Descriptor(
name='DeviceIdentifiers',
full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiers',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiers.id', index=0,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='guid', full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiers.guid', index=1,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiers.name', index=2,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='description', full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiers.description', index=3,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\220\265\030\023'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tag', full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiers.tag', index=4,
number=15, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=4262,
serialized_end=4408,
)
_DEVICEIDENTIFIERSLIST = _descriptor.Descriptor(
name='DeviceIdentifiersList',
full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiersList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='identifiers', full_name='Honeywell.Security.ISOM.Devices.DeviceIdentifiersList.identifiers', index=0,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=4410,
serialized_end=4516,
)
_DEVICERELATION = _descriptor.Descriptor(
name='DeviceRelation',
full_name='Honeywell.Security.ISOM.Devices.DeviceRelation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Honeywell.Security.ISOM.Devices.DeviceRelation.id', index=0,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='Honeywell.Security.ISOM.Devices.DeviceRelation.name', index=1,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=10051,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='entityId', full_name='Honeywell.Security.ISOM.Devices.DeviceRelation.entityId', index=2,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=4518,
serialized_end=4632,
)
_DEVICERELATIONLIST = _descriptor.Descriptor(
name='DeviceRelationList',
full_name='Honeywell.Security.ISOM.Devices.DeviceRelationList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='relation', full_name='Honeywell.Security.ISOM.Devices.DeviceRelationList.relation', index=0,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=4634,
serialized_end=4731,
)
_DEVICECONFIG = _descriptor.Descriptor(
name='DeviceConfig',
full_name='Honeywell.Security.ISOM.Devices.DeviceConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='identifiers', full_name='Honeywell.Security.ISOM.Devices.DeviceConfig.identifiers', index=0,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='relation', full_name='Honeywell.Security.ISOM.Devices.DeviceConfig.relation', index=1,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='Honeywell.Security.ISOM.Devices.DeviceConfig.type', index=2,
number=20, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=11,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='omit', full_name='Honeywell.Security.ISOM.Devices.DeviceConfig.omit', index=3,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=11,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subType', full_name='Honeywell.Security.ISOM.Devices.DeviceConfig.subType', index=4,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\220\265\030\023'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supervisionInterval', full_name='Honeywell.Security.ISOM.Devices.DeviceConfig.supervisionInterval', index=5,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\220\265\030\021'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='beepMode', full_name='Honeywell.Security.ISOM.Devices.DeviceConfig.beepMode', index=6,
number=25, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(800000, 1100000), ],
oneofs=[
],
serialized_start=4734,
serialized_end=5172,
)
_DEVICECONFIGLIST = _descriptor.Descriptor(
name='DeviceConfigList',
full_name='Honeywell.Security.ISOM.Devices.DeviceConfigList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='config', full_name='Honeywell.Security.ISOM.Devices.DeviceConfigList.config', index=0,
number=12, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=5174,
serialized_end=5265,
)
_DEVICEENTITY = _descriptor.Descriptor(
name='DeviceEntity',
full_name='Honeywell.Security.ISOM.Devices.DeviceEntity',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='config', full_name='Honeywell.Security.ISOM.Devices.DeviceEntity.config', index=0,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='state', full_name='Honeywell.Security.ISOM.Devices.DeviceEntity.state', index=1,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(900000, 1100000), ],
oneofs=[
],
serialized_start=5268,
serialized_end=5416,
)
_DEVICEENTITYLIST = _descriptor.Descriptor(
name='DeviceEntityList',
full_name='Honeywell.Security.ISOM.Devices.DeviceEntityList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='entity', full_name='Honeywell.Security.ISOM.Devices.DeviceEntityList.entity', index=0,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000000, 1100000), ],
oneofs=[
],
serialized_start=5418,
serialized_end=5509,
)
# ---------------------------------------------------------------------------
# Generated fix-up section: the Descriptor objects above were built with
# message_type=None / enum_type=None because the referenced descriptors did
# not yet exist.  Now that every descriptor is defined, wire each field to
# its message or enum type (including types imported from IsomStdDef__pb2),
# then register every message and enum with the file-level DESCRIPTOR and
# the default symbol database.  Do not edit by hand; regenerate with protoc.
# ---------------------------------------------------------------------------
_DEVICEOPERATIONS.fields_by_name['resources'].enum_type = _RESOURCES
_DEVICESUPPORTEDRELATIONS.fields_by_name['relations'].enum_type = _RELATIONS
_DEVICEEVENTS.fields_by_name['events'].enum_type = _EVENTS
_DEVICEOMITSTATE.fields_by_name['state'].enum_type = _DEVICEOMITTYPE
_DEVICEOMITSTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICEOMITSTATELIST.fields_by_name['omitState'].message_type = _DEVICEOMITSTATE
_RELEASETEMPLATE.fields_by_name['startDelay'].message_type = IsomStdDef__pb2._ISOMDURATION
_RELEASETEMPLATE.fields_by_name['pulsePeriod'].message_type = IsomStdDef__pb2._ISOMDURATION
_RELEASETEMPLATE.fields_by_name['pulseWidth'].message_type = IsomStdDef__pb2._ISOMDURATION
_RELEASETEMPLATE.fields_by_name['delayBetweenTrains'].message_type = IsomStdDef__pb2._ISOMDURATION
_RELEASEPATTERN.fields_by_name['customTemplate'].message_type = _RELEASETEMPLATE
_DEVICEBYPASSSTATE.fields_by_name['state'].enum_type = _DEVICEBYPASSTYPE
_DEVICEBYPASSSTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICEBYPASSSTATELIST.fields_by_name['bypassState'].message_type = _DEVICEBYPASSSTATE
_DEVICERELEASESTATE.fields_by_name['state'].enum_type = _DEVICERELEASETYPE
_DEVICERELEASESTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICERELEASESTATELIST.fields_by_name['releaseState'].message_type = _DEVICERELEASESTATE
_DEVICETROUBLESTATE.fields_by_name['state'].enum_type = _DEVICETROUBLETYPE
_DEVICETROUBLESTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICETROUBLESTATELIST.fields_by_name['troubleState'].message_type = _DEVICETROUBLESTATE
_DEVICETAMPERSTATE.fields_by_name['state'].enum_type = _DEVICETAMPERTYPE
_DEVICETAMPERSTATE.fields_by_name['openCircuit'].enum_type = IsomStdDef__pb2._STATE
_DEVICETAMPERSTATE.fields_by_name['closeCircuit'].enum_type = IsomStdDef__pb2._STATE
_DEVICETAMPERSTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICETAMPERSTATELIST.fields_by_name['tamperState'].message_type = _DEVICETAMPERSTATE
_DEVICEDISCOVERYSTATE.fields_by_name['state'].enum_type = _DEVICEDISCOVERYTYPE
_DEVICEDISCOVERYSTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICEDISCOVERYSTATELIST.fields_by_name['discoveryState'].message_type = _DEVICEDISCOVERYSTATE
_DEVICESUPERVISIONSTATE.fields_by_name['state'].enum_type = _DEVICESUPERVISIONTYPE
_DEVICESUPERVISIONSTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICESUPERVISIONSTATELIST.fields_by_name['supervision'].message_type = _DEVICESUPERVISIONSTATE
# NOTE(review): DeviceTestState.state uses the shared IsomStdDef State enum,
# unlike the other *State messages which use device-specific enums.
_DEVICETESTSTATE.fields_by_name['state'].enum_type = IsomStdDef__pb2._STATE
_DEVICETESTSTATE.fields_by_name['lastUpdatedTime'].message_type = IsomStdDef__pb2._ISOMDATETIME
_DEVICETESTSTATELIST.fields_by_name['testState'].message_type = _DEVICETESTSTATE
_DEVICESTATE.fields_by_name['troubleState'].message_type = _DEVICETROUBLESTATE
_DEVICESTATE.fields_by_name['releaseState'].message_type = _DEVICERELEASESTATE
_DEVICESTATE.fields_by_name['omitState'].message_type = _DEVICEOMITSTATE
_DEVICESTATE.fields_by_name['bypassState'].message_type = _DEVICEBYPASSSTATE
_DEVICESTATE.fields_by_name['tamperState'].message_type = _DEVICETAMPERSTATE
_DEVICESTATE.fields_by_name['supervisionState'].message_type = _DEVICESUPERVISIONSTATE
_DEVICESTATE.fields_by_name['discoveryState'].message_type = _DEVICEDISCOVERYSTATE
_DEVICESTATE.fields_by_name['testState'].message_type = _DEVICETESTSTATE
_DEVICESTATE.fields_by_name['configState'].message_type = IsomStdDef__pb2._ISOMENTITYCONFIGSTATE
_DEVICESTATELIST.fields_by_name['state'].message_type = _DEVICESTATE
_DEVICEIDENTIFIERS.fields_by_name['description'].message_type = IsomStdDef__pb2._ISOMSTRING
_DEVICEIDENTIFIERSLIST.fields_by_name['identifiers'].message_type = _DEVICEIDENTIFIERS
_DEVICERELATION.fields_by_name['name'].enum_type = _RELATIONS
_DEVICERELATIONLIST.fields_by_name['relation'].message_type = _DEVICERELATION
_DEVICECONFIG.fields_by_name['identifiers'].message_type = _DEVICEIDENTIFIERS
_DEVICECONFIG.fields_by_name['relation'].message_type = _DEVICERELATION
_DEVICECONFIG.fields_by_name['type'].enum_type = _DEVICETYPE
_DEVICECONFIG.fields_by_name['omit'].enum_type = _DEVICEOMITTYPE
_DEVICECONFIG.fields_by_name['subType'].message_type = IsomStdDef__pb2._ISOMSTRING
_DEVICECONFIG.fields_by_name['supervisionInterval'].message_type = IsomStdDef__pb2._ISOMDURATION
_DEVICECONFIGLIST.fields_by_name['config'].message_type = _DEVICECONFIG
_DEVICEENTITY.fields_by_name['config'].message_type = _DEVICECONFIG
_DEVICEENTITY.fields_by_name['state'].message_type = _DEVICESTATE
_DEVICEENTITYLIST.fields_by_name['entity'].message_type = _DEVICEENTITY
# Register every top-level message descriptor with the file descriptor so
# lookups by message name resolve.
DESCRIPTOR.message_types_by_name['DeviceOperations'] = _DEVICEOPERATIONS
DESCRIPTOR.message_types_by_name['DeviceSupportedRelations'] = _DEVICESUPPORTEDRELATIONS
DESCRIPTOR.message_types_by_name['DeviceEvents'] = _DEVICEEVENTS
DESCRIPTOR.message_types_by_name['DeviceOmitState'] = _DEVICEOMITSTATE
DESCRIPTOR.message_types_by_name['DeviceOmitStateList'] = _DEVICEOMITSTATELIST
DESCRIPTOR.message_types_by_name['ReleaseTemplate'] = _RELEASETEMPLATE
DESCRIPTOR.message_types_by_name['ReleasePattern'] = _RELEASEPATTERN
DESCRIPTOR.message_types_by_name['DeviceBypassState'] = _DEVICEBYPASSSTATE
DESCRIPTOR.message_types_by_name['DeviceBypassStateList'] = _DEVICEBYPASSSTATELIST
DESCRIPTOR.message_types_by_name['DeviceReleaseState'] = _DEVICERELEASESTATE
DESCRIPTOR.message_types_by_name['DeviceReleaseStateList'] = _DEVICERELEASESTATELIST
DESCRIPTOR.message_types_by_name['DeviceTroubleState'] = _DEVICETROUBLESTATE
DESCRIPTOR.message_types_by_name['DeviceTroubleStateList'] = _DEVICETROUBLESTATELIST
DESCRIPTOR.message_types_by_name['DeviceTamperState'] = _DEVICETAMPERSTATE
DESCRIPTOR.message_types_by_name['DeviceTamperStateList'] = _DEVICETAMPERSTATELIST
DESCRIPTOR.message_types_by_name['DeviceDiscoveryState'] = _DEVICEDISCOVERYSTATE
DESCRIPTOR.message_types_by_name['DeviceDiscoveryStateList'] = _DEVICEDISCOVERYSTATELIST
DESCRIPTOR.message_types_by_name['DeviceSupervisionState'] = _DEVICESUPERVISIONSTATE
DESCRIPTOR.message_types_by_name['DeviceSupervisionStateList'] = _DEVICESUPERVISIONSTATELIST
DESCRIPTOR.message_types_by_name['DeviceTestState'] = _DEVICETESTSTATE
DESCRIPTOR.message_types_by_name['DeviceTestStateList'] = _DEVICETESTSTATELIST
DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE
DESCRIPTOR.message_types_by_name['DeviceStateList'] = _DEVICESTATELIST
DESCRIPTOR.message_types_by_name['DeviceIdentifiers'] = _DEVICEIDENTIFIERS
DESCRIPTOR.message_types_by_name['DeviceIdentifiersList'] = _DEVICEIDENTIFIERSLIST
DESCRIPTOR.message_types_by_name['DeviceRelation'] = _DEVICERELATION
DESCRIPTOR.message_types_by_name['DeviceRelationList'] = _DEVICERELATIONLIST
DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
DESCRIPTOR.message_types_by_name['DeviceConfigList'] = _DEVICECONFIGLIST
DESCRIPTOR.message_types_by_name['DeviceEntity'] = _DEVICEENTITY
DESCRIPTOR.message_types_by_name['DeviceEntityList'] = _DEVICEENTITYLIST
# Register the file-level enum descriptors likewise.
DESCRIPTOR.enum_types_by_name['Resources'] = _RESOURCES
DESCRIPTOR.enum_types_by_name['Relations'] = _RELATIONS
DESCRIPTOR.enum_types_by_name['Events'] = _EVENTS
DESCRIPTOR.enum_types_by_name['DeviceOmitType'] = _DEVICEOMITTYPE
DESCRIPTOR.enum_types_by_name['DeviceBypassType'] = _DEVICEBYPASSTYPE
DESCRIPTOR.enum_types_by_name['DeviceReleaseType'] = _DEVICERELEASETYPE
DESCRIPTOR.enum_types_by_name['DeviceTroubleType'] = _DEVICETROUBLETYPE
DESCRIPTOR.enum_types_by_name['DeviceTamperType'] = _DEVICETAMPERTYPE
DESCRIPTOR.enum_types_by_name['DeviceDiscoveryType'] = _DEVICEDISCOVERYTYPE
DESCRIPTOR.enum_types_by_name['DeviceSuperVisionType'] = _DEVICESUPERVISIONTYPE
DESCRIPTOR.enum_types_by_name['DeviceType'] = _DEVICETYPE
# Make the whole file descriptor discoverable via the default symbol database.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Generated message classes: for each descriptor above, build the concrete
# Python message class through the reflection metaclass and register it with
# the default symbol database.  The @@protoc_insertion_point markers are
# used by protoc plugins and must be preserved verbatim.  Do not edit by
# hand; regenerate with protoc.
# ---------------------------------------------------------------------------
DeviceOperations = _reflection.GeneratedProtocolMessageType('DeviceOperations', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEOPERATIONS,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceOperations)
  })
_sym_db.RegisterMessage(DeviceOperations)

DeviceSupportedRelations = _reflection.GeneratedProtocolMessageType('DeviceSupportedRelations', (_message.Message,), {
  'DESCRIPTOR' : _DEVICESUPPORTEDRELATIONS,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceSupportedRelations)
  })
_sym_db.RegisterMessage(DeviceSupportedRelations)

DeviceEvents = _reflection.GeneratedProtocolMessageType('DeviceEvents', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEEVENTS,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceEvents)
  })
_sym_db.RegisterMessage(DeviceEvents)

DeviceOmitState = _reflection.GeneratedProtocolMessageType('DeviceOmitState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEOMITSTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceOmitState)
  })
_sym_db.RegisterMessage(DeviceOmitState)

DeviceOmitStateList = _reflection.GeneratedProtocolMessageType('DeviceOmitStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEOMITSTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceOmitStateList)
  })
_sym_db.RegisterMessage(DeviceOmitStateList)

ReleaseTemplate = _reflection.GeneratedProtocolMessageType('ReleaseTemplate', (_message.Message,), {
  'DESCRIPTOR' : _RELEASETEMPLATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.ReleaseTemplate)
  })
_sym_db.RegisterMessage(ReleaseTemplate)

ReleasePattern = _reflection.GeneratedProtocolMessageType('ReleasePattern', (_message.Message,), {
  'DESCRIPTOR' : _RELEASEPATTERN,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.ReleasePattern)
  })
_sym_db.RegisterMessage(ReleasePattern)

DeviceBypassState = _reflection.GeneratedProtocolMessageType('DeviceBypassState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEBYPASSSTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceBypassState)
  })
_sym_db.RegisterMessage(DeviceBypassState)

DeviceBypassStateList = _reflection.GeneratedProtocolMessageType('DeviceBypassStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEBYPASSSTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceBypassStateList)
  })
_sym_db.RegisterMessage(DeviceBypassStateList)

DeviceReleaseState = _reflection.GeneratedProtocolMessageType('DeviceReleaseState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICERELEASESTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceReleaseState)
  })
_sym_db.RegisterMessage(DeviceReleaseState)

DeviceReleaseStateList = _reflection.GeneratedProtocolMessageType('DeviceReleaseStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICERELEASESTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceReleaseStateList)
  })
_sym_db.RegisterMessage(DeviceReleaseStateList)

DeviceTroubleState = _reflection.GeneratedProtocolMessageType('DeviceTroubleState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICETROUBLESTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceTroubleState)
  })
_sym_db.RegisterMessage(DeviceTroubleState)

DeviceTroubleStateList = _reflection.GeneratedProtocolMessageType('DeviceTroubleStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICETROUBLESTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceTroubleStateList)
  })
_sym_db.RegisterMessage(DeviceTroubleStateList)

DeviceTamperState = _reflection.GeneratedProtocolMessageType('DeviceTamperState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICETAMPERSTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceTamperState)
  })
_sym_db.RegisterMessage(DeviceTamperState)

DeviceTamperStateList = _reflection.GeneratedProtocolMessageType('DeviceTamperStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICETAMPERSTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceTamperStateList)
  })
_sym_db.RegisterMessage(DeviceTamperStateList)

DeviceDiscoveryState = _reflection.GeneratedProtocolMessageType('DeviceDiscoveryState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEDISCOVERYSTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceDiscoveryState)
  })
_sym_db.RegisterMessage(DeviceDiscoveryState)

DeviceDiscoveryStateList = _reflection.GeneratedProtocolMessageType('DeviceDiscoveryStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEDISCOVERYSTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceDiscoveryStateList)
  })
_sym_db.RegisterMessage(DeviceDiscoveryStateList)

DeviceSupervisionState = _reflection.GeneratedProtocolMessageType('DeviceSupervisionState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICESUPERVISIONSTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceSupervisionState)
  })
_sym_db.RegisterMessage(DeviceSupervisionState)

DeviceSupervisionStateList = _reflection.GeneratedProtocolMessageType('DeviceSupervisionStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICESUPERVISIONSTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceSupervisionStateList)
  })
_sym_db.RegisterMessage(DeviceSupervisionStateList)

DeviceTestState = _reflection.GeneratedProtocolMessageType('DeviceTestState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICETESTSTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceTestState)
  })
_sym_db.RegisterMessage(DeviceTestState)

DeviceTestStateList = _reflection.GeneratedProtocolMessageType('DeviceTestStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICETESTSTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceTestStateList)
  })
_sym_db.RegisterMessage(DeviceTestStateList)

DeviceState = _reflection.GeneratedProtocolMessageType('DeviceState', (_message.Message,), {
  'DESCRIPTOR' : _DEVICESTATE,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceState)
  })
_sym_db.RegisterMessage(DeviceState)

DeviceStateList = _reflection.GeneratedProtocolMessageType('DeviceStateList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICESTATELIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceStateList)
  })
_sym_db.RegisterMessage(DeviceStateList)

DeviceIdentifiers = _reflection.GeneratedProtocolMessageType('DeviceIdentifiers', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEIDENTIFIERS,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceIdentifiers)
  })
_sym_db.RegisterMessage(DeviceIdentifiers)

DeviceIdentifiersList = _reflection.GeneratedProtocolMessageType('DeviceIdentifiersList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICEIDENTIFIERSLIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceIdentifiersList)
  })
_sym_db.RegisterMessage(DeviceIdentifiersList)

DeviceRelation = _reflection.GeneratedProtocolMessageType('DeviceRelation', (_message.Message,), {
  'DESCRIPTOR' : _DEVICERELATION,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceRelation)
  })
_sym_db.RegisterMessage(DeviceRelation)

DeviceRelationList = _reflection.GeneratedProtocolMessageType('DeviceRelationList', (_message.Message,), {
  'DESCRIPTOR' : _DEVICERELATIONLIST,
  '__module__' : 'IsomDevices_pb2'
  # @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceRelationList)
  })
_sym_db.RegisterMessage(DeviceRelationList)
DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_message.Message,), {
'DESCRIPTOR' : _DEVICECONFIG,
'__module__' : 'IsomDevices_pb2'
# @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceConfig)
})
_sym_db.RegisterMessage(DeviceConfig)
DeviceConfigList = _reflection.GeneratedProtocolMessageType('DeviceConfigList', (_message.Message,), {
'DESCRIPTOR' : _DEVICECONFIGLIST,
'__module__' : 'IsomDevices_pb2'
# @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceConfigList)
})
_sym_db.RegisterMessage(DeviceConfigList)
DeviceEntity = _reflection.GeneratedProtocolMessageType('DeviceEntity', (_message.Message,), {
'DESCRIPTOR' : _DEVICEENTITY,
'__module__' : 'IsomDevices_pb2'
# @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceEntity)
})
_sym_db.RegisterMessage(DeviceEntity)
DeviceEntityList = _reflection.GeneratedProtocolMessageType('DeviceEntityList', (_message.Message,), {
'DESCRIPTOR' : _DEVICEENTITYLIST,
'__module__' : 'IsomDevices_pb2'
# @@protoc_insertion_point(class_scope:Honeywell.Security.ISOM.Devices.DeviceEntityList)
})
_sym_db.RegisterMessage(DeviceEntityList)
_DEVICEOMITSTATE.fields_by_name['lastUpdatedTime']._options = None
_RELEASETEMPLATE.fields_by_name['startDelay']._options = None
_RELEASETEMPLATE.fields_by_name['pulsePeriod']._options = None
_RELEASETEMPLATE.fields_by_name['pulseWidth']._options = None
_RELEASETEMPLATE.fields_by_name['delayBetweenTrains']._options = None
_DEVICEBYPASSSTATE.fields_by_name['lastUpdatedTime']._options = None
_DEVICERELEASESTATE.fields_by_name['lastUpdatedTime']._options = None
_DEVICETROUBLESTATE.fields_by_name['lastUpdatedTime']._options = None
_DEVICETAMPERSTATE.fields_by_name['lastUpdatedTime']._options = None
_DEVICEDISCOVERYSTATE.fields_by_name['lastUpdatedTime']._options = None
_DEVICESUPERVISIONSTATE.fields_by_name['lastUpdatedTime']._options = None
_DEVICETESTSTATE.fields_by_name['lastUpdatedTime']._options = None
_DEVICEIDENTIFIERS.fields_by_name['description']._options = None
_DEVICECONFIG.fields_by_name['subType']._options = None
_DEVICECONFIG.fields_by_name['supervisionInterval']._options = None
# @@protoc_insertion_point(module_scope) | PypiClean |
/IMNN-0.3.2-py3-none-any.whl/imnn/lfi/gaussian_approximation.py | import jax
import jax.numpy as np
from jax.scipy.stats import norm, multivariate_normal
from imnn.lfi import LikelihoodFreeInference
class GaussianApproximation(LikelihoodFreeInference):
    """Uses Fisher information and parameter estimates to approximate marginals

    Since the inverse of the Fisher information matrix describes the minimum
    variance of some estimator we can use it to make an approximate (Gaussian-
    distributed) estimate of the target distribution. Note that this will not
    reflect the true shape of the target distribution and is as likely to
    underestimate the distribution as overestimate it. Furthermore, if the
    Fisher information matrix is calculated far from the estimate of the
    parameter values then its value may not be representative of the Fisher
    information at that position and so the variance estimated from its inverse
    may be incorrect.

    Parameters
    ----------
    parameter_estimates: float(n_targets, n_params)
        The parameter estimates of each target data
    invF: float(n_targets, n_params, n_params)
        The inverse Fisher information matrix for each target
    marginals: list of lists
        The 1D and 2D marginal distribution for each target

    Todo
    ----
    type checking and pytests need implementing
    """
    def __init__(self, parameter_estimates, invF, prior, gridsize=100):
        """Constructor method

        Parameters
        ----------
        parameter_estimates: float(n_targets, n_params)
            The parameter estimates of each target data
        invF: float(n_targets, n_params, n_params)
            The inverse Fisher information matrix for each target
        prior: fn
            A prior distribution which can be evaluated and sampled from
            (should also contain a ``low`` and a ``high`` attribute with
            appropriate ranges)
        gridsize : int or list, default=100
            The number of grid points to evaluate the marginal distribution on
            for every parameter (int) or each parameter (list)
        """
        # NOTE(review): self.ranges and self.gridsize (used in get_marginals)
        # are assumed to be set by LikelihoodFreeInference.__init__ from the
        # prior -- confirm in the parent class.
        super().__init__(
            prior=prior,
            gridsize=gridsize)
        # Promote a scalar to (1,) and a 1D vector to (1, n_params) so the
        # stored estimates are always (n_targets, n_params); a scalar passes
        # through both branches and ends up (1, 1).
        if len(parameter_estimates.shape) == 0:
            parameter_estimates = np.expand_dims(parameter_estimates, 0)
        if len(parameter_estimates.shape) == 1:
            parameter_estimates = np.expand_dims(parameter_estimates, 0)
        self.parameter_estimates = parameter_estimates
        self.n_targets = self.parameter_estimates.shape[0]
        self.n_params = self.parameter_estimates.shape[-1]
        # NOTE(review): invF is stored as given; unlike parameter_estimates it
        # is neither promoted nor validated against the documented
        # (n_targets, n_params, n_params) shape -- confirm callers comply.
        self.invF = invF
        # Precompute the marginals on the default (prior-derived) grid.
        self.marginals = self.get_marginals()
    def get_marginals(self, parameter_estimates=None, invF=None, ranges=None,
                      gridsize=None):
        """
        Creates list of 1D and 2D marginal distributions ready for plotting

        For every parameter a 1D Gaussian (mean = parameter estimate,
        variance = the corresponding diagonal element of the inverse Fisher
        matrix) is evaluated on that parameter's grid, and for every pair of
        parameters a 2D Gaussian (covariance = the corresponding 2x2 submatrix
        of the inverse Fisher matrix) is evaluated on the 2D grid. The result
        is a lower-triangular list of lists: element [row][column] with
        column == row holds the 1D marginal and column < row the 2D marginal.

        Parameters
        ----------
        parameter_estimates: float(n_targets, n_params) or None, default=None
            The parameter estimates of each target data. If None the class
            instance parameter estimates are used
        invF: float(n_targets, n_params, n_params) or None, default=None
            The inverse Fisher information matrix for each target. If None the
            class instance inverse Fisher information matrices are used
        ranges : list or None, default=None
            A list of arrays containing the gridpoints for the marginal
            distribution for each parameter. If None the class instance ranges
            are used determined by the prior range
        gridsize : list or None, default=None
            If using own `ranges` then the gridsize for these ranges must be
            passed (not checked)

        Returns
        -------
        list of lists:
            The 1D and 2D marginal distributions for each parameter (or pair)

        Todo
        ----
        Need to multiply the distribution by the prior to get the posterior
        Maybe move to TensorFlow probability?
        Make sure that using several Fisher estimates works
        """
        if parameter_estimates is None:
            parameter_estimates = self.parameter_estimates
        n_targets = parameter_estimates.shape[0]
        if invF is None:
            invF = self.invF
        if ranges is None:
            ranges = self.ranges
        if gridsize is None:
            gridsize = self.gridsize
        marginals = []
        for row in range(self.n_params):
            marginals.append([])
            for column in range(self.n_params):
                if column == row:
                    # Diagonal: 1D Gaussian per target, vectorised over targets.
                    # The lambda is invoked immediately by vmap, so capturing
                    # the loop variable `column` here is safe.
                    marginals[row].append(
                        jax.vmap(
                            lambda mean, _invF: norm.pdf(
                                ranges[column],
                                mean,
                                np.sqrt(_invF)))(
                            parameter_estimates[:, column],
                            invF[:, column, column]))
                elif column < row:
                    # Lower triangle: evaluate a 2D Gaussian on the flattened
                    # (row, column) grid, then reshape back to the grid shape.
                    X, Y = np.meshgrid(ranges[row], ranges[column])
                    unravelled = np.vstack([X.ravel(), Y.ravel()]).T
                    marginals[row].append(
                        jax.vmap(
                            lambda mean, _invF: multivariate_normal.pdf(
                                unravelled,
                                mean,
                                _invF).reshape(
                                    ((gridsize[column], gridsize[row]))))(
                            parameter_estimates[:, [row, column]],
                            # Fancy indexing pulls out the per-target 2x2
                            # covariance submatrix for (row, column).
                            invF[:,
                                 [row, row, column, column],
                                 [row, column, row, column]].reshape(
                                (n_targets, 2, 2))))
        return marginals
/Game%20Scorer-1.0.tar.gz/Game Scorer-1.0/README.md | # Тема: підбір комп'ютерних ігор на основі профілю користувача
### Короткий опис
На основі профілю, який ви зможете створити після
опитування про ваші вподобання, алгоритмічно-обрана рекомендація ігор, які можуть вам сподобатись.
---
Оскільки рекомендація має бути персональною для кожного користувача, підхід за допомогою нейронних мереж не спрацює,
тому що він потребує персоналізованого набору даних та достатню кількість часу для їх опрацювання.
Тому мій алгоритм використовує акуратно виведені схеми підбору правильних комбінацій ігор для всіх випадків.
В кінцевому результаті ви отримуєте 5 ігор, які вам неодмінно сподобаються, прямо у вікні вашого браузера.
---
### Вхідні та вихідні дані програми
Моя програма збирає дані з API ігрового сервісу IGDB. Ці дані я обробляю та зберігаю в базі даних ```data/games.json```
Всі дані про ці ігри в результаті проходять через складну павутину алгоритмів, і, враховуючи вибір користувача,
видають 5 найкращих варіантів ігор для нього\неї. Звичайно, кожної спроби ігри будуть різні, оскільки алгоритм
складніший, ніж просто "вибрати найкращу по категоріях".
---
### Структура програми
Програма складається з бек-енду та фронт-енду.
Бек-енд повністю реалізований в директоріях `scripts` та `data`
- В `scripts` знаходяться всі основні алгоритми програми. Для прикладу, саме там є `api_shell.py`, моя особиста
оболонка для IGDB API. Також там знаходиться `game_adt.py`, а це файл, де організовані всі ADT та класи для
обробки інформації у цьому проекті.
- А от в `data` потужних алгоритмів нема. Це - директорія з основною базою даних проекту.
Саме в `data` знаходиться `games.json`, а також додаткова інформація для проекту та всі константи балансу
(числа, від яких залежить поведінка алгоритму).
Фронт-енд, натомість, зосереджений у `templates`, `static`, та файлі `flask_app.py`
- `templates` - це директорія з усіма HTML сторінками, які використовуються в проекті.
- У `static` містяться всі картинки, а саме логотипи ігор. Їх понад 200.
- `flask_app.py` взагалі є Головним файлом проекту. Він об'єднує все інше, і створює динамічний вебсайт
для демонстрації алгоритму.
---
### Коротка інструкція
Для використання програми, запустіть файл `flask_app.py`
після переходу на IP адресу 127.0.0.1:5000, ви зможете інтерактивно працювати з веб-сайтом.
---
### Тестування
Для тестування програми розроблені модулі `game_adt_test.py` та `api_shell_test.py` всередині директорії `scripts`
© Ярема Міщенко | PypiClean |
/FlaskCms-0.0.4.tar.gz/FlaskCms-0.0.4/flask_cms/static/js/ace/snippets/erlang.js | ace.define("ace/snippets/erlang",["require","exports","module"], function(require, exports, module) {
"use strict";
exports.snippetText = "# module and export all\n\
snippet mod\n\
-module(${1:`Filename('', 'my')`}).\n\
\n\
-compile([export_all]).\n\
\n\
start() ->\n\
${2}\n\
\n\
stop() ->\n\
ok.\n\
# define directive\n\
snippet def\n\
-ace.define(${1:macro}, ${2:body}).${3}\n\
# export directive\n\
snippet exp\n\
-export([${1:function}/${2:arity}]).\n\
# include directive\n\
snippet inc\n\
-include(\"${1:file}\").${2}\n\
# behavior directive\n\
snippet beh\n\
-behaviour(${1:behaviour}).${2}\n\
# if expression\n\
snippet if\n\
if\n\
${1:guard} ->\n\
${2:body}\n\
end\n\
# case expression\n\
snippet case\n\
case ${1:expression} of\n\
${2:pattern} ->\n\
${3:body};\n\
end\n\
# anonymous function\n\
snippet fun\n\
fun (${1:Parameters}) -> ${2:body} end${3}\n\
# try...catch\n\
snippet try\n\
try\n\
${1}\n\
catch\n\
${2:_:_} -> ${3:got_some_exception}\n\
end\n\
# record directive\n\
snippet rec\n\
-record(${1:record}, {\n\
${2:field}=${3:value}}).${4}\n\
# todo comment\n\
snippet todo\n\
%% TODO: ${1}\n\
## Snippets below (starting with '%') are in EDoc format.\n\
## See http://www.erlang.org/doc/apps/edoc/chapter.html#id56887 for more details\n\
# doc comment\n\
snippet %d\n\
%% @doc ${1}\n\
# end of doc comment\n\
snippet %e\n\
%% @end\n\
# specification comment\n\
snippet %s\n\
%% @spec ${1}\n\
# private function marker\n\
snippet %p\n\
%% @private\n\
# OTP application\n\
snippet application\n\
-module(${1:`Filename('', 'my')`}).\n\
\n\
-behaviour(application).\n\
\n\
-export([start/2, stop/1]).\n\
\n\
start(_Type, _StartArgs) ->\n\
case ${2:root_supervisor}:start_link() of\n\
{ok, Pid} ->\n\
{ok, Pid};\n\
Other ->\n\
{error, Other}\n\
end.\n\
\n\
stop(_State) ->\n\
ok. \n\
# OTP supervisor\n\
snippet supervisor\n\
-module(${1:`Filename('', 'my')`}).\n\
\n\
-behaviour(supervisor).\n\
\n\
%% API\n\
-export([start_link/0]).\n\
\n\
%% Supervisor callbacks\n\
-export([init/1]).\n\
\n\
-ace.define(SERVER, ?MODULE).\n\
\n\
start_link() ->\n\
supervisor:start_link({local, ?SERVER}, ?MODULE, []).\n\
\n\
init([]) ->\n\
Server = {${2:my_server}, {$2, start_link, []},\n\
permanent, 2000, worker, [$2]},\n\
Children = [Server],\n\
RestartStrategy = {one_for_one, 0, 1},\n\
{ok, {RestartStrategy, Children}}.\n\
# OTP gen_server\n\
snippet gen_server\n\
-module(${1:`Filename('', 'my')`}).\n\
\n\
-behaviour(gen_server).\n\
\n\
%% API\n\
-export([\n\
start_link/0\n\
]).\n\
\n\
%% gen_server callbacks\n\
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,\n\
terminate/2, code_change/3]).\n\
\n\
-ace.define(SERVER, ?MODULE).\n\
\n\
-record(state, {}).\n\
\n\
%%%===================================================================\n\
%%% API\n\
%%%===================================================================\n\
\n\
start_link() ->\n\
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).\n\
\n\
%%%===================================================================\n\
%%% gen_server callbacks\n\
%%%===================================================================\n\
\n\
init([]) ->\n\
{ok, #state{}}.\n\
\n\
handle_call(_Request, _From, State) ->\n\
Reply = ok,\n\
{reply, Reply, State}.\n\
\n\
handle_cast(_Msg, State) ->\n\
{noreply, State}.\n\
\n\
handle_info(_Info, State) ->\n\
{noreply, State}.\n\
\n\
terminate(_Reason, _State) ->\n\
ok.\n\
\n\
code_change(_OldVsn, State, _Extra) ->\n\
{ok, State}.\n\
\n\
%%%===================================================================\n\
%%% Internal functions\n\
%%%===================================================================\n\
\n\
";
exports.scope = "erlang";
}); | PypiClean |
// CodeMirror 2.x syntax-highlighting mode for Ruby.
// NOTE(review): vendored third-party code (CodeMirror 2.13); comments below
// were added for review -- prefer upgrading CodeMirror over patching this.
CodeMirror.defineMode("ruby", function(config, parserConfig) {
  // Build a set-like object ({word: true, ...}) for O(1) membership tests.
  function wordObj(words) {
    var o = {};
    for (var i = 0, e = words.length; i < e; ++i) o[words[i]] = true;
    return o;
  }
  var keywords = wordObj([
    "alias", "and", "BEGIN", "begin", "break", "case", "class", "def", "defined?", "do", "else",
    "elsif", "END", "end", "ensure", "false", "for", "if", "in", "module", "next", "not", "or",
    "redo", "rescue", "retry", "return", "self", "super", "then", "true", "undef", "unless",
    "until", "when", "while", "yield", "nil", "raise", "throw", "catch", "fail", "loop", "callcc",
    "caller", "lambda", "proc", "public", "protected", "private", "require", "load",
    "require_relative", "extend", "autoload"
  ]);
  // Keywords that open a block (push an indent context) ...
  var indentWords = wordObj(["def", "class", "case", "for", "while", "do", "module", "then",
                             "unless", "catch", "loop", "proc"]);
  // ... and keywords that close one.
  var dedentWords = wordObj(["end", "until"]);
  var matching = {"[": "]", "{": "}", "(": ")"};
  // Punctuation character consumed by the current token() call, if any.
  var curPunc;

  // Switch tokenizer and immediately run the new one on the current stream.
  function chain(newtok, stream, state) {
    state.tokenize = newtok;
    return newtok(stream, state);
  }
  // Main tokenizer: dispatches on the first character of the next token.
  function tokenBase(stream, state) {
    curPunc = null;
    // "=begin" at start of line opens a block comment (ended by "=end").
    if (stream.sol() && stream.match("=begin") && stream.eol()) {
      state.tokenize = readBlockComment;
      return "comment";
    }
    if (stream.eatSpace()) return null;
    var ch = stream.next();
    if (ch == "`" || ch == "'" || ch == '"' || ch == "/") {
      return chain(readQuoted(ch, "string"), stream, state);
    } else if (ch == "%") {
      // %-literals: %s (symbol), %w/%q/%r etc. (string-like); the delimiter
      // may be any non-word character, with brackets mapped to their mates.
      var style;
      if (stream.eat("s")) style = "atom";
      else if (stream.eat(/[wWxqQr]/)) style = "string";
      var delim = stream.eat(/[^\w\s]/);
      if (!delim) return "operator";
      if (matching.propertyIsEnumerable(delim)) delim = matching[delim];
      return chain(readPercentQuoted(delim, style), stream, state);
    } else if (ch == "#") {
      stream.skipToEnd();
      return "comment";
    } else if (ch == "<" && stream.eat("<")) {
      // Heredoc: <<WORD, <<-WORD, optionally quoted.
      stream.eat("-");
      stream.eat(/[\'\"\`]/);
      var match = stream.match(/^\w+/);
      stream.eat(/[\'\"\`]/);
      if (match) return chain(readHereDoc(match[0]), stream, state);
      return null;
    } else if (ch == "0") {
      // Hex, binary, or octal numeric literal.
      if (stream.eat("x")) stream.eatWhile(/[\da-fA-F]/);
      else if (stream.eat("b")) stream.eatWhile(/[01]/);
      else stream.eatWhile(/[0-7]/);
      return "number";
    } else if (/\d/.test(ch)) {
      stream.match(/^[\d_]*(?:\.[\d_]+)?(?:[eE][+\-]?[\d_]+)?/);
      return "number";
    } else if (ch == "?") {
      // Character literal, e.g. ?a, ?\n, ?\C-a.
      while (stream.match(/^\\[CM]-/)) {}
      if (stream.eat("\\")) stream.eatWhile(/\w/);
      else stream.next();
      return "string";
    } else if (ch == ":") {
      // Symbol: :sym, :'sym', :"sym".
      if (stream.eat("'")) return chain(readQuoted("'", "atom"), stream, state);
      if (stream.eat('"')) return chain(readQuoted('"', "atom"), stream, state);
      stream.eatWhile(/[\w\?]/);
      return "atom";
    } else if (ch == "@") {
      // Instance (@x) or class (@@x) variable.
      stream.eat("@");
      stream.eatWhile(/[\w\?]/);
      return "variable-2";
    } else if (ch == "$") {
      // Global variable.
      stream.next();
      stream.eatWhile(/[\w\?]/);
      return "variable-3";
    } else if (/\w/.test(ch)) {
      stream.eatWhile(/[\w\?]/);
      if (stream.eat(":")) return "atom";
      return "ident";
    } else if (ch == "|" && (state.varList || state.lastTok == "{" || state.lastTok == "do")) {
      // Opening/closing pipe of a block parameter list: { |a, b| ... }.
      curPunc = "|";
      return null;
    } else if (/[\(\)\[\]{}\\;]/.test(ch)) {
      curPunc = ch;
      return null;
    } else if (ch == "-" && stream.eat(">")) {
      return "arrow";
    } else if (/[=+\-\/*:\.^%<>~|]/.test(ch)) {
      stream.eatWhile(/[=+\-\/*:\.^%<>~|]/);
      return "operator";
    } else {
      return null;
    }
  }
  // Tokenizer for a quoted region terminated by `quote` (honours backslash
  // escapes); restores tokenBase when the closing quote is found.
  function readQuoted(quote, style) {
    return function(stream, state) {
      var escaped = false, ch;
      while ((ch = stream.next()) != null) {
        if (ch == quote && !escaped) {
          state.tokenize = tokenBase;
          break;
        }
        escaped = !escaped && ch == "\\";
      }
      return style;
    };
  }
  // Tokenizer for %-literals: skip to the closing delimiter (no escapes).
  function readPercentQuoted(quote, style) {
    return function(stream, state) {
      if (stream.skipTo(quote)) {stream.next(); state.tokenize = tokenBase;}
      else stream.skipToEnd();
      return style;
    };
  }
  // Tokenizer for a heredoc body: ends on a line matching the intro word.
  function readHereDoc(phrase) {
    return function(stream, state) {
      if (stream.match(phrase)) state.tokenize = tokenBase;
      else stream.skipToEnd();
      return "string";
    };
  }
  // Tokenizer for =begin/=end block comments.
  function readBlockComment(stream, state) {
    if (stream.sol() && stream.match("=end") && stream.eol())
      state.tokenize = tokenBase;
    stream.skipToEnd();
    return "comment";
  }

  return {
    startState: function() {
      return {tokenize: tokenBase,
              indented: 0,
              context: {type: "top", indented: -config.indentUnit},
              continuedLine: false,
              lastTok: null,
              varList: false};
    },

    token: function(stream, state) {
      if (stream.sol()) state.indented = stream.indentation();
      var style = state.tokenize(stream, state), kwtype;
      if (style == "ident") {
        // Refine bare identifiers: keyword, constant ("tag"), definition
        // name (after def/class or inside |...|), or plain variable.
        var word = stream.current();
        style = keywords.propertyIsEnumerable(stream.current()) ? "keyword"
          : /^[A-Z]/.test(word) ? "tag"
          : (state.lastTok == "def" || state.lastTok == "class" || state.varList) ? "def"
          : "variable";
        if (indentWords.propertyIsEnumerable(word)) kwtype = "indent";
        else if (dedentWords.propertyIsEnumerable(word)) kwtype = "dedent";
        // Leading "if" (not a trailing modifier) also opens a block.
        else if (word == "if" && stream.column() == stream.indentation()) kwtype = "indent";
      }
      // `word` is hoisted: undefined unless the ident branch ran above.
      if (curPunc || (style && style != "comment")) state.lastTok = word || curPunc || style;
      if (curPunc == "|") state.varList = !state.varList;

      // Maintain the indentation context stack.
      if (kwtype == "indent" || /[\(\[\{]/.test(curPunc))
        state.context = {prev: state.context, type: curPunc || style, indented: state.indented};
      else if ((kwtype == "dedent" || /[\)\]\}]/.test(curPunc)) && state.context.prev)
        state.context = state.context.prev;

      if (stream.eol())
        state.continuedLine = (curPunc == "\\" || style == "operator");
      return style;
    },

    indent: function(state, textAfter) {
      if (state.tokenize != tokenBase) return 0;
      var firstChar = textAfter && textAfter.charAt(0);
      var ct = state.context;
      // Dedent when the next text closes the current bracket or block.
      var closing = ct.type == matching[firstChar] ||
        ct.type == "keyword" && /^(?:end|until|else|elsif|when)\b/.test(textAfter);
      return ct.indented + (closing ? 0 : config.indentUnit) +
        (state.continuedLine ? config.indentUnit : 0);
    }
  };
});

CodeMirror.defineMIME("text/x-ruby", "ruby");
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/electrum_chi/electrum/gui/qt/amountedit.py |
from decimal import Decimal
from PyQt5.QtCore import pyqtSignal, Qt
from PyQt5.QtGui import QPalette, QPainter, QFontMetrics
from PyQt5.QtWidgets import (QLineEdit, QStyle, QStyleOptionFrame)
from .util import char_width_in_lineedit
from electrum.util import (format_satoshis_plain, decimal_point_to_base_unit_name,
FEERATE_PRECISION, quantize_feerate)
class MyLineEdit(QLineEdit):
    """Line edit with a 'frozen' (read-only, frameless) mode.

    The ``frozen`` signal is emitted every time the mode is toggled so
    dependent widgets can update themselves.
    """
    frozen = pyqtSignal()

    def setFrozen(self, b):
        """Freeze (read-only, no frame) or thaw the field, then notify."""
        editable = not b
        self.setReadOnly(not editable)
        self.setFrame(editable)
        self.frozen.emit()
class AmountEdit(MyLineEdit):
    """Line edit that accepts only numeric amounts.

    Non-numeric characters are stripped as the user types, the fractional
    part is truncated to ``max_precision()`` digits, and the base-unit name
    (from the ``base_unit`` callable) is painted right-aligned inside the
    box.  Typing ``!`` emits the ``shortcut`` signal instead of a number.
    """
    shortcut = pyqtSignal()

    def __init__(self, base_unit, is_int=False, parent=None):
        QLineEdit.__init__(self, parent)
        # This seems sufficient for hundred-BTC amounts with 8 decimals
        self.setFixedWidth(16 * char_width_in_lineedit())
        self.base_unit = base_unit  # callable returning the unit label, or falsy
        self.textChanged.connect(self.numbify)
        self.is_int = is_int
        self.is_shortcut = False
        self.help_palette = QPalette()
        # Extra decimal digits allowed beyond decimal_point() (see FeerateEdit).
        self.extra_precision = 0

    def decimal_point(self):
        """Number of decimal places of the unit (overridden in subclasses)."""
        return 8

    def max_precision(self):
        """Maximum number of fractional digits accepted by the field."""
        return self.decimal_point() + self.extra_precision

    def numbify(self):
        """Sanitize the current text into a plain number (or handle '!')."""
        text = self.text().strip()
        if text == '!':
            self.shortcut.emit()
            return
        pos = self.cursorPosition()
        chars = '0123456789'
        if not self.is_int: chars += '.'
        s = ''.join(i for i in text if i in chars)
        if not self.is_int:
            if '.' in s:
                # Keep only the first decimal point and truncate the
                # fractional part to the allowed precision.
                p = s.find('.')
                s = s.replace('.', '')
                s = s[:p] + '.' + s[p:p+self.max_precision()]
        self.setText(s)
        # setText sets Modified to False. Instead we want to remember
        # if updates were because of user modification.
        self.setModified(self.hasFocus())
        self.setCursorPosition(pos)

    def paintEvent(self, event):
        """Paint the line edit, then overlay the base-unit label on the right."""
        QLineEdit.paintEvent(self, event)
        if self.base_unit:
            panel = QStyleOptionFrame()
            self.initStyleOption(panel)
            textRect = self.style().subElementRect(QStyle.SE_LineEditContents, panel, self)
            textRect.adjust(2, 0, -10, 0)
            painter = QPainter(self)
            painter.setPen(self.help_palette.brush(QPalette.Disabled, QPalette.Text).color())
            painter.drawText(textRect, Qt.AlignRight | Qt.AlignVCenter, self.base_unit())

    def get_amount(self):
        """Return the current amount as int/Decimal, or None if unparseable."""
        try:
            return (int if self.is_int else Decimal)(str(self.text()))
        except (ValueError, ArithmeticError):
            # int() raises ValueError; Decimal() raises InvalidOperation, a
            # subclass of ArithmeticError.  (Was a bare `except:`, which also
            # swallowed unrelated errors such as KeyboardInterrupt.)
            return None

    def setAmount(self, x):
        self.setText("%d" % x)
class BTCAmountEdit(AmountEdit):
    """Amount edit denominated in the configured base unit.

    ``get_amount`` returns the value converted to the smallest unit
    (an integer number of satoshis when no extra precision is in play).
    """

    def __init__(self, decimal_point, is_int=False, parent=None):
        AmountEdit.__init__(self, self._base_unit, is_int, parent)
        # NOTE: `decimal_point` is a callable; assigning it here deliberately
        # shadows the AmountEdit.decimal_point() method so that unit changes
        # elsewhere in the app are picked up live.
        self.decimal_point = decimal_point

    def _base_unit(self):
        return decimal_point_to_base_unit_name(self.decimal_point())

    def get_amount(self):
        """Return the amount in the smallest unit, or None if unparseable."""
        try:
            x = Decimal(str(self.text()))
        except (ValueError, ArithmeticError):
            # Decimal() raises InvalidOperation (an ArithmeticError subclass)
            # on malformed input.  (Was a bare `except:`, which hid real bugs.)
            return None
        # scale it to max allowed precision, make it an int
        power = pow(10, self.max_precision())
        max_prec_amount = int(power * x)
        # if the max precision is simply what unit conversion allows, just return
        if self.max_precision() == self.decimal_point():
            return max_prec_amount
        # otherwise, scale it back to the expected unit
        amount = Decimal(max_prec_amount) / pow(10, self.max_precision()-self.decimal_point())
        return Decimal(amount) if not self.is_int else int(amount)

    def setAmount(self, amount):
        if amount is None:
            self.setText(" ")  # Space forces repaint in case units changed
        else:
            self.setText(format_satoshis_plain(amount, self.decimal_point()))
class FeerateEdit(BTCAmountEdit):
    """Fee-rate input ('swartz/byte') allowing extra sub-unit precision."""

    def __init__(self, decimal_point, is_int=False, parent=None):
        super().__init__(decimal_point, is_int, parent)
        self.extra_precision = FEERATE_PRECISION

    def _base_unit(self):
        return 'swartz/byte'

    def get_amount(self):
        """Return the entered fee rate quantized to the canonical precision."""
        raw = BTCAmountEdit.get_amount(self)
        return quantize_feerate(raw)

    def setAmount(self, amount):
        """Display *amount* after quantizing it to the fee-rate precision."""
        super().setAmount(quantize_feerate(amount))
/125softNLP-0.0.1-py3-none-any.whl/pysoftNLP/ner/kashgari/tasks/classification/dpcnn_model.py |
# author: Alex
# contact: ialexwwang@gmail.com
# version: 0.1
# license: Apache Licence
# file: dpcnn_model.py
# time: 2019-07-02 19:15
# Reference:
# https://ai.tencent.com/ailab/media/publications/ACL3-Brady.pdf
# https://github.com/Cheneng/DPCNN
# https://github.com/miracleyoo/DPCNN-TextCNN-Pytorch-Inception
# https://www.kaggle.com/michaelsnell/conv1d-dpcnn-in-keras
from math import log2, floor
from typing import Dict, Any
import tensorflow as tf
from kashgari.layers import L, KMaxPoolingLayer
from kashgari.tasks.classification.base_model import BaseClassificationModel
class DPCNN_Model(BaseClassificationModel):
    '''
    This implementation of DPCNN requires a clear declared sequence length.
    So sequences input in should be padded or cut to a given length in advance.
    '''
    @classmethod
    def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
        # Default hyper-parameters for every sub-block of the architecture.
        pool_type = 'max'
        filters = 250
        activation = 'linear'
        return {
            'region_embedding': {
                'filters': filters,
                'kernel_size': 3,
                'strides': 1,
                'padding': 'same',
                'activation': activation,
                'name': 'region_embedding',
            },
            'region_dropout': {
                'rate': 0.2,
            },
            'conv_block': {
                'filters': filters,
                'kernel_size': 3,
                'activation': activation,
                'shortcut': True,
            },
            'resnet_block': {
                'filters': filters,
                'kernel_size': 3,
                'activation': activation,
                'shortcut': True,
                'pool_type': pool_type,
                'sorted': True,
            },
            'dense': {
                'units': 256,
                'activation': activation,
            },
            'dropout': {
                'rate': 0.5,
            },
            'activation': {
                'activation': 'softmax',
            }
        }
    def downsample(self, inputs, pool_type: str = 'max',
                   sorted: bool = True, stage: int = 1):  # noqa: A002
        # Halve the sequence length using the selected pooling strategy:
        # 'max' pooling, k-max pooling, a strided convolution, or no-op (None).
        # NOTE(review): `inputs.shape[1].value` assumes TF1-style Dimension
        # objects; under TF2 shape entries are plain ints -- confirm the
        # targeted TensorFlow version.
        layers_pool = []
        if pool_type == 'max':
            layers_pool.append(
                L.MaxPooling1D(pool_size=3,
                               strides=2,
                               padding='same',
                               name=f'pool_{stage}'))
        elif pool_type == 'k_max':
            k = int(inputs.shape[1].value / 2)
            layers_pool.append(
                KMaxPoolingLayer(k=k,
                                 sorted=sorted,
                                 name=f'pool_{stage}'))
        elif pool_type == 'conv':
            layers_pool.append(
                L.Conv1D(filters=inputs.shape[-1].value,
                         kernel_size=3,
                         strides=2,
                         padding='same',
                         name=f'pool_{stage}'))
            layers_pool.append(
                L.BatchNormalization())
        elif pool_type is None:
            layers_pool = []
        else:
            raise ValueError(f'unsupported pooling type `{pool_type}`!')
        tensor_out = inputs
        for layer in layers_pool:
            tensor_out = layer(tensor_out)
        return tensor_out
    def conv_block(self, inputs, filters: int, kernel_size: int = 3,
                   activation: str = 'linear', shortcut: bool = True):
        # One DPCNN convolutional block: (BN -> PReLU -> Conv1D) x 2 with an
        # optional residual (shortcut) connection around the whole block.
        layers_conv_unit = []
        layers_conv_unit.append(
            L.BatchNormalization())
        layers_conv_unit.append(
            L.PReLU())
        layers_conv_unit.append(
            L.Conv1D(filters=filters,
                     kernel_size=kernel_size,
                     strides=1,
                     padding='same',
                     activation=activation))
        # NOTE(review): list multiplication repeats the SAME layer instances,
        # so both halves of the block apply one shared BatchNorm/PReLU/Conv1D
        # (weight sharing).  The DPCNN paper uses two distinct convolutions
        # per block -- confirm whether this sharing is intentional.
        layers_conv_block = layers_conv_unit * 2
        tensor_out = inputs
        for layer in layers_conv_block:
            tensor_out = layer(tensor_out)
        if shortcut:
            tensor_out = L.Add()([inputs, tensor_out])
        return tensor_out
    def resnet_block(self, inputs, filters: int, kernel_size: int = 3,
                     activation: str = 'linear', shortcut: bool = True,
                     pool_type: str = 'max', sorted: bool = True, stage: int = 1):  # noqa: A002
        # One pyramid stage: downsample (stride-2 pooling) then a conv block.
        tensor_pool = self.downsample(inputs, pool_type=pool_type, sorted=sorted, stage=stage)
        tensor_out = self.conv_block(tensor_pool, filters=filters, kernel_size=kernel_size,
                                     activation=activation, shortcut=shortcut)
        return tensor_out
    def build_model_arc(self):
        # Assemble the full graph: embedding -> region embedding -> base conv
        # block -> log2(seq_len)-2 pyramid stages -> pooling/dense head.
        output_dim = len(self.processor.label2idx)
        config = self.hyper_parameters
        embed_model = self.embedding.embed_model
        layers_region = [
            L.Conv1D(**config['region_embedding']),
            L.BatchNormalization(),
            L.PReLU(),
            L.Dropout(**config['region_dropout'])
        ]
        layers_main = [
            L.GlobalMaxPooling1D(),
            L.Dense(**config['dense']),
            L.BatchNormalization(),
            L.PReLU(),
            L.Dropout(**config['dropout']),
            L.Dense(output_dim, **config['activation'])
        ]
        tensor_out = embed_model.output
        # build region tensors
        for layer in layers_region:
            tensor_out = layer(tensor_out)
        # build the base pyramid layer
        tensor_out = self.conv_block(tensor_out, **config['conv_block'])
        # build the above pyramid layers while `steps > 2`
        seq_len = tensor_out.shape[1].value
        if seq_len is None:
            raise ValueError('`sequence_length` should be explicitly assigned, but it is `None`.')
        # Each stage halves the sequence, so the pyramid depth is bounded by
        # floor(log2(seq_len)) - 2.
        for i in range(floor(log2(seq_len)) - 2):
            tensor_out = self.resnet_block(tensor_out, stage=i + 1,
                                           **config['resnet_block'])
        for layer in layers_main:
            tensor_out = layer(tensor_out)
        self.tf_model = tf.keras.Model(embed_model.inputs, tensor_out)
/OASYS1-XOPPY-1.2.10.tar.gz/OASYS1-XOPPY-1.2.10/orangecontrib/xoppy/util/script/python_script.py | __author__ = 'labx'
import sys
import code
import keyword
import itertools
from PyQt5 import QtGui, QtWidgets
from PyQt5.QtGui import (
QTextCursor, QFont, QColor, QPalette
)
from PyQt5.QtCore import Qt, QRegExp, QItemSelectionModel
def text_format(foreground=Qt.black, weight=QFont.Normal):
    """Create a QTextCharFormat with the given foreground color and weight."""
    char_format = QtGui.QTextCharFormat()
    char_format.setFontWeight(weight)
    char_format.setForeground(QtGui.QBrush(foreground))
    return char_format
class PythonSyntaxHighlighter(QtGui.QSyntaxHighlighter):
    """Regex-based Python syntax highlighter for a Qt text document."""
    def __init__(self, parent=None):
        # Character formats for each token category.
        self.keywordFormat = text_format(Qt.blue, QFont.Bold)
        self.stringFormat = text_format(Qt.darkGreen)
        self.defFormat = text_format(Qt.black, QFont.Bold)
        self.commentFormat = text_format(Qt.lightGray)
        self.decoratorFormat = text_format(Qt.darkGray)

        self.keywords = list(keyword.kwlist)

        # (pattern, format) pairs applied in order; later rules can overwrite
        # the formatting of earlier ones within a block.
        self.rules = [(QRegExp(r"\b%s\b" % kwd), self.keywordFormat)
                      for kwd in self.keywords] + \
                     [(QRegExp(r"\bdef\s+([A-Za-z_]+[A-Za-z0-9_]+)\s*\("),
                       self.defFormat),
                      (QRegExp(r"\bclass\s+([A-Za-z_]+[A-Za-z0-9_]+)\s*\("),
                       self.defFormat),
                      (QRegExp(r"'.*'"), self.stringFormat),
                      (QRegExp(r'".*"'), self.stringFormat),
                      (QRegExp(r"#.*"), self.commentFormat),
                      (QRegExp(r"@[A-Za-z_]+[A-Za-z0-9_]+"),
                       self.decoratorFormat)]

        # Triple-quote delimiters for multi-line strings (''' or """).
        self.multilineStart = QRegExp(r"(''')|" + r'(""")')
        self.multilineEnd = QRegExp(r"(''')|" + r'(""")')

        # Call the base constructor last so all rules exist before the
        # highlighter attaches to the document and highlighting starts.
        super().__init__(parent)

    def highlightBlock(self, text):
        # Apply all single-line rules.
        for pattern, format in self.rules:
            exp = QRegExp(pattern)
            index = exp.indexIn(text)
            while index >= 0:
                length = exp.matchedLength()
                if exp.numCaptures() > 0:
                    # Format only the captured group (e.g. the name in "def name(").
                    self.setFormat(exp.pos(1), len(str(exp.cap(1))), format)
                else:
                    self.setFormat(exp.pos(0), len(str(exp.cap(0))), format)
                index = exp.indexIn(text, index + length)

        # Multi-line strings: block state 1 means "inside a triple-quoted
        # string"; the state is carried from one text block to the next.
        start = self.multilineStart
        end = self.multilineEnd
        self.setCurrentBlockState(0)
        startIndex, skip = 0, 0
        if self.previousBlockState() != 1:
            # Not continuing a string: look for an opening delimiter.
            startIndex, skip = start.indexIn(text), 3
        while startIndex >= 0:
            endIndex = end.indexIn(text, startIndex + skip)
            if endIndex == -1:
                # Unterminated in this block: mark state so the next block
                # continues the string.
                self.setCurrentBlockState(1)
                commentLen = len(text) - startIndex
            else:
                commentLen = endIndex - startIndex + 3
            self.setFormat(startIndex, commentLen, self.stringFormat)
            startIndex, skip = (start.indexIn(text,
                                              startIndex + commentLen + 3),
                                3)
class PythonScriptEditor(QtWidgets.QPlainTextEdit):
    """Plain-text editor with simple Python-aware auto-indentation."""

    # Number of spaces per indentation level.
    INDENT = 4

    def lastLine(self):
        """Return the text of the current line, up to the cursor position.

        Bug fix: when the cursor is on the first line, ``rfind`` returns -1
        and the old slice ``text[-1:pos]`` produced the wrong text (usually
        empty), so auto-indent never worked on the document's first line.
        Adding 1 maps -1 to 0, the start of the document, and also removes
        the need to strip a leading newline.
        """
        text = str(self.toPlainText())
        pos = self.textCursor().position()
        start = text.rfind("\n", 0, pos) + 1
        return text[start:pos]

    def keyPressEvent(self, event):
        """Handle Return (auto-indent), Tab (spaces) and Backspace (dedent)."""
        if event.key() == Qt.Key_Return:
            text = self.lastLine()
            indent = len(text) - len(text.lstrip())
            # Dedent after "pass"/"return ...", indent after a trailing ":".
            if text.strip() == "pass" or text.strip().startswith("return "):
                indent = max(0, indent - self.INDENT)
            elif text.strip().endswith(":"):
                indent += self.INDENT
            super().keyPressEvent(event)
            self.insertPlainText(" " * indent)
        elif event.key() == Qt.Key_Tab:
            # Always insert spaces, never a literal tab character.
            self.insertPlainText(" " * self.INDENT)
        elif event.key() == Qt.Key_Backspace:
            text = self.lastLine()
            if text and not text.strip():
                # Only whitespace before the cursor: delete one indent level.
                cursor = self.textCursor()
                for i in range(min(self.INDENT, len(text))):
                    cursor.deletePreviousChar()
            else:
                super().keyPressEvent(event)
        else:
            super().keyPressEvent(event)
class PythonConsole(QtWidgets.QPlainTextEdit, code.InteractiveConsole):
    """
    An interactive Python console embedded in a QPlainTextEdit.

    The classic `InteractiveConsole.interact` loop is rewritten as a
    generator (`self.loop`): each key press of Return advances the
    generator one step, which reads the just-entered line from the
    document and pushes it to the interpreter.
    """

    def __init__(self, locals=None, parent=None):
        QtWidgets.QPlainTextEdit.__init__(self, parent)
        code.InteractiveConsole.__init__(self, locals)
        # history[0] is the most recent line; historyInd walks it on Up/Down.
        self.history, self.historyInd = [""], 0
        # Prime the generator so the banner and first prompt are written.
        self.loop = self.interact()
        next(self.loop)

    def setLocals(self, locals):
        """Replace the interpreter's local namespace."""
        self.locals = locals

    def interact(self, banner=None):
        """
        Generator version of InteractiveConsole.interact.

        Yields after writing each prompt; resumed (via `next`) when the
        user presses Return, at which point the entered line is consumed.
        """
        # Ensure the standard prompts exist (they are normally only set
        # when Python runs interactively).
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = ('Type "help", "copyright", "credits" or "license" '
                'for more information.')
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        else:
            self.write("%s\n" % str(banner))
        more = 0
        while 1:
            try:
                # ps2 while a compound statement is still open.
                if more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                self.new_prompt(prompt)
                yield
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0

    def raw_input(self, prompt):
        # The entered line is the document block *before* the newline that
        # Return just inserted; strip the leading prompt text.
        input = str(self.document().lastBlock().previous().text())
        return input[len(prompt):]

    def new_prompt(self, prompt):
        # Remember where user input starts so editing keys can be confined
        # to the input area.
        self.write(prompt)
        self.newPromptPos = self.textCursor().position()

    def write(self, data):
        """File-like write: append `data` at the end of the document."""
        cursor = QTextCursor(self.document())
        cursor.movePosition(QTextCursor.End, QTextCursor.MoveAnchor)
        cursor.insertText(data)
        self.setTextCursor(cursor)
        self.ensureCursorVisible()

    def writelines(self, lines):
        """File-like writelines (no newlines are added)."""
        for line in lines:
            self.write(line)

    def push(self, line):
        """
        Record `line` in the history and push it to the interpreter with
        stdout/stderr temporarily redirected into this widget.
        """
        if self.history[0] != line:
            self.history.insert(0, line)
        self.historyInd = 0
        saved = sys.stdout, sys.stderr
        try:
            sys.stdout, sys.stderr = self, self
            return code.InteractiveConsole.push(self, line)
        finally:
            # Always restore the real streams, even if push raises.
            sys.stdout, sys.stderr = saved

    def setLine(self, line):
        """Replace the current input (everything after the prompt)."""
        cursor = QTextCursor(self.document())
        cursor.movePosition(QTextCursor.End)
        cursor.setPosition(self.newPromptPos, QTextCursor.KeepAnchor)
        cursor.removeSelectedText()
        cursor.insertText(line)
        self.setTextCursor(cursor)

    def keyPressEvent(self, event):
        if event.key() == Qt.Key_Return:
            # Commit the line and advance the interact generator.
            self.write("\n")
            next(self.loop)
        elif event.key() == Qt.Key_Up:
            self.historyUp()
        elif event.key() == Qt.Key_Down:
            self.historyDown()
        elif event.key() == Qt.Key_Tab:
            self.complete()
        elif event.key() in [Qt.Key_Left, Qt.Key_Backspace]:
            # Prevent moving/deleting into the prompt area.
            # NOTE(review): the else branch forwards the event regardless,
            # so this guard currently only suppresses the key when the
            # cursor is at/before the prompt — confirm intent upstream.
            if self.textCursor().position() > self.newPromptPos:
                QtWidgets.QPlainTextEdit.keyPressEvent(self, event)
        else:
            QtWidgets.QPlainTextEdit.keyPressEvent(self, event)

    def historyUp(self):
        """Show the next-older history entry on the input line."""
        self.setLine(self.history[self.historyInd])
        self.historyInd = min(self.historyInd + 1, len(self.history) - 1)

    def historyDown(self):
        """Show the next-newer history entry on the input line."""
        self.setLine(self.history[self.historyInd])
        self.historyInd = max(self.historyInd - 1, 0)

    def complete(self):
        # Tab completion is not implemented.
        pass

    def _moveCursorToInputLine(self):
        """
        Move the cursor to the input line if not already there. If the
        cursor is already in the input line (at position greater or equal
        to `newPromptPos`) it is left unchanged, otherwise it is moved to
        the end.
        """
        cursor = self.textCursor()
        pos = cursor.position()
        if pos < self.newPromptPos:
            cursor.movePosition(QTextCursor.End)
            self.setTextCursor(cursor)

    def pasteCode(self, source):
        """
        Paste source code into the console.

        Each newline is executed as a Return press (advancing the
        interpreter loop), so multi-line pastes run line by line.
        """
        self._moveCursorToInputLine()
        for line in interleave(source.splitlines(), itertools.repeat("\n")):
            if line != "\n":
                self.insertPlainText(line)
            else:
                self.write("\n")
                next(self.loop)

    def insertFromMimeData(self, source):
        """
        Reimplemented from QPlainTextEdit.insertFromMimeData.
        """
        if source.hasText():
            self.pasteCode(str(source.text()))
            return
def interleave(seq1, seq2):
    """
    Interleave elements of `seq2` between consecutive elements of `seq1`.

    `seq2` must supply at least ``len(seq1) - 1`` elements (an infinite
    iterator such as `itertools.repeat` is fine).  An empty `seq1` yields
    nothing.

    >>> list(interleave([1, 3, 5], [2, 4]))
    [1, 2, 3, 4, 5]
    >>> list(interleave([], []))
    []
    """
    iterator1, iterator2 = iter(seq1), iter(seq2)
    # Bug fix: a bare `next()` on an empty `seq1` would raise StopIteration
    # inside this generator, which Python 3.7+ (PEP 479) converts into a
    # RuntimeError.  Return cleanly instead.
    try:
        leading = next(iterator1)
    except StopIteration:
        return
    for element in iterator1:
        yield leading
        yield next(iterator2)
        leading = element
    yield leading
class Script(object):
    """
    A named script and its state flags, as shown in the script list view.

    :param str name: Display name of the script.
    :param str script: The script source text.
    :param int flags: Bitwise OR of the `Modified` /
        `MissingFromFilesystem` state flags (default 0).
    :param filename: Path the script was loaded from, or None for an
        unsaved script.
    """
    # State flags (combined bitwise in `flags`).
    Modified = 1
    MissingFromFilesystem = 2

    def __init__(self, name, script, flags=0, filename=None):
        self.name = name
        self.script = script
        self.flags = flags
        self.filename = filename

    def __repr__(self):
        # Added for debuggability; the (long) script text is elided.
        return "Script(name=%r, flags=%r, filename=%r)" % (
            self.name, self.flags, self.filename)
class ScriptItemDelegate(QtWidgets.QStyledItemDelegate):
    """
    Item delegate rendering `Script` objects in a list view.

    Modified scripts are shown with a leading '*' and painted in red;
    editing an item renames the underlying script.
    """

    def __init__(self, parent):
        super().__init__(parent)

    def displayText(self, script, locale):
        # `script` is the Script instance stored in the DisplayRole.
        if script.flags & Script.Modified:
            return "*" + script.name
        else:
            return script.name

    def paint(self, painter, option, index):
        script = index.data(Qt.DisplayRole)
        if script.flags & Script.Modified:
            # Copy the style option so the red palette only affects this item.
            option = QtWidgets.QStyleOptionViewItem(option)
            option.palette.setColor(QPalette.Text, QColor(Qt.red))
            option.palette.setColor(QPalette.Highlight, QColor(Qt.darkRed))
        super().paint(painter, option, index)

    def createEditor(self, parent, option, index):
        # In-place rename uses a plain line edit.
        return QtWidgets.QLineEdit(parent)

    def setEditorData(self, editor, index):
        script = index.data(Qt.DisplayRole)
        editor.setText(script.name)

    def setModelData(self, editor, model, index):
        # The model is indexable by row and holds Script objects directly.
        model[index.row()].name = str(editor.text())
def select_row(view, row):
    """
    Select a `row` in an item view, clearing any previous selection.

    :param view: A QAbstractItemView whose model and selection model are
        already set.
    :param int row: Index of the row to select (column 0 is used as the
        anchor index).
    """
    selmodel = view.selectionModel()
    # Bug fix: the original line ended in a stray "| PypiClean" token
    # (file-concatenation residue) which would have OR-ed the call result
    # with an undefined name and raised NameError at runtime.
    selmodel.select(view.model().index(row, 0),
                    QItemSelectionModel.ClearAndSelect)
/NameGenderPredictor-0.0.1.tar.gz/NameGenderPredictor-0.0.1/README.md | Prediction of genders of english names based on US Social Security data. The gender probability of each name is computed based on the number of male and female babies that were given this name between 1880 and 2017.
The full data can be found at https://www.ssa.gov/oact/babynames/limits.html
# Description
The main function returns the probability for a given english name to be a male name. There are optional arguments start_year (default 1880) and end_year (default 2018) to use a specific period of time for calculation. For instance, for gender prediction of babyboomers, the period can be set to 1946-1965. If the name is absent from the database, `None` is returned.
# Installation
Requires package sqlite3. The code was only tested with Python 3.7.
```
$ pip install NameGenderPredictor
```
# Basic Usage
Function `predict_gender` is case insensitive for names (see below).
```python
>>> from NameGenderPredictor import predict_gender
>>> print(predict_gender('john'))
0.9957852975260119
>>> print(predict_gender('Avi'))
0.8783253379851723
>>> print(predict_gender('chris'))
0.8632186721917374
>>> print(predict_gender('chris', start_year=2000, end_year=2010))
0.9833235466823254
>>> print(predict_gender('chris', start_year=1950, end_year=1965))
0.8111958317448215
```
| PypiClean |
/GenIce2-2.1.7.1.tar.gz/GenIce2-2.1.7.1/genice2/lattices/Struct45.py | from genice2.cell import cellvectors
import genice2.lattices
# Module metadata — presumably read by GenIce2's lattice plugin loader to
# show the literature reference, usage help and a one-line summary
# (TODO confirm against the genice2 framework).
desc = {"ref": {"SpaceFullerene": 'Sikiric 2010'},
        "usage": "No options available.",
        "brief": "A space fullerene."
        }
class Lattice(genice2.lattices.Lattice):
    """
    Machine-generated GenIce2 lattice: a space fullerene (Sikiric 2010).

    The tables below are generated data, not hand-written code:
    `pairs` — presumably hydrogen-bond pair indices into `waters`;
    `waters` — fractional ("relative") water positions;
    `cages` — cage size followed by a fractional cage position
    (TODO confirm field meanings against the GenIce2 lattice format).
    """

    def __init__(self):
        self.pairs = """
        31 128
        36 90
        140 120
        197 56
        133 68
        132 50
        76 103
        105 30
        161 192
        135 118
        25 102
        147 51
        34 77
        47 43
        145 139
        13 160
        50 111
        159 117
        187 125
        48 7
        25 176
        170 202
        18 120
        24 165
        180 4
        148 53
        141 155
        136 6
        93 3
        112 102
        185 66
        67 154
        173 111
        185 30
        17 180
        172 8
        130 203
        80 92
        114 169
        205 122
        205 121
        70 189
        28 61
        134 181
        26 3
        199 92
        4 129
        35 134
        172 19
        107 37
        79 119
        70 191
        140 89
        202 60
        125 179
        103 61
        180 139
        148 84
        191 188
        180 98
        64 89
        20 194
        106 150
        12 71
        80 176
        158 16
        148 27
        148 173
        123 97
        29 126
        65 51
        9 165
        130 187
        2 69
        118 201
        170 50
        2 38
        94 5
        174 83
        34 121
        11 127
        199 158
        32 189
        171 166
        51 43
        59 164
        128 127
        77 78
        190 107
        82 140
        176 91
        24 105
        46 179
        112 197
        138 173
        22 201
        183 165
        66 116
        164 43
        18 86
        14 166
        17 63
        130 170
        52 178
        157 57
        23 36
        135 175
        29 88
        97 85
        175 54
        115 81
        193 84
        20 145
        47 198
        143 202
        86 3
        91 99
        144 67
        178 111
        1 41
        50 155
        149 146
        59 65
        168 200
        47 0
        198 38
        97 79
        145 126
        31 75
        39 85
        85 38
        57 177
        4 137
        63 16
        199 107
        6 72
        94 121
        19 0
        29 71
        144 86
        117 171
        115 55
        197 137
        54 90
        123 72
        156 177
        9 90
        130 153
        45 129
        15 26
        183 201
        95 4
        68 159
        138 41
        42 79
        133 83
        182 110
        151 39
        78 61
        171 129
        70 184
        195 95
        195 96
        182 84
        62 186
        33 160
        44 194
        71 167
        47 196
        18 33
        70 90
        177 168
        21 13
        109 11
        100 124
        169 102
        103 125
        198 3
        138 155
        77 152
        93 120
        23 62
        192 167
        177 181
        190 169
        44 102
        190 176
        17 106
        19 192
        30 127
        21 74
        75 9
        72 42
        5 189
        119 147
        163 37
        66 75
        1 153
        106 145
        10 162
        192 91
        191 205
        114 126
        31 118
        112 126
        88 8
        53 73
        172 71
        136 69
        82 76
        54 146
        174 162
        69 42
        141 185
        168 137
        149 7
        110 40
        52 170
        113 24
        173 89
        127 153
        17 157
        103 203
        129 56
        1 141
        161 163
        155 187
        57 98
        2 151
        22 128
        83 87
        134 158
        120 154
        152 116
        67 55
        32 183
        142 76
        82 73
        28 46
        136 65
        64 101
        113 204
        109 31
        27 203
        162 98
        134 169
        58 7
        20 99
        36 188
        204 5
        204 184
        28 41
        117 95
        45 87
        60 193
        150 25
        157 194
        63 181
        187 30
        14 133
        115 110
        142 179
        14 54
        49 0
        79 26
        160 110
        28 152
        92 8
        23 117
        72 43
        11 78
        159 96
        150 92
        114 99
        75 184
        200 48
        44 56
        114 167
        175 48
        121 201
        132 104
        20 25
        174 139
        15 33
        122 186
        188 118
        151 74
        57 87
        58 87
        80 19
        186 188
        113 152
        97 33
        13 119
        142 89
        113 100
        160 154
        58 171
        125 124
        85 86
        143 153
        195 7
        131 49
        164 80
        58 68
        131 163
        10 96
        161 65
        144 81
        23 14
        161 74
        5 165
        156 166
        64 154
        63 137
        149 122
        109 191
        32 205
        190 59
        12 199
        76 202
        52 193
        46 100
        49 38
        74 0
        62 200
        123 49
        135 9
        6 12
        1 116
        59 167
        175 186
        35 99
        198 42
        81 26
        156 83
        133 10
        140 178
        21 123
        82 27
        100 108
        150 16
        109 34
        29 158
        122 96
        45 139
        196 55
        138 179
        2 163
        36 183
        35 88
        64 53
        168 162
        15 40
        132 101
        136 107
        67 39
        195 98
        182 111
        46 104
        200 95
        106 88
        52 27
        194 45
        128 116
        131 6
        101 60
        24 66
        77 22
        21 51
        178 40
        112 16
        60 73
        144 53
        15 193
        35 157
        55 93
        44 181
        174 197
        18 73
        78 124
        203 41
        37 91
        93 40
        105 22
        156 56
        10 62
        48 166
        146 68
        143 104
        34 204
        12 164
        108 11
        143 61
        131 172
        32 159
        132 142
        37 8
        141 104
        146 189
        105 124
        115 119
        69 147
        182 101
        149 94
        184 108
        185 108
        13 39
        81 84
        196 151
        196 147
        135 94
        """
        self.waters = """
        0.80883 0.0 0.27714
        0.19624 0.5 0.91946
        0.875 0.5 0.26494
        0.0 0.81173 0.17048
        0.0 0.875 0.56455
        0.80883 0.31617 0.76311
        0.375 0.68383 0.30737
        0.875 0.5 0.64549
        0.81173 0.68383 0.37885
        0.19117 0.31617 0.76311
        0.5 0.875 0.62994
        0.5 0.875 0.85451
        0.31328 0.80883 0.34573
        0.5 0.25 0.19172
        0.31328 0.19117 0.65427
        0.31617 0.81173 0.12116
        0.18828 0.80376 0.45959
        0.875 0.67876 0.5
        0.68383 0.81173 0.12116
        0.81173 0.0 0.32952
        0.81173 0.19624 0.45959
        0.0 0.81173 0.82952
        0.18828 0.0 0.67048
        0.0 0.18828 0.82952
        0.0 0.125 0.43545
        0.19117 0.68673 0.15427
        0.0 0.68673 0.02414
        0.80376 0.5 0.91946
        0.5 0.80376 0.41946
        0.19117 0.0 0.88698
        0.31617 0.625 0.80737
        0.80883 0.0 0.72287
        0.5 0.875 0.14549
        0.68383 0.625 0.80737
        0.625 0.5 0.45146
        0.19117 0.0 0.72287
        0.875 0.5 0.35451
        0.80883 0.68383 0.23689
        0.68383 0.375 0.19264
        0.19117 0.0 0.11302
        0.0 0.5 0.95173
        0.19117 0.68383 0.23689
        0.19117 0.0 0.27714
        0.125 0.32124 0.5
        0.81173 0.19624 0.54041
        0.67876 0.31328 0.92732
        0.0 0.0 0.25
        0.125 0.5 0.64549
        0.68383 0.80883 0.26311
        0.32124 0.125 0.0
        0.31617 0.19117 0.26311
        0.19624 0.81173 0.04041
        0.80376 0.5 0.08055
        0.375 0.31617 0.69264
        0.0 0.18828 0.17048
        0.18828 0.19624 0.54041
        0.625 0.5 0.54854
        0.81173 0.31617 0.62116
        0.31328 0.19117 0.34573
        0.5 0.625 0.04854
        0.67876 0.68673 0.92732
        0.31328 0.80883 0.65427
        0.125 0.67876 0.5
        0.67876 0.31328 0.07268
        0.375 0.31617 0.30737
        0.19117 0.31328 0.84573
        0.80883 0.31328 0.15427
        0.68673 0.19117 0.65427
        0.125 0.5 0.26494
        0.5 0.125 0.76494
        0.5 0.875 0.37007
        0.31617 0.80883 0.26311
        0.67876 0.68673 0.07268
        0.68383 0.19117 0.26311
        0.31617 0.375 0.80737
        0.67876 0.875 0.0
        0.80883 0.68673 0.84573
        0.68383 0.81173 0.87885
        0.31617 0.625 0.19264
        0.0 0.0 0.35478
        0.125 0.5 0.12994
        0.80376 0.81173 0.04041
        0.5 0.19624 0.58055
        0.19624 0.5 0.08055
        0.68383 0.625 0.19264
        0.80883 0.68673 0.15427
        0.68673 0.32124 0.57268
        0.68673 0.67876 0.42732
        0.80376 0.18828 0.04041
        0.31617 0.19117 0.73689
        0.81173 0.31617 0.37885
        0.0 0.80883 0.38698
        0.0 0.0 0.14523
        0.875 0.5 0.73506
        0.0 0.80883 0.61302
        0.68673 0.80883 0.65427
        0.5 0.75 0.19172
        0.68673 0.67876 0.57268
        0.68673 0.32124 0.42732
        0.68383 0.18828 0.87885
        0.5 0.375 0.04854
        0.18828 0.19624 0.45959
        0.80376 0.81173 0.95959
        0.5 0.375 0.95146
        0.0 0.0 0.85478
        0.81173 0.80376 0.45959
        0.125 0.5 0.35451
        0.5 0.125 0.85451
        0.5 0.75 0.80828
        0.31617 0.18828 0.12116
        0.19624 0.18828 0.04041
        0.31328 0.0 0.47586
        0.80883 0.31328 0.84573
        0.5 0.19624 0.41946
        0.19117 0.31328 0.15427
        0.125 0.5 0.87007
        0.0 0.0 0.64523
        0.19117 0.68383 0.76311
        0.31617 0.375 0.19264
        0.80883 0.0 0.11302
        0.80883 0.68383 0.76311
        0.625 0.68383 0.69264
        0.5 0.875 0.23506
        0.80883 0.0 0.88698
        0.875 0.0 0.93545
        0.5 0.0 0.45173
        0.31617 0.81173 0.87885
        0.19117 0.68673 0.84573
        0.0 0.125 0.56455
        0.19624 0.81173 0.95959
        0.625 0.68383 0.30737
        0.5 0.25 0.0
        0.5 0.125 0.62994
        0.375 0.5 0.45146
        0.125 0.5 0.73506
        0.25 0.5 0.30828
        0.18828 0.80376 0.54041
        0.0 0.31328 0.97586
        0.68673 0.0 0.52414
        0.875 0.0 0.06455
        0.32124 0.31328 0.92732
        0.67876 0.125 0.0
        0.5 0.625 0.95146
        0.875 0.5 0.12994
        0.68673 0.0 0.47586
        0.625 0.31617 0.69264
        0.19117 0.31617 0.23689
        0.0 0.5 0.04828
        0.75 0.5 0.69172
        0.0 0.875 0.43545
        0.80883 0.31617 0.23689
        0.875 0.5 0.87007
        0.32124 0.68673 0.92732
        0.68383 0.18828 0.12116
        0.19624 0.18828 0.95959
        0.31328 0.32124 0.57268
        0.75 0.5 0.5
        0.31328 0.67876 0.42732
        0.81173 0.0 0.67048
        0.5 0.125 0.14549
        0.625 0.31617 0.30737
        0.5 0.80376 0.58055
        0.75 0.5 0.30828
        0.18828 0.0 0.32952
        0.0 0.19117 0.77714
        0.18828 0.31617 0.62116
        0.5 0.125 0.37007
        0.31328 0.67876 0.57268
        0.31328 0.32124 0.42732
        0.32124 0.875 0.0
        0.0 0.19117 0.61302
        0.68673 0.80883 0.34573
        0.0 0.31328 0.02414
        0.5 0.0 0.54828
        0.25 0.5 0.69172
        0.0 0.19117 0.38698
        0.375 0.5 0.54854
        0.125 0.0 0.06455
        0.80376 0.18828 0.95959
        0.81173 0.80376 0.54041
        0.25 0.5 0.5
        0.32124 0.31328 0.07268
        0.0 0.0 0.75
        0.5 0.25 0.80828
        0.31617 0.18828 0.87885
        0.375 0.68383 0.69264
        0.125 0.0 0.93545
        0.31617 0.80883 0.73689
        0.68383 0.19117 0.73689
        0.18828 0.31617 0.37885
        0.5 0.875 0.76494
        0.68673 0.19117 0.34573
        0.32124 0.68673 0.07268
        0.875 0.32124 0.5
        0.81173 0.68383 0.62116
        0.0 0.19117 0.22287
        0.31328 0.0 0.52414
        0.0 0.80883 0.22287
        0.18828 0.68383 0.37885
        0.18828 0.68383 0.62116
        0.0 0.80883 0.77714
        0.5 0.75 0.0
        0.0 0.68673 0.97586
        0.68383 0.375 0.80737
        0.68383 0.80883 0.73689
        """
        # Water positions above are fractional cell coordinates.
        self.coord = "relative"
        self.cages = """
        12 0.0 -0.2531 -0.09655
        12 0.0 0.0 0.5
        15 -0.5 0.5 -0.12908
        15 0.0 0.5 0.56509
        12 -0.2531 0.0 0.59655
        15 0.5 0.5 -0.62908
        14 0.0 -0.26467 -0.6939
        12 -0.2531 0.0 0.40345
        15 0.5 0.5 0.12908
        12 0.0 0.2531 0.09655
        14 0.0 -0.26467 0.6939
        14 -0.26467 0.0 0.1939
        14 0.28496 0.5 0.0
        14 -0.26467 0.0 -0.1939
        15 -0.5 0.0 -0.06509
        15 -0.5 0.5 0.62908
        14 0.26467 0.0 -0.1939
        14 -0.5 0.28496 0.5
        14 0.26467 0.0 0.1939
        12 0.0 0.2531 -0.09655
        12 0.5 0.0 -0.69756
        14 0.0 0.26467 -0.6939
        14 0.5 0.5 0.25
        15 0.0 0.5 -0.56509
        12 -0.5 0.0 0.69756
        14 0.5 -0.28496 0.5
        14 -0.28496 -0.5 0.0
        12 0.0 -0.2531 0.09655
        14 0.0 0.26467 0.6939
        12 0.0 0.5 -0.19756
        15 0.5 0.0 0.06509
        12 0.2531 0.0 -0.59655
        14 -0.5 0.5 0.75
        12 0.2531 0.0 0.59655
        12 0.0 0.0 0.0
        12 0.0 0.5 0.19756
        """
        # Hydrogen-bond cutoff length used when pairing waters.
        self.bondlen = 3
        # NOTE(review): this string assignment is immediately overwritten by
        # the cellvectors() call below; kept as generated.
        self.cell = """
        13.050670933678957 13.050670933678957 57.77874231167655
        """
        self.density = 0.625698640770261
        self.cell = cellvectors(a=13.050670933678957,
                                b=13.050670933678957,
                                c=57.77874231167655)
/ModelFlowIb-1.56-py3-none-any.whl/modelclass2.py | import pandas as pd
import numpy as np
import subprocess
from itertools import chain,zip_longest
from numba import jit
import time
from modelclass import model
import modelclass as mc
import modelpattern as pt
import modelmf
class simmodel(model):
    ''' The model class, used to experiment

    Experimental subclass of :class:`modelclass.model` whose methods
    generate Python / numba / Cython source text that evaluates or
    Gauss-Seidel-iterates the model equations.  The generated code is a
    string; callers exec/compile it separately.
    '''

    def gouteval(self, databank):
        ''' takes a list of terms and translates to a evaluater function called los
        The model axcess the data through:databank.Dataframe.value[rowindex+lag,coloumnindex] which is very efficient
        This function has superseeded xouteval (:func:`modelclass.model.xouteval`
        This function assumes that the numpy values have been made to a list of lists to increase speed.
        '''
        # Map variable name -> column number in the databank.
        columnsnr = self.get_columnsnr(databank)
        fib = []
        # Header of the generated module: a factory returning `los`.
        fib.append('def make_los():\n')
        fib.append(' def los(values,row,solveorder, allvar):\n')
        fib.append(' '+'from math import exp, log \n')
        fib.append(' '+'import sys\n')
        fib.append(' '+'from model_cvx import mv_opt, mv_opt_prop\n')
        fib.append(' '+'from numpy import transpose\n')
        fib.append(' '+'from stem import ste \n')
        fib.append(' '+'from modelclass import sum_excel,logit \n')
        fib.append(' '+'try: \n')
        fib.append(' '+' '+'from cvxopt import matrix \n')
        fib.append(' '+'except:\n')
        fib.append(' '+' '+'pass \n')
        fib.append(' '+'try :\n')
        # One generated source line per endogenous variable, in solve order.
        for v in self.solveorder:
            if self.allvar[v]['dropfrml']:
                fib.append(' '+' '+'pass # '+v+'\n')
                continue
            # Locate the '=' so the LHS variable can be addressed without lag.
            for i, t in enumerate(self.allvar[v]['terms']):
                if (t.op == '='):
                    assignpos = i
                    break
            for i, t in enumerate(self.allvar[v]['terms']):
                if i == 0:
                    fib.append(' '+' ')
                if t.op:
                    ud = '' if (t.op == '$') else t.op.lower()
                elif t.number:
                    ud = t.number
                elif t.var:
                    # if self.allvar[t.var]['matrix']:
                    #     ud=t.var
                    # else:
                    if i <= assignpos-1:  # term is the left hand sided variable
                        ud = 'values[row]['+str(columnsnr[t.var])+']'
                    else:
                        if t.lag == '':
                            ud = 'values[row]'+'['+str(columnsnr[t.var])+']'
                        else:
                            ud = 'values[row'+t.lag+']['+str(columnsnr[t.var])+']'
                fib.append(ud)
            fib.append('\n')
        fib.append(' '+'except :\n')
        # NOTE(review): the "-14" offset maps the traceback line back to the
        # failing formula; it depends on the exact number of header lines
        # above — fragile if the header changes. TODO confirm.
        fib.append(' '+' '+'print("Error in",allvar[solveorder[sys.exc_info()[2].tb_lineno-14]]["frml"])\n')
        fib.append(' '+' '+'raise\n')
        fib.append(' '+'return \n')
        fib.append(' return los\n')
        # print (fib)
        return ''.join(fib)

    def cytouteval(self, databank, nr=1):
        ''' takes a list of terms and translates to a evaluater function called los
        The model axcess the data through:databank.Dataframe.value[rowindex+lag,coloumnindex] which is very efficient
        This function has superseeded xouteval (:func:`modelclass.model.xouteval`
        This function assumes creates a CYTHON function to realy increase speed.
        '''
        columnsnr = self.get_columnsnr(databank)
        fib = []
        #cython: language_level=3, boundscheck=False ,nonecheck =False, initializedcheck =False
        # fib.append('#!python\n')
        # fib.append('#cython: language_level=3, boundscheck=False ,nonecheck =False, initializedcheck =False \n')
        # Cython module header with all safety checks disabled for speed.
        fib.append('''
from numpy cimport ndarray
cimport numpy as np
cimport cython
ctypedef np.float64_t dtype_t
from cython cimport floating
from cython cimport double
@cython.wraparound(False)
@cython.boundscheck(False)
@cython.nonecheck(False)
@cython.initializedcheck(False)
@cython.optimize.unpack_method_calls(False) \n''')
        fib.append('def los'+str(nr)+'(np.ndarray[dtype_t, ndim=2] values,long row):\n')
        for v in self.solveorder:
            if self.allvar[v]['dropfrml']:
                fib.append(''+' '+'pass # '+v+'\n')
                continue
            for i, t in enumerate(self.allvar[v]['terms']):
                if (t.op == '='):
                    assignpos = i
                    break
            for i, t in enumerate(self.allvar[v]['terms']):
                if i == 0:
                    fib.append(''+' ')
                if t.op:
                    ud = '' if (t.op == '$') else t.op.lower()
                elif t.number:
                    ud = t.number
                elif t.var:
                    # if self.allvar[t.var]['matrix']:
                    #     ud=t.var
                    # else:
                    if i <= assignpos-1:  # term is the left hand sided variable
                        ud = 'values[row,'+str(columnsnr[t.var])+']'
                    else:
                        # 2-D indexing here (values[row, col]) vs the list
                        # indexing used in gouteval.
                        if t.lag == '':
                            ud = 'values[row,'+''+str(columnsnr[t.var])+']'
                        else:
                            ud = 'values[row'+t.lag+','+str(columnsnr[t.var])+']'
                fib.append(ud)
            fib.append('\n')
        # fib.append(' '+'except :\n')
        # fib.append(''+' '+'print("Error in",allvar[solveorder[sys.exc_info()[2].tb_lineno-14]]["frml"])\n')
        # fib.append(''+' '+'raise\n')
        fib.append(' '+'return \n')
        # print (fib)
        return ''.join(fib)

    def teststuff3(self):
        # Convenience wrapper: run stuff3 on the stored base dataframe.
        columsnr = self.get_columnsnr(self.basedf)
        return self.stuff3(self.basedf.values, 2, columsnr)

    def outsolve2(self, order='', exclude=[], chunk=1000, ljit=False):
        ''' returns a string with a function which calculates a
        Gauss-Seidle iteration of a model
        exclude is list of endogeneous variables not to be solved
        uses:
        model.solveorder the order in which the variables is calculated
        model.allvar[v]["gauss"] the ccalculation
        This function should split the functions in many functions easing numba for large models
        '''
        tjit = '@jit("f8[:](f8[:])") \n' if ljit else ''

        def grouper(iterable, n, fillvalue=' '):
            "Collect data into fixed-length chunks or blocks"
            # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
            args = [iter(iterable)] * n
            return zip_longest(*args, fillvalue=fillvalue)
        solveorder = order if order else self.solveorder
        gausslines = [self.make_gaussline(v) for v in solveorder if (v not in exclude) and (not self.allvar[v]['dropfrml'])]
        chunked = grouper(gausslines, chunk, '')
        chunked = [l for l in chunked][:]

        def chunksolve(number, lines):
            # One generated function per chunk of equations.
            out = (tjit+'def los'+str(number)+'(a):\n ' + '\n '.join(lines)
                   + '\n return #a ')
            # +'\n return a ')
            return out
        chunkedout = 'from numpy import exp, log \n'+'\n'.join([chunksolve(i, ch) for i, ch in enumerate(chunked)])+'\n'
        # masterout = tjit+'def los(a):\n '+' '.join([('a=los'+str(i)+'(a) \n') for (i,ch) in enumerate(chunked)])+'\n return a'
        masterout = tjit+'def los(a):\n '+' '.join([('los'+str(i)+'(a) \n') for (i, ch) in enumerate(chunked)])+'\n return a'
        # print(masterout)
        return chunkedout+masterout

    def outsolve3(self, order='', exclude=[], chunk=3000000, ljit=False, maxchunks=1000000, cache=False, chunkselect=0, maxlines=1000000000000):
        ''' returns a string with a function which calculates a
        Gauss-Seidle iteration of a model
        exclude is list of endogeneous variables not to be solved
        uses:
        model.solveorder the order in which the variables is calculated
        model.allvar[v]["gauss"] the ccalculation
        '''
        # Indentation strings for the generated source.
        short, long, longer = 4*' ', 8*' ', 12 * ' '
        if cache:
            tjit = (short+'@jit("f8[:](f8[:],f8)",cache=1) \n') if ljit else ''
        else:
            tjit = (short+'@jit("f8[:](f8[:],f8)" )\n') if ljit else ''
        # NOTE(review): this line unconditionally overwrites the two
        # assignments above, making the `cache` branch dead — confirm intent.
        tjit = (short+'@jit("f8[:](f8[:],f8)",fastmath=True)\n') if ljit else ''

        def grouper(iterable, n, fillvalue=''):
            "Collect data into fixed-length chunks or blocks"
            # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
            args = [iter(iterable)] * n
            return zip_longest(*args, fillvalue=fillvalue)

        def chunksolve(number, lines):
            out = (tjit+short+'def los'+str(number)+'(a,alfa):\n' + '\n'.join(l for l in lines if 0 < len(l.strip()))
                   + '\n return a ')
            return out
        solveorder = order if order else self.solveorder
        fib1 = ['# -*- coding: utf-8 -*-']
        fib1.append('def make(funks=[]):')
        fib1.append(short + 'import time')
        fib1.append(short + 'from numba import jit ')
        fib1.append(short + 'from modeluserfunk import '+(', '.join(pt.userfunk)).lower())
        fib1.append(short + 'from modelBLfunk import '+(', '.join(pt.BLfunk)).lower())
        # User-supplied functions are bound by position from the funks list.
        funktext = [short+f.__name__ + ' = funks['+str(i)+']' for i, f in enumerate(self.funks)]
        fib1.extend(funktext)
        f2 = [long + self.make_gaussline(v) for i, v in enumerate(solveorder)
              if (v not in exclude) and (not self.allvar[v]['dropfrml']) and i < maxlines]
        chunked = grouper(f2, chunk, '')
        # Either compile one selected chunk (debugging) or up to maxchunks.
        if chunkselect:
            chunkedlist = [l for (i, l) in enumerate(chunked) if i == chunkselect]
        else:
            chunkedlist = [l for (i, l) in enumerate(chunked) if i < maxchunks]
        chunkedout = ((short+f'print("Compiling chunk {i} "+time.strftime("%H:%M:%S")) \n' if ljit else '') + chunksolve(i, ch) for i, ch in enumerate(chunkedlist))
        masterout = '\n'+short+'def los(a,alfa):\n'+'\n'.join((long+'a=los'+str(i)+'(a,alfa)') for (i, ch) in enumerate(chunkedlist))+'\n'+long+'return a'
        out = '\n'.join(chain(fib1, chunkedout))+('\n'+short +
                                                  'print("Compiling master los: "+time.strftime("%H:%M:%S"))' + masterout
                                                  + '\n'+short+'print("Finished master los: "+time.strftime("%H:%M:%S"))\n'+short+'return los')
        return out

    def cytsolve(self, order='', exclude=[], chunk=2, ljit=False):
        ''' returns a string with a Cython function which calculates a
        Gauss-Seidle iteration of a model
        exclude is list of endogeneous variables not to be solved
        uses:
        model.solveorder the order in which the variables is calculated
        model.allvar[v]["gauss"] the ccalculation
        This function should split the functions in many functions easing cython for large models
        '''
        def grouper(iterable, n, fillvalue=' '):
            from itertools import zip_longest
            "Collect data into fixed-length chunks or blocks"
            # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
            args = [iter(iterable)] * n
            return zip_longest(*args, fillvalue=fillvalue)
        solveorder = order if order else self.solveorder
        gausslines = [self.allvar[v]['gauss'] for v in solveorder if (v not in exclude) and (not self.allvar[v]['dropfrml'])]
        chunked = grouper(gausslines, chunk, '')
        chunked = [l for l in chunked][:]

        def chunksolve(number, lines):
            # Each chunk becomes its own .pyx module (src<number>.pyx) with
            # a fixed header of imports and disabled Cython checks.
            out = ('''
import sys
from model_cvx import mv_opt, mv_opt_prop
from numpy import transpose , array
from stem import ste
from modelclass import sum_excel
from modelclass import pd_to_w_corp, pd_to_w_mrtg, pd_to_w_retail
try:
    from cvxopt import matrix
except:
    pass
# from numpy import exp, log
from libc.math cimport exp, log # to gain speed
from numpy cimport ndarray
cimport numpy as np
# cimport scipy.special.cython_special
cimport cython
ctypedef np.float64_t dtype_t
cdef inline dtype_t logit(dtype_t number): return -log(1.0/number-1.0)
cdef inline dtype_t max(dtype_t a, dtype_t b): return a if a >= b else b
cdef inline dtype_t min(dtype_t a, dtype_t b): return a if a <= b else b
from cython cimport floating
from cython cimport double
from numpy import array
@cython.cdivision(True)
@cython.wraparound(False)
@cython.boundscheck(False)
@cython.nonecheck(False)
@cython.initializedcheck(False)
@cython.optimize.unpack_method_calls(False) \n''' +
                   'def los'+str(number)+'(double[:] a):' + '\n '
                   + '\n cdef double alfa=0.2 '
                   + '\n '
                   + '\n '.join([l for l in lines if l != ''])
                   + '\n return ')  # 'a ')
            return out
        chunkedout = [chunksolve(i, ch) for i, ch in enumerate(chunked)]
        chunklist = [str(i) for (i, ch) in enumerate(chunked)]
        chunkimp = ['from s'+i+' import los'+i for i in chunklist]
        # The master module imports every chunk and calls them in order.
        masterout = ('''
from numpy cimport ndarray
from numpy import asarray
cimport numpy as np
cimport cython
ctypedef np.float64_t dtype_t
from cython cimport floating
from cython cimport double \n''' +
                     '\n'.join(chunkimp)+'\n ' +
                     '''
@cython.wraparound(False)
@cython.boundscheck(False)
@cython.nonecheck(False)
@cython.initializedcheck(False)
@cython.optimize.unpack_method_calls(False) \n''' +
                     'def los(double[:] a):\n ' +
                     ' '.join([('los'+i+'(a) \n') for i in chunklist])+' return asarray(a)')
        # print(masterout)
        return chunkedout, masterout, chunklist
if __name__ == '__main__':
    # Ad-hoc developer test script (Spyder-style #%% cells).
    # NOTE(review): this is debug scaffolding — it writes generated source to
    # disk, exec()s it, and is deliberately stopped by `assert 2==1` below;
    # later branches reference names (mtotal, ttimer, mc) that may be
    # undefined depending on which `if` blocks are enabled.
    #%%
    # Build a tiny synthetic model with `numberlines` equations.
    numberlines = 10
    chunksize = 10
    df = pd.DataFrame({'A0': [1., 2., 3, 4], 'B': [10., 20., 30., 40.]})
    mtest = simmodel(''.join(['FRMl <> a'+str(i)+'=b(-1) +'+str(i) + '*2 +c $ '
                              for i in range(numberlines)]))
    df = mc.insertModelVar(df, mtest)
    mtest.findpos()
    # Generate solver source, save it and exec it to obtain make()/los().
    tt = mtest.outsolve3(chunk=3, ljit=1)
    with open('solve.py', 'wt') as out:
        out.write(tt)
    print(tt)
    exec(tt, globals())
    solvefunk = make()
    #%%
    # xx = mtest(df,'1','2',setalt=True,setbase=True)
    if 1:
        # Write a small driver script exercising the compiled Cython los().
        testout = '''
from slos import los
import pandas as pd
#df = pd.DataFrame([[0.0,0.0,0.0,0.0,0.0,0.0,0.0] for i in range('''+str(len(df.columns))+''')]).T
assert 1==1
#aa = los(df.values,1)
xx = df.values.flatten()
bb = los(xx)
'''
        with open(r"test1.py", "w") as text_file:
            print(testout, file=text_file)
    # Deliberate stop: everything below is disabled experimentation.
    assert 2 == 1
    if 1:
        #%%
        # Load the full MONA model and its baseline databank.
        with open(r"J:\Udvikling - feedback\Systemfiler\super.fru", "r") as text_file:
            fmonas = text_file.read()
        mmonas = simmodel(fmonas)
        #%% get the baseline
        grund = pd.read_pickle(r'J:\Udvikling - feedback\Systemfiler\supergrund.pc')
        start = '2015q1'
        slut = '2017q4'
        #%% Run the baseline
        # xx=mmonas.sim2(grund,start='2015q1',slut='2017q4',antal=2000,first_test=500,conv='FY',silent=False,ljit=False,lcython=True)
    if 1:
        # Select which model to Cython-compile.
        testmodel = mmonas
        # testmodel = mtest
        testmodel = mtotal
        chunksize = 5000
        #%%
        # Generate chunked Cython sources and a distutils setup script,
        # then compile in-place in the cython2/ directory.
        testmodel.outgaussline2()
        sourcelist, master, liste = testmodel.cytsolve(chunk=chunksize)
        opt2 = ',extra_compile_args=["/Od","/GL-"] , extra_link_args=["-LTCG:OFF"]'  # faster compilation slower execution
        opt1 = ''
        opt3 = ',extra_compile_args=["/fp:fast"] '
        opt = opt3
        setupout = '''from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy
ext = [
''' + \
            '\n'.join(['Extension("s'+l+'", sources=["src'+l+'.pyx"],include_dirs=[numpy.get_include()]'+opt+'),' for l in liste+['los']])[:-1]\
            + ''' ]
setup(ext_modules=ext,
      cmdclass={"build_ext": build_ext})
'''
        with open(r"cython2\setup_model.py", "w") as text_file:
            print(setupout, file=text_file)
        for l, text in zip(liste, sourcelist):
            with open(r"cython2\src"+l+".pyx", "w") as text_file:
                print(text, file=text_file)
        with open(r"cython2\srclos.pyx", "w") as text_file:
            print(master, file=text_file)
        with open(r"cython2\cmodel2.bat", "w") as text_file:  # to make a stand alone bat file
            print('python setup_model.py build_ext --inplace ', file=text_file)
        # with ttimer('Compile') as t :
        result = subprocess.check_output(r'python setup_model.py build_ext --inplace', shell=True, cwd='cython2').decode()
        # subprocess.call(r'python setup_model.py build_ext --inplace',shell = True,cwd='cython2')
        #%%
        if mmonas is testmodel:
            with ttimer('1 th Simulation'):
                xx = mmonas.sim2(grund, start='2015q1', slut='2017q4', antal=2000, first_test=6000, conv='FY', silent=True, ljit=False, lcython=True)
            with ttimer('2 th Simulation'):
                xx = mmonas.sim2(grund, start='2015q1', slut='2017q4', antal=3000, first_test=6000, conv='FY', silent=True, ljit=False, lcython=True)
        #%%
        if mtest is testmodel:
            #%% Try the model
            with ttimer('first') as t:
                xx = mtest.sim2(df, 1, 2, antal=1000000, first_test=2000, conv='A0', silent=True, samedata=True,
                                ldumpvar=False, dumpvar=['A*', 'B*'], dumpwith=10, dumpdecimal=1, lcython=0)
        #%%
        # with open(r"test.pc", "w") as pc:
        #     pickle.dump(mtest,pc,4)
        #%%
    if 0:
        #%%
        # Stress-test scenario: load total model, base and adverse banks.
        with open(r"models\mtotal.fru", "r") as text_file:
            ftotal = text_file.read()
        base = pd.read_pickle(r'data\base.pc')
        adverse = pd.read_pickle(r'data\adverse.pc')
        #%%
        mtotal = simmodel(ftotal)
        # adverse = mtotal.xgenr(adverse ,'2016q1','2018Q4',samedata=True,silent=False)
        adversefb = adverse.copy()
        #%%
        adversefb.ITSIMPACT = 100.0
        mtotal.save = True
        #%%
        with ttimer():
            adversenew = mtotal.sim2(adversefb, antal=3, first_test=200, slut='2018q4', silent=True,
                                     conv='SHOCK__ITS__IT', ldumpvar=1, dumpvar=['G4_YER*IT'],
                                     dumpdecimal=3, dumpwith=10, lcython=True)
/MDP-3.6.tar.gz/MDP-3.6/mdp/nodes/pca_nodes.py | from __future__ import division
from builtins import str
from builtins import range
from past.utils import old_div
__docformat__ = "restructuredtext en"
import mdp
from mdp import numx
from mdp.utils import (mult, nongeneral_svd, CovarianceMatrix,
symeig, SymeigException)
import warnings as _warnings
class PCANode(mdp.Node):
    """Filter the input data through the most significant of its
    principal components.

    :ivar avg: Mean of the input data (available after training).

    :ivar v: Transposed of the projection matrix (available after training).

    :ivar d: Variance corresponding to the PCA components (eigenvalues of the
        covariance matrix).

    :ivar explained_variance: When output_dim has been specified as a fraction
        of the total variance, this is the fraction of the total variance that is
        actually explained.

    |

    .. admonition:: Reference

        More information about Principal Component Analysis, a.k.a. discrete
        Karhunen-Loeve transform can be found among others in
        I.T. Jolliffe, Principal Component Analysis, Springer-Verlag (1986).
    """

    def __init__(self, input_dim=None, output_dim=None, dtype=None,
                 svd=False, reduce=False, var_rel=1E-12, var_abs=1E-15,
                 var_part=None):
        """Initializes an object of type 'PCANode'.

        The number of principal components to be kept can be specified as
        'output_dim' directly (e.g. 'output_dim=10' means 10 components
        are kept) or by the fraction of variance to be explained
        (e.g. 'output_dim=0.95' means that as many components as necessary
        will be kept in order to explain 95% of the input variance).

        :param input_dim: Dimensionality of the input.
            Default is None.
        :type input_dim: int

        :param output_dim: Dimensionality of the output.
            Default is None.
        :type output_dim: int

        :param dtype: Datatype of the input.
            Default is None.
        :type dtype: numpy.dtype, str

        :param svd: If True use Singular Value Decomposition instead of the
            standard eigenvalue problem solver. Use it when PCANode
            complains about singular covariance matrices.
            Default is False.
        :type svd: bool

        :param reduce: Keep only those principal components which have a variance
            larger than 'var_abs' and a variance relative to the
            first principal component larger than 'var_rel' and a
            variance relative to total variance larger than 'var_part'
            (set var_part to None or 0 for no filtering).
            Default is False.
        :type reduce: bool

        .. note::
            When the *reduce* switch is enabled, the actual number
            of principal components (self.output_dim) may be different
            from that set when creating the instance.

        :param var_rel: Variance relative to first principal component threshold.
            Default is 1E-12.
        :type var_rel: float

        :param var_abs: Absolute variance threshold.
            Default is 1E-15.
        :type var_abs: float

        :param var_part: Variance relative to total variance threshold.
            Default is None.
        :type var_part: float
        """
        # this must occur *before* calling super!
        # (super().__init__ calls _set_output_dim, which reads it)
        self.desired_variance = None
        super(PCANode, self).__init__(input_dim, output_dim, dtype)
        self.svd = svd
        # set routine for eigenproblem
        if svd:
            self._symeig = nongeneral_svd
        else:
            self._symeig = symeig
        self.var_abs = var_abs
        self.var_rel = var_rel
        self.var_part = var_part
        self.reduce = reduce
        # empirical covariance matrix, updated during the training phase
        self._cov_mtx = CovarianceMatrix(dtype)
        # attributes defined in stop_training
        self.d = None  # eigenvalues
        self.v = None  # eigenvectors, first index for coordinates
        self.total_variance = None
        self.tlen = None
        self.avg = None
        self.explained_variance = None

    def _set_output_dim(self, n):
        # A float in (0, 1] is interpreted as "fraction of variance to
        # explain"; the actual integer output dim is then set in
        # _stop_training once the eigenvalue spectrum is known.
        if n <= 1 and isinstance(n, float):
            # set the output dim after training, when the variances are known
            self.desired_variance = n
        else:
            self._output_dim = n

    def _check_output(self, y):
        # check output rank
        if not y.ndim == 2:
            error_str = "y has rank %d, should be 2" % (y.ndim)
            raise mdp.NodeException(error_str)

        if y.shape[1] == 0 or y.shape[1] > self.output_dim:
            error_str = ("y has dimension %d"
                         ", should be 0<y<=%d" % (y.shape[1], self.output_dim))
            raise mdp.NodeException(error_str)

    def get_explained_variance(self):
        """The explained variance is the fraction of the original variance
        that can be explained by self._output_dim PCA components. If for
        example output_dim has been set to 0.95, the explained variance could
        be something like 0.958...

        .. note::
            If output_dim was explicitly set to be a fixed number
            of components, there is no way to calculate the explained variance.

        :return: The explained variance.
        :rtype: float
        """
        return self.explained_variance

    def _train(self, x):
        """Update the covariance matrix.

        :param x: The training data.
        :type x: numpy.ndarray
        """
        self._cov_mtx.update(x)

    def _adjust_output_dim(self):
        """This function is used if the output dimensions is smaller than the input
        dimension (so only the larger eigenvectors have to be kept). If required it
        sets the output dim.

        :return: The eigenvector range.
        :rtype: tuple
        """
        # if the number of principal components to keep is not specified,
        # keep all components
        if self.desired_variance is None and self.output_dim is None:
            self.output_dim = self.input_dim
            return None

        ## define the range of eigenvalues to compute
        # if the number of principal components to keep has been
        # specified directly
        if self.output_dim is not None and self.output_dim >= 1:
            # (eigenvalues sorted in ascending order)
            return (self.input_dim - self.output_dim + 1,
                    self.input_dim)
        # otherwise, the number of principal components to keep has been
        # specified by the fraction of variance to be explained
        else:
            return None

    def _stop_training(self, debug=False):
        """Stop the training phase.

        :param debug: Determines if singular matrices itself are stored in
            self.cov_mtx and self.dcov_mtx to be examined, given that
            stop_training fails because of singular covmatrices.
            Default is False.
        :type debug: bool

        :raises mdp.NodeException: If negative eigenvalues occur,
            the covariance matrix may be singular or no component
            amounts to variation exceeding var_abs.
        """
        # request the covariance matrix and clean up
        self.cov_mtx, avg, self.tlen = self._cov_mtx.fix()
        del self._cov_mtx

        # this is a bit counterintuitive, as it reshapes the average vector to
        # be a matrix. in this way, however, we spare the reshape
        # operation every time that 'execute' is called.
        self.avg = avg.reshape(1, avg.shape[0])

        # range for the eigenvalues
        rng = self._adjust_output_dim()

        # if we have more variables than observations we are bound to fail here
        # suggest to use the NIPALSNode instead.
        if debug and self.tlen < self.input_dim:
            # Fixed message: the condition above means there are FEWER
            # observations than input variables (the old text said "larger").
            wrn = ('The number of observations (%d) '
                   'is smaller than the number of input variables '
                   '(%d). You may want to use '
                   'the NIPALSNode instead.' % (self.tlen, self.input_dim))
            _warnings.warn(wrn, mdp.MDPWarning)

        # total variance can be computed at this point:
        # note that vartot == d.sum()
        vartot = numx.diag(self.cov_mtx).sum()

        ## compute and sort the eigenvalues
        # compute the eigenvectors of the covariance matrix (inplace)
        # (eigenvalues sorted in ascending order)
        try:
            d, v = self._symeig(self.cov_mtx, range=rng, overwrite=(not debug))
            # if reduce=False and svd=False. we should check for
            # negative eigenvalues and fail
            if not (self.reduce or self.svd or (self.desired_variance is
                                                not None)):
                if d.min() < 0:
                    raise mdp.NodeException(
                        "Got negative eigenvalues: %s.\n"
                        "You may either set output_dim to be smaller, "
                        "or set reduce=True and/or svd=True" % str(d))
        except SymeigException as exception:
            err = str(exception)+("\nCovariance matrix may be singular."
                                  "Try setting svd=True.")
            raise mdp.NodeException(err)

        # delete covariance matrix if no exception occurred
        if not debug:
            del self.cov_mtx

        # sort by descending order
        d = numx.take(d, list(range(d.shape[0]-1, -1, -1)))
        v = v[:, ::-1]

        if self.desired_variance is not None:
            # throw away immediately negative eigenvalues
            d = d[ d > 0 ]
            # the number of principal components to keep has
            # been specified by the fraction of variance to be explained
            varcum = (old_div(d, vartot)).cumsum(axis=0)
            # select only the relevant eigenvalues
            # number of relevant eigenvalues
            neigval = int(varcum.searchsorted(self.desired_variance) + 1.)
            #self.explained_variance = varcum[neigval-1]
            # cut
            d = d[0:neigval]
            v = v[:, 0:neigval]
            # define the new output dimension
            self.output_dim = int(neigval)

        # automatic dimensionality reduction
        if self.reduce:
            # remove entries that are smaller then var_abs and
            # smaller then var_rel relative to the maximum
            d = d[ d > self.var_abs ]
            # check that we did not throw away everything
            if len(d) == 0:
                raise mdp.NodeException('No eigenvalues larger than'
                                        ' var_abs=%e!'%self.var_abs)
            d = d[ old_div(d, d.max()) > self.var_rel ]

            # filter for variance relative to total variance
            if self.var_part:
                d = d[ old_div(d, vartot) > self.var_part ]

            v = v[:, 0:d.shape[0]]
            self._output_dim = d.shape[0]

        # set explained variance
        self.explained_variance = old_div(d.sum(), vartot)

        # store the eigenvalues
        self.d = d
        # store the eigenvectors
        self.v = v
        # store the total variance
        self.total_variance = vartot

    def get_projmatrix(self, transposed=1):
        """Returns the projection matrix.

        :param transposed: Determines whether the transposed projection
            matrix is returned.
            Default is True.
        :type transposed: bool

        :return: The projection matrix.
        :rtype: numpy.ndarray
        """
        self._if_training_stop_training()
        # self.v already holds the transposed projection matrix
        if transposed:
            return self.v
        return self.v.T

    def get_recmatrix(self, transposed=1):
        """Returns the back-projection matrix
        (i.e. the reconstruction matrix).

        :param transposed: Determines whether the transposed back-projection matrix
            (i.e. the reconstruction matrix) is returned.
            Default is True.
        :type transposed: bool

        :return: The back-projection matrix (i.e. the reconstruction matrix).
        :rtype: numpy.ndarray
        """
        self._if_training_stop_training()
        if transposed:
            return self.v.T
        return self.v

    def _execute(self, x, n=None):
        """Project the input on the first 'n' principal components.

        If 'n' is not set, use all available components.

        :param x: Input with at least 'n' principle components.
        :type x: numpy.ndarray

        :param n: Number of first principle components.
        :type n: int

        :return: The projected input.
        :rtype: numpy.ndarray
        """
        if n is not None:
            return mult(x-self.avg, self.v[:, :n])
        return mult(x-self.avg, self.v)

    def _inverse(self, y, n=None):
        """Project data from the output to the input space using the
        first 'n' components.

        If 'n' is not set, use all available components.

        :param y: Data to be projected to the input space.
        :type y: numpy.ndarray

        :param n: Number of first principle components.
        :type n: int

        :return: The projected data
        :rtype: numpy.ndarray
        """
        if n is None:
            n = y.shape[1]
        if n > self.output_dim:
            error_str = ("y has dimension %d,"
                         " should be at most %d" % (n, self.output_dim))
            raise mdp.NodeException(error_str)

        v = self.get_recmatrix()
        # n is always an int at this point (defaulted above), so the old
        # "if n is not None" fallback branch was unreachable and is removed.
        return mult(y, v[:n, :]) + self.avg
class WhiteningNode(PCANode):
    """*Whiten* the input data by filtering it through the most
    significant of its principal components.

    All output signals have zero mean, unit variance and are decorrelated.

    :ivar avg: Mean of the input data (available after training).

    :ivar v: Transpose of the projection matrix (available after training).

    :ivar d: Variance corresponding to the PCA components (eigenvalues of
        the covariance matrix).

    :ivar explained_variance: When output_dim has been specified as a
        fraction of the total variance, this is the fraction of the total
        variance that is actually explained.
    """

    def _stop_training(self, debug=False):
        """Stop the training phase.

        :param debug: Determines if singular matrices itself are stored in
            self.cov_mtx and self.dcov_mtx to be examined, given that
            stop_training fails because of singular covmatrices.
            Default is False.
        :type debug: bool
        """
        super(WhiteningNode, self)._stop_training(debug)

        ##### whiten the filters
        # self.v is now the _whitening_ matrix
        # NOTE(review): assumes all kept eigenvalues in self.d are strictly
        # positive; a zero eigenvalue would cause a division by zero here.
        self.v = old_div(self.v, numx.sqrt(self.d))

    def get_eigenvectors(self):
        """Return the eigenvectors of the covariance matrix.

        :return: The eigenvectors of the covariance matrix.
        :rtype: numpy.ndarray
        """
        self._if_training_stop_training()
        # undo the whitening scaling applied in _stop_training
        return numx.sqrt(self.d)*self.v

    def get_recmatrix(self, transposed=1):
        """Returns the back-projection matrix
        (i.e. the reconstruction matrix).

        :param transposed: Determines whether the transposed back-projection matrix
            (i.e. the reconstruction matrix) is returned.
            Default is True.
        :type transposed: bool

        :return: The back-projection matrix (i.e. the reconstruction matrix).
        :rtype: numpy.ndarray
        """
        self._if_training_stop_training()
        # since self.v = V / sqrt(d), multiplying by d gives V * sqrt(d),
        # which maps whitened outputs back to the (centered) input space
        v_inverse = self.v*self.d
        if transposed:
            return v_inverse.T
        return v_inverse
/DeerLab-1.1.1.tar.gz/DeerLab-1.1.1/deerlab/correctphase.py | import numpy as np
from scipy.optimize import fminbound
def correctphase(V, full_output=False, offset=False):
    r"""
    Phase correction of complex-valued data.

    Rotates the phase of complex-valued data ``V`` to minimize the imaginary component.
    Among the two phases that minimize the imaginary part, the one that gives a real
    part with a positive average is used.

    For two-dimensional datasets ``V2D``, e.g. from measurements with multiple scans,
    each slice ``V2D[:,i]`` is phase-rotated independently.

    If the ``offset`` parameter is ``True``, the function will correct for a potential
    non-zero mean imaginary offset.

    Parameters
    ----------
    V : array_like, or list of array_like
        Complex-valued 1D or 2D signal.
    full_output : boolean, optional
        If ``True``, return additional output arguments. (default: ``False``)
    offset : boolean
        Enables numerical phase correction while accounting for a non-zero mean imaginary offset.
        By default disabled.

    Returns
    -------
    Vr : ndarray
        Real part of the phase-corrected data.
    Vi : ndarray (only if ``full_output==True``)
        Imaginary part of the phase-corrected data.
    phase : float scalar or ndarray (only if ``full_output==True``)
        Fitted phase, or list of phases for 2D data, used for correction, in radians.

    Raises
    ------
    ValueError
        If ``V`` is not complex-valued.
    """
    if not np.iscomplexobj(V):
        raise ValueError("Data set must be complex-valued.")

    data1d = V.ndim == 1
    V_2d = V.copy()
    if data1d:
        # work internally on an (npoints, 1) column so that the 1D and 2D
        # cases share a single code path
        V_2d = V_2d[:, np.newaxis]

    if offset:
        # Account for a potential imaginary component with non-zero mean:
        # fit the phase numerically so that the imaginary part, after
        # removing its mean, has minimal power.
        # NOTE(review): this fits ONE common phase for the whole dataset, so
        # for 2D input the columns are not phased independently here (the
        # fitted `phimin` has length 1) - confirm that offset=True is only
        # intended for 1D signals.
        def objfcn(phi):
            Vim_corr = np.imag(V_2d*np.exp(1j*phi))
            Vim_corr -= np.average(Vim_corr, axis=0)
            return np.sum(Vim_corr**2)
        # Find one of the minima numerically
        phimin = np.atleast_1d(fminbound(objfcn, 0, np.pi, xtol=1.74e-3))
    else:
        # The following determines the phase that minimizes the cost
        # function = sum of squares of imaginary part of V*exp(1j*phi)
        # This cost function has the analytical form
        #
        #    (A+B) + (B-A)*cos(2*phi) + C*sin(2*phi)
        #  =  offset + amp*cos(2*phi-phi0)
        #
        # where
        #    A = sum_k real(V_k)^2 / 2
        #    B = sum_k imag(V_k)^2 / 2
        #    C = sum_k real(V_k)*imag(V_k)
        #
        #    offset = A+B
        #    amp = sqrt((B-A)^2+C^2)
        #    phi0 = atan2(C, B-A)
        #
        # The cost function has two minima:
        #    phi = phi0/2 + pi/2   and   phi = phi0/2 + 3*pi/2

        # Calculate phase that minimizes cost function (per column)
        Vr = np.real(V_2d)
        Vi = np.imag(V_2d)
        A = np.sum(Vr**2, axis=0)/2
        B = np.sum(Vi**2, axis=0)/2
        C = np.sum(Vr*Vi, axis=0)
        phi0 = np.arctan2(C, B-A)
        phimin = phi0/2 + np.pi/2  # one of the two minimizers

    # Apply phase rotation
    V_2d *= np.exp(1j*phimin)[None,:]

    # Pick minimizer that yields positive average of real part
    reAvg = np.average(V_2d.real, axis=0)
    idx = reAvg < 0
    phimin[idx] += np.pi
    V_2d[:,idx] = -V_2d[:,idx]

    # Assemble output
    if data1d:
        V_2d = np.squeeze(V_2d, axis=1)
    Vreal = np.real(V_2d)
    Vimag = np.imag(V_2d)
    if offset:
        # fold the (absolute) mean imaginary offset into the real part and
        # remove it from the imaginary part
        Vreal += np.abs(np.average(Vimag, axis=0))
        Vimag -= np.average(Vimag, axis=0)

    if full_output:
        return Vreal, Vimag, phimin
    return Vreal
/LbSoftConfDBMigration-0.0.1.tar.gz/LbSoftConfDBMigration-0.0.1/LbSoftConfDB/py2neo/geoff.py |
# Copyright 2011-2012 Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The :py:mod:`py2neo.geoff` module deals with Geoff data handling.
All Geoff functionality is focused around the :py:class:`Subgraph` class and
requires the Geoff server plugin to be installed (see
`<http://geoff.nigelsmall.net/>`_). A subgraph is a local, abstract
representation of a portion of graph data and may be used to build up a data
structure within a client application before submitting it to a database server
in a single request which can act to reduce the amount of network traffic
carried out.
The following example shows how to build a simple client-side graph and submit
it to the database server for insertion::
>>> from py2neo import geoff
>>> s = geoff.Subgraph({"name": "Alice"}, {"name": "Bob"}, (0, "KNOWS", 1)}
>>> s.insert_into(graph_db)
"""
import json
import re
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
from . import neo4j, rest, util
import logging
logger = logging.getLogger(__name__)
UNKNOWN, NODE, RELATIONSHIP = 0x00, 0x01, 0x02
PATTERNS = {
NODE: re.compile(
r"^\(([0-9A-Za-z_]+)\)$"
),
RELATIONSHIP: re.compile(
r"^\(([0-9A-Za-z_]+)\)-\[([0-9A-Za-z_]*):([^\]]+)\]->\(([0-9A-Za-z_]+)\)$"
),
}
def _parse(string):
"""Convert Geoff string into abstract nodes and relationships.
"""
rules = []
for i, line in enumerate(string.splitlines()):
if not line or line.startswith("#"):
continue
rule = re.split("\s+", line, 1)
try:
if len(rule) > 1:
rule[1] = json.loads(rule[1])
except TypeError:
pass
descriptor = str(rule[0])
data = dict(rule[1]) if len(rule) > 1 else {}
m = PATTERNS[NODE].match(descriptor)
if m:
rules.append((NODE, str(m.group(1)) or None, data))
continue
m = PATTERNS[RELATIONSHIP].match(descriptor)
if m:
rules.append((RELATIONSHIP, str(m.group(2)) or None, (
str(m.group(1)), str(m.group(3)),
str(m.group(4)), data,
)))
continue
rules.append((UNKNOWN, None, (descriptor, data)))
return rules
class Subgraph(object):
    """Local, abstract representation of a graph portion.

    Nodes and relationships are held as abstract descriptions (dicts and
    tuples) keyed by name; concrete :py:class:`py2neo.neo4j.Node` and
    :py:class:`Relationship` objects merged in are de-duplicated by URI.
    """

    def __init__(self, *items):
        # ordered list of (type, key) pairs remembering insertion order,
        # used by dumps() to reproduce the rules in order
        self._keys = []
        self._nodes = {}
        self._relationships = {}
        self._unknowns = []
        # concrete-entity URI -> local abstract key (for de-duplication)
        self._real_nodes = {}
        self._real_relationships = {}
        self.add(*items)

    def __len__(self):
        return len(self._nodes) + len(self._relationships)

    def __str__(self):
        return self.dumps()

    def _add_abstract_node(self, abstract, key=None):
        # Register an abstract node; auto-number it when no key is given.
        if not key:
            key = len(self._nodes)
        key = str(key)
        self._keys.append((NODE, key))
        self._nodes[key] = abstract
        return key

    def _add_abstract_relationship(self, abstract, key=None):
        # Register an abstract relationship; auto-number when anonymous.
        if not key:
            key = len(self._relationships)
        key = str(key)
        self._keys.append((RELATIONSHIP, key))
        self._relationships[key] = abstract
        return key

    def _add_unknown_abstract(self, abstract):
        # Unknown rules are kept verbatim and keyed by integer position.
        key = len(self._unknowns)
        self._keys.append((UNKNOWN, key))
        self._unknowns.append(abstract)
        return key

    def _merge_real_node(self, node):
        # De-duplicate by URI so the same concrete node is added only once.
        uri = str(node._uri)
        if uri not in self._real_nodes:
            self._real_nodes[uri] = self._add_abstract_node(node.get_properties())
        return self._real_nodes[uri]

    def _merge_real_relationship(self, relationship):
        # De-duplicate by URI; also pulls in both endpoint nodes.
        uri = str(relationship._uri)
        if uri not in self._real_relationships:
            start_node = self._merge_real_node(relationship.start_node)
            end_node = self._merge_real_node(relationship.end_node)
            self._real_relationships[uri] = self._add_abstract_relationship((
                start_node, relationship.type, end_node,
                relationship.get_properties()
            ))
        return self._real_relationships[uri]

    @property
    def nodes(self):
        """Return all nodes within this Subgraph.
        """
        return self._nodes

    @property
    def relationships(self):
        """Return all relationships within this Subgraph.
        """
        return self._relationships

    def add(self, *items):
        """Add nodes and relationships into this subgraph.

        This method will attempt to take the most appropriate action depending
        on the type of data supplied. Supported types are treated according to
        the list below:

        :py:const:`list`
            a sub-list of items; these will be added recursively
        :py:const:`str` or :py:const:`unicode`
            a textual Geoff rule (e.g. `'(A) {"name": "Alice"}'`)
        :py:const:`dict`
            an abstract node representation (e.g. `{u'name': u'Alice'}`)
        :py:const:`tuple`
            an abstract relationship representation (e.g. `(0, 'KNOWS', 1)`);
            the start and end node references may be numeric or textual and
            should refer to nodes within the same subgraph
        :py:class:`py2neo.neo4j.Node`
            a concrete node object
        :py:class:`py2neo.neo4j.Relationship`
            a concrete relationship object
        :py:class:`py2neo.neo4j.Path`
            a path object; all nodes and relationships will be added
        :py:class:`py2neo.geoff.Subgraph`
            a subgraph object; all nodes and relationships will be added
        """
        for item in items:
            if not item:
                continue
            if isinstance(item, list):
                self.add(*item)
            elif util.is_string(item):
                rules = _parse(item)
                for type, key, abstract in rules:
                    if type == NODE:
                        self._add_abstract_node(abstract, key)
                    elif type == RELATIONSHIP:
                        self._add_abstract_relationship(abstract, key)
                    else:
                        self._add_unknown_abstract(abstract)
            elif isinstance(item, dict):
                self._add_abstract_node(item)
            elif isinstance(item, tuple):
                self._add_abstract_relationship(item)
            elif isinstance(item, neo4j.Node):
                self._merge_real_node(item)
            elif isinstance(item, neo4j.Relationship):
                self._merge_real_relationship(item)
            elif isinstance(item, neo4j.Path):
                self.add(*item.nodes)
                self.add(*item.relationships)
            elif isinstance(item, Subgraph):
                self.add(*item.nodes)
                self.add(*item.relationships)
            else:
                raise TypeError(item)

    def dump(self, file):
        """Dump Geoff rules from this subgraph into a file.
        """
        file.write(self.dumps())

    def dumps(self):
        """Dump Geoff rules from this subgraph into a string.
        """
        rules = []
        for type, key in self._keys:
            if type == NODE:
                abstract = self._nodes[key]
                rules.append("({0}) {1}".format(key, json.dumps(abstract)))
            elif type == RELATIONSHIP:
                abstract = self._relationships[key]
                if len(abstract) > 3:
                    data = json.dumps(abstract[3])
                else:
                    data = "{}"
                rules.append("({0})-[{1}:{2}]->({3}) {4}".format(
                    abstract[0], key, abstract[1], abstract[2], data
                ))
            else:
                abstract = self._unknowns[key]
                rules.append("{0} {1}".format(abstract[0], json.dumps(abstract[1])))
        return "\n".join(rules)

    def load(self, file):
        """Load Geoff rules from a file into this subgraph.
        """
        self.add(file.read())

    def loads(self, str):
        """Load Geoff rules from a string into this subgraph.
        """
        # NOTE(review): the parameter name shadows the builtin `str`; kept
        # as-is to avoid breaking callers that pass it by keyword.
        self.add(str)

    def insert_into(self, graph_db, **params):
        """Insert this subgraph into a graph database via Geoff plugin.
        """
        try:
            uri = graph_db._extension_uri('GeoffPlugin', 'insert')
        except NotImplementedError:
            raise NotImplementedError("Geoff plugin not available for insert")
        rs = graph_db._send(
            rest.Request(graph_db, "POST", uri, {'subgraph': [self.dumps()], 'params': dict(params)}
        ))
        return rs.body['params']

    def merge_into(self, graph_db, **params):
        """Merge this subgraph into a graph database via Geoff plugin.
        """
        try:
            uri = graph_db._extension_uri('GeoffPlugin', 'merge')
        except NotImplementedError:
            raise NotImplementedError("Geoff plugin not available for merge")
        rs = graph_db._send(
            rest.Request(graph_db, "POST", uri, {'subgraph': [self.dumps()], 'params': dict(params)}
        ))
        return rs.body['params']

    def delete_from(self, graph_db, **params):
        """Delete this subgraph from a graph database via Geoff plugin.
        """
        try:
            uri = graph_db._extension_uri('GeoffPlugin', 'delete')
        except NotImplementedError:
            raise NotImplementedError("Geoff plugin not available for delete")
        rs = graph_db._send(
            rest.Request(graph_db, "POST", uri, {'subgraph': [self.dumps()], 'params': dict(params)}
        ))
        return rs.body['params']
/BlueWhale3-3.31.3.tar.gz/BlueWhale3-3.31.3/Orange/widgets/model/owlinearregression.py | from itertools import chain
from AnyQt.QtCore import Qt
from AnyQt.QtWidgets import QLayout, QSizePolicy
from Orange.data import Table, Domain, ContinuousVariable, StringVariable
from Orange.regression.linear import (
LassoRegressionLearner, LinearRegressionLearner,
RidgeRegressionLearner, ElasticNetLearner
)
from Orange.widgets import settings, gui
from Orange.widgets.utils.owlearnerwidget import OWBaseLearner
from Orange.widgets.utils.widgetpreview import WidgetPreview
from Orange.widgets.widget import Output
from Orange.i18n_config import *
def __(key):
    """Look up a translation for *key* within this widget's i18n namespace."""
    namespace = "widget.model.model.owlinearregression."
    return i18n.t(namespace + key)
class OWLinearRegression(OWBaseLearner):
    # Orange widget exposing linear regression learners (OLS, ridge, lasso,
    # elastic net) with a GUI for choosing the regularization type/strength.
    name = __("name")
    description = __("desc")
    icon = "icons/LinearRegression.svg"
    replaces = [
        "Orange.widgets.regression.owlinearregression.OWLinearRegression",
    ]
    priority = 60
    keywords = ["ridge", "lasso", "elastic net"]

    LEARNER = LinearRegressionLearner

    class Outputs(OWBaseLearner.Outputs):
        # extra output channel carrying the fitted coefficients as a Table
        coefficients = Output("Coefficients", Table, explicit=True,
                              label=i18n.t("widget.model.model.common.coefficient"))

    #: Types of regularization; indices must match OLS/Ridge/Lasso/Elastic below
    REGULARIZATION_TYPES = [__("btn.no_regular"), __("btn.ridge_regression_l2"),
                            __("btn.lasso_regression_l1"), __("btn.elastic_net_regression")]
    OLS, Ridge, Lasso, Elastic = 0, 1, 2, 3

    # persisted widget settings
    ridge = settings.Setting(False)
    reg_type = settings.Setting(OLS)
    alpha_index = settings.Setting(0)  # index into `alphas`, not the value itself
    l2_ratio = settings.Setting(0.5)
    fit_intercept = settings.Setting(True)
    autosend = settings.Setting(True)

    # non-uniform grid of regularization strengths selectable via the slider
    alphas = list(chain([x / 10000 for x in range(1, 10)],
                        [x / 1000 for x in range(1, 20)],
                        [x / 100 for x in range(2, 20)],
                        [x / 10 for x in range(2, 9)],
                        range(1, 20),
                        range(20, 100, 5),
                        range(100, 1001, 100)))

    def add_main_layout(self):
        # Build the widget controls: intercept checkbox, regularization
        # radio buttons, alpha slider and the L1/L2 mixing slider.
        # this is part of init, pylint: disable=attribute-defined-outside-init
        box = gui.hBox(self.controlArea, i18n.t("common.general.parameter"))
        gui.checkBox(box, self, "fit_intercept",
                     __("row_fit_intercept"),
                     callback=self._intercept_changed)

        box = gui.hBox(self.controlArea, i18n.t("common.algorithm.regularization"))
        gui.radioButtons(box, self, "reg_type",
                         btnLabels=self.REGULARIZATION_TYPES,
                         callback=self._reg_type_changed)

        self.alpha_box = box2 = gui.vBox(box, margin=10)
        gui.widgetLabel(box2, __("row_regularization_strength"))
        gui.hSlider(
            box2, self, "alpha_index",
            minValue=0, maxValue=len(self.alphas) - 1,
            callback=self._alpha_changed, createLabel=False)
        box3 = gui.hBox(box2)
        box3.layout().setAlignment(Qt.AlignCenter)
        self.alpha_label = gui.widgetLabel(box3, "")
        self._set_alpha_label()

        box4 = gui.vBox(box2, margin=0)
        gui.widgetLabel(box4, __("row_elastic_net_mix"))
        box5 = gui.hBox(box4)
        gui.widgetLabel(box5, "L1")
        self.l2_ratio_slider = gui.hSlider(
            box5, self, "l2_ratio", minValue=0.01, maxValue=0.99,
            intOnly=False, ticks=0.1, createLabel=False, width=120,
            step=0.01, callback=self._l2_ratio_changed)
        gui.widgetLabel(box5, "L2")
        self.l2_ratio_label = gui.widgetLabel(
            box4, "",
            sizePolicy=(QSizePolicy.MinimumExpanding, QSizePolicy.Fixed))
        self.l2_ratio_label.setAlignment(Qt.AlignCenter)
        # NOTE(review): box5 is rebound here to a fresh, empty box that is
        # never populated - looks like leftover layout code; confirm before
        # removing.
        box5 = gui.hBox(self.controlArea)
        box5.layout().setAlignment(Qt.AlignCenter)
        self._set_l2_ratio_label()

        self.layout().setSizeConstraint(QLayout.SetFixedSize)
        # alpha only applies to regularized variants; L1/L2 mix only to elastic net
        self.controls.alpha_index.setEnabled(self.reg_type != self.OLS)
        self.l2_ratio_slider.setEnabled(self.reg_type == self.Elastic)

    def handleNewSignals(self):
        self.apply()

    def _intercept_changed(self):
        self.apply()

    def _reg_type_changed(self):
        # keep control enablement in sync with the selected regularization
        self.controls.alpha_index.setEnabled(self.reg_type != self.OLS)
        self.l2_ratio_slider.setEnabled(self.reg_type == self.Elastic)
        self.apply()

    def _set_alpha_label(self):
        self.alpha_label.setText(__("label.alpha").format(self.alphas[self.alpha_index]))

    def _alpha_changed(self):
        self._set_alpha_label()
        self.apply()

    def _set_l2_ratio_label(self):
        # display as "L1-share : L2-share"
        self.l2_ratio_label.setText(
            "{:.{}f} : {:.{}f}".format(1 - self.l2_ratio, 2, self.l2_ratio, 2))

    def _l2_ratio_changed(self):
        self._set_l2_ratio_label()
        self.apply()

    def create_learner(self):
        # Instantiate the learner matching the selected regularization type.
        alpha = self.alphas[self.alpha_index]
        preprocessors = self.preprocessors
        args = dict(preprocessors=preprocessors,
                    fit_intercept=self.fit_intercept)
        if self.reg_type == OWLinearRegression.OLS:
            learner = LinearRegressionLearner(**args)
        elif self.reg_type == OWLinearRegression.Ridge:
            learner = RidgeRegressionLearner(alpha=alpha, **args)
        elif self.reg_type == OWLinearRegression.Lasso:
            learner = LassoRegressionLearner(alpha=alpha, **args)
        elif self.reg_type == OWLinearRegression.Elastic:
            learner = ElasticNetLearner(alpha=alpha,
                                        l1_ratio=1 - self.l2_ratio, **args)
        return learner

    def update_model(self):
        # Besides updating the model output, publish the fitted coefficients
        # (with the intercept prepended when fitted) as a Table.
        super().update_model()
        coef_table = None
        if self.model is not None:
            domain = Domain(
                [ContinuousVariable(i18n.t("common.general.coef"))], metas=[StringVariable(i18n.t("common.general.name"))])
            coefs = list(self.model.coefficients)
            names = [attr.name for attr in self.model.domain.attributes]
            if self.fit_intercept:
                coefs.insert(0, self.model.intercept)
                names.insert(0, i18n.t("common.general.intercept"))
            coef_table = Table.from_list(domain, list(zip(coefs, names)))
            coef_table.name = i18n.t("common.general.coefficients")
        self.Outputs.coefficients.send(coef_table)

    def get_learner_parameters(self):
        # Summarize the chosen settings for the widget's report.
        regularization = __("report.no_regularization")
        if self.reg_type == OWLinearRegression.Ridge:
            regularization = (__("report.ridge_regression_l2")
                              .format(self.alphas[self.alpha_index]))
        elif self.reg_type == OWLinearRegression.Lasso:
            regularization = (__("report.lasso_regression_l1")
                              .format(self.alphas[self.alpha_index]))
        elif self.reg_type == OWLinearRegression.Elastic:
            regularization = (__("report.elastic_net_regression")
                              .format(self.alphas[self.alpha_index],
                                      self.l2_ratio,
                                      1 - self.l2_ratio))
        return (
            (i18n.t("common.algorithm.regularization"), regularization),
            (i18n.t("common.algorithm.fit_intercept"), ["No", "Yes"][self.fit_intercept])
        )
if __name__ == "__main__":  # pragma: no cover
    # Quick interactive preview of the widget on the bundled housing data.
    WidgetPreview(OWLinearRegression).run(Table("housing"))
# Installation
These instructions are for Ubuntu 16.04 or later. Please use the appropriate commands for your system.
### Install MySQL
DelegatorBot uses MySQL. In short, to install it, execute these commands.
```
sudo apt-get update
sudo apt-get install mysql-server
mysql_secure_installation
```
For a more detailed set of instructions please read [How To Install MySQL on Ubuntu 16.04](https://www.digitalocean.com/community/tutorials/how-to-install-mysql-on-ubuntu-16-04)
### Setup a MySQL user and database
Create a user for your MySQL database. You will need to know the username and password for this user in order to configure DelegatorBot.
Then create a database. You will also need to know the name of this database. Running the bot for the first time will create all necessary tables.
```
CREATE USER 'newuser'@'localhost' IDENTIFIED BY 'password';
GRANT ALL PRIVILEGES ON * . * TO 'newuser'@'localhost';
FLUSH PRIVILEGES;
CREATE DATABASE databasename;
```
### Install DelegatorBot
To install DelegatorBot:
```
pip3 install delegatorbot
```
Or from source
```
git clone https://github.com/ArtoLabs/DelegatorBot.git
```
### Setup DelegatorBot
After installation it's necessary to configure the settings file. You'll need to navigate to the source files directory, so if you used pip this
should be in your site-packages folder, or the folder you cloned into. There you will find a file named `settings.py`. It is best to copy this
file to a new file with a new name. This name will be used to execute commands. The name of this file can be anything, but it's recommended you
give it the same name as your bot.
### Setting up database tables and SimpleSteem
The first time DelegatorBot runs, it will step through a series of questions meant to configure SimpleSteem, the Python module that handles all the connections to the blockchain. DelegatorBot uses its own custom settings when interacting with SimpleSteem, so these initial questions can safely be left blank. For each question, simply hit the enter key and that setting will be configured automatically. This list of questions only appears the very first time you run DelegatorBot.
Similarly, the first time DelegatorBot needs to interact with the database it will create all necessary tables if they don't already exist.
| PypiClean |
/Dovetail-1.0beta2.tar.gz/Dovetail-1.0beta2/dovetail/directives/packages.py | # This class implements functions declared elsewhere and
# cannot control the arguments
# pylint: disable-msg=W0613
from setuptools.command import easy_install
from pkg_resources import working_set, parse_requirements, VersionConflict
from dovetail.model import TaskWrapper
from dovetail.util import Logger, MissingRequirement, pp_exception
def pp_requirements(requirements):
    """Render a list of requirement specifiers as one comma-separated string.

    :param requirements: Requirements in the form "pylint" or "coverage>3"
    :type requirements: list of string
    :rtype: string
    """
    separator = ", "
    return separator.join(requirements)
def not_present(requirements, stop_on_error=True):
    """Determine which of the given requirements are currently unsatisfied.

    :param requirements: Requirements in the form "pylint" or "coverage>3"
    :type requirements:  list of string
    :param stop_on_error: Default True; if True, a raised
                          :exc:`pkg_resources.VersionConflict` is propagated to
                          the caller. Otherwise the exception is handled and the
                          conflicting requirement is reported as missing.
    :type stop_on_error:  boolean
    :return: A list of unsatisfied requirements - may be empty
    :rtype: list of string
    :raises: :exc:`pkg_resources.VersionConflict` if a requirement is in conflict with the
             environment"""
    missing = []
    for spec in requirements:
        # A single spec string can expand to several parsed requirement objects
        for parsed_req in parse_requirements(spec):
            try:
                if working_set.find(parsed_req) is None:
                    Logger.debug("Missing {0}".format(spec))
                    missing.append(spec)
                else:
                    Logger.debug("Requirement {0} is present".format(spec))
            except VersionConflict:
                Logger.major("Requirement {0} is in conflict with existing packages - aborting".format(spec))
                if stop_on_error:
                    raise
                missing.append(spec)
    return missing
def install(requirements):
    """Uses :mod:`setuptools.commands.easy_install` to install a series of
    package requirements and adjust the system path so they are
    immediately available.

    :param requirements: Requirement specifications as per :program:`easy_install`,
                         eg: "pylint" and "coverage>3"
    :type requirements:  *Either* a string *or* a list of string
    :raises: :exc:`dovetail.util.MissingRequirement` if :program:`easy_install`
             cannot locate or install the requirement

    .. note::

        If you need to set specific :program:`easy_install` behaviour, such as
        loading from a local host, then modify the :program:`easy_install`
        configuration as described in:

        * http://packages.python.org/distribute/easy_install.html#configuration-files
    """
    # 'basestring' exists only on Python 2 (where it covers both str and
    # unicode); fall back to 'str' so this also works on Python 3, where
    # referencing 'basestring' raises NameError.
    try:
        string_types = basestring
    except NameError:
        string_types = str
    if isinstance(requirements, string_types):
        # If a single string argument, make it into a list of strings
        requirements = [requirements]
    from pkg_resources import require
    Logger.major("Installing requirements: {0}".format(" ".join(requirements)))
    for requirement in requirements:
        Logger.major(" Running: easy_install {0}".format(requirement))
        try:
            easy_install.main([requirement])
        except BaseException as exception:
            # BaseException (rather than Exception) is deliberate:
            # easy_install.main() reports failure by calling sys.exit(), which
            # raises SystemExit - a BaseException that Exception would miss.
            raise MissingRequirement("easy_install could not locate or install {0}: {1}".format(requirement, pp_exception(exception)))
        Logger.log(" Ensuring {0} is on the system path".format(requirement))
        require(requirement)
def requires(*requirements):
    """Task decorator ensuring all package requirements are installed before
    the Task executes.

    :param requirements: One or more requirement specifications as per
                         :program:`easy_install`, eg: "pylint" and "coverage>3"
    :type requirements:  string
    :raises: :exc:`dovetail.util.exception.MissingRequirement` if
             :program:`easy_install` cannot locate or install the requirement
    :raises: :exc:`pkg_resources.VersionConflict` if a requirement is in
             conflict with the current environment.

    .. note::

        If you need to set specific :program:`easy_install` behaviour, such as
        loading from a local host, then modify the :program:`easy_install`
        configuration as described in:

        * http://packages.python.org/distribute/easy_install.html#configuration-files
    """
    #noinspection PyUnusedLocal
    def before(execution):
        # Runs just before the Task: install whatever is still missing
        unmet = not_present(requirements)
        if not unmet:
            Logger.log("@requires: Requirements met: {0}".format(pp_requirements(requirements)))
        else:
            Logger.major("@requires: Going to attempt to install the following requirements: {0}".format(pp_requirements(unmet)))
            install(unmet)
    return TaskWrapper.decorator_maker("@requires", before=before)
class Installed(object):
    """A predicate that returns True if all requirements are met in the Python
    environment.

    :param requirement: A requirement specifications as per
                        :program:`easy_install`, eg: "pylint" and "coverage>3"
    :type requirement:  string
    :param requirements: Additional requirements (optional)
    :type requirements:  string
    :return: True if all specified requirements are satisfied
    :rtype: boolean
    """
    def __init__(self, requirement, *requirements):
        # Collapse the mandatory first requirement and any extras into a
        # single list, preserving the order in which they were given.
        combined = [requirement]
        if requirements:
            combined.extend(requirements)
        self.requirements = combined

    def __call__(self):
        unmet = not_present(self.requirements, stop_on_error=False)
        if unmet:
            Logger.log("Installed: missing requirements: {0}".format(pp_requirements(unmet)))
            return False
        Logger.debug("Installed: all requirements met: {0}".format(pp_requirements(self.requirements)))
        return True

    def __str__(self):
        return "Installed({0})".format(pp_requirements(self.requirements))
/Brian2-2.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/brian2/units/fundamentalunits.py | import collections
import itertools
import numbers
import operator
import sys
from typing import Callable
from warnings import warn
import numpy as np
from numpy import VisibleDeprecationWarning
from sympy import latex
#: Public API of the fundamental-units module.
__all__ = [
    "DimensionMismatchError",
    "get_or_create_dimension",
    "get_dimensions",
    "is_dimensionless",
    "have_same_dimensions",
    "in_unit",
    "in_best_unit",
    "Quantity",
    "Unit",
    "register_new_unit",
    "check_units",
    "is_scalar_type",
    "get_unit",
]
# Module-level switch: when False, dimension checks (e.g. in
# fail_for_dimension_mismatch / have_same_dimensions) are skipped entirely.
unit_checking = True
def _flatten(iterable):
"""
Flatten a given list `iterable`.
"""
for e in iterable:
if isinstance(e, list):
yield from _flatten(e)
else:
yield e
def _short_str(arr):
"""
Return a short string representation of an array, suitable for use in
error messages.
"""
arr = np.asanyarray(arr)
old_printoptions = np.get_printoptions()
np.set_printoptions(edgeitems=2, threshold=5)
arr_string = str(arr)
np.set_printoptions(**old_printoptions)
return arr_string
# ===============================================================================
# Numpy ufuncs
# ===============================================================================
# Note: A list of numpy ufuncs can be found here:
# http://docs.scipy.org/doc/numpy/reference/ufuncs.html#available-ufuncs
#
# These name lists classify ufuncs by the unit-handling rule that applies to
# them; the dispatch logic in Quantity looks up ``ufunc.__name__`` in them.

#: ufuncs that work on all dimensions and preserve the dimensions, e.g. abs
UFUNCS_PRESERVE_DIMENSIONS = [
    "absolute",
    "rint",
    "negative",
    "conj",
    "conjugate",
    "floor",
    "ceil",
    "trunc",
]
#: ufuncs that work on all dimensions but change the dimensions, e.g. square
UFUNCS_CHANGE_DIMENSIONS = [
    "multiply",
    "divide",
    "true_divide",
    "floor_divide",
    "sqrt",
    "square",
    "reciprocal",
    "dot",
    "matmul",
]
#: ufuncs that work with matching dimensions, e.g. add
UFUNCS_MATCHING_DIMENSIONS = [
    "add",
    "subtract",
    "maximum",
    "minimum",
    "remainder",
    "mod",
    "fmod",
]
#: ufuncs that compare values, i.e. work only with matching dimensions but do
#: not result in a value with dimensions, e.g. equals
UFUNCS_COMPARISONS = [
    "less",
    "less_equal",
    "greater",
    "greater_equal",
    "equal",
    "not_equal",
]
#: Logical operations that work on all quantities and return boolean arrays
UFUNCS_LOGICAL = [
    "logical_and",
    "logical_or",
    "logical_xor",
    "logical_not",
    "isreal",
    "iscomplex",
    "isfinite",
    "isinf",
    "isnan",
]
#: ufuncs that only work on dimensionless quantities
UFUNCS_DIMENSIONLESS = [
    "sin",
    "sinh",
    "arcsin",
    "arcsinh",
    "cos",
    "cosh",
    "arccos",
    "arccosh",
    "tan",
    "tanh",
    "arctan",
    "arctanh",
    "log",
    "log2",
    "log10",
    "log1p",
    "exp",
    "exp2",
    "expm1",
]
#: ufuncs that only work on two dimensionless quantities
UFUNCS_DIMENSIONLESS_TWOARGS = ["logaddexp", "logaddexp2", "arctan2", "hypot"]
#: ufuncs that only work on integers and therefore never on quantities
UFUNCS_INTEGERS = [
    "bitwise_and",
    "bitwise_or",
    "bitwise_xor",
    "invert",
    "left_shift",
    "right_shift",
]
# ==============================================================================
# Utility functions
# ==============================================================================
def fail_for_dimension_mismatch(
    obj1, obj2=None, error_message=None, **error_quantities
):
    """
    Compare the dimensions of two objects.

    Parameters
    ----------
    obj1, obj2 : {array-like, `Quantity`}
        The object to compare. If `obj2` is ``None``, assume it to be
        dimensionless
    error_message : str, optional
        An error message that is used in the DimensionMismatchError
    error_quantities : dict mapping str to `Quantity`, optional
        Quantities in this dictionary will be converted using the `_short_str`
        helper method and inserted into the ``error_message`` (which should
        have placeholders with the corresponding names). The reason for doing
        this in a somewhat complicated way instead of directly including all the
        details in ``error_messsage`` is that converting large quantity arrays
        to strings can be rather costly and we don't want to do it if no error
        occured.

    Returns
    -------
    dim1, dim2 : `Dimension`, `Dimension`
        The physical dimensions of the two arguments (so that later code does
        not need to get the dimensions again).

    Raises
    ------
    DimensionMismatchError
        If the dimensions of `obj1` and `obj2` do not match (or, if `obj2` is
        ``None``, in case `obj1` is not dimensionsless).

    Notes
    -----
    Implements special checking for ``0``, treating it as having "any
    dimensions".
    """
    # With unit checking globally disabled, skip all work (callers must cope
    # with the (None, None) return in that case).
    if not unit_checking:
        return None, None
    dim1 = get_dimensions(obj1)
    if obj2 is None:
        dim2 = DIMENSIONLESS
    else:
        dim2 = get_dimensions(obj2)
    # Fast path: Dimension objects are singletons, so matching dimensions are
    # normally the *same* object and the 'is' test succeeds immediately.
    if dim1 is not dim2 and not (dim1 is None or dim2 is None):
        # Special treatment for "0":
        # if it is not a Quantity, it has "any dimension".
        # This allows expressions like 3*mV + 0 to pass (useful in cases where
        # zero is treated as the neutral element, e.g. in the Python sum
        # builtin) or comparisons like 3 * mV == 0 to return False instead of
        # failing # with a DimensionMismatchError. Note that 3*mV == 0*second
        # is not allowed, though.
        if (dim1 is DIMENSIONLESS and np.all(obj1 == 0)) or (
            dim2 is DIMENSIONLESS and np.all(obj2 == 0)
        ):
            return dim1, dim2
        # We do another check here, this should allow Brian1 units to pass as
        # having the same dimensions as a Brian2 unit
        if dim1 == dim2:
            return dim1, dim2
        if error_message is None:
            error_message = "Dimension mismatch"
        else:
            # Stringify the offending quantities only now, on the error path,
            # because _short_str on large arrays is comparatively expensive.
            error_quantities = {
                name: _short_str(q) for name, q in error_quantities.items()
            }
            error_message = error_message.format(**error_quantities)
        # If we are comparing an object to a specific unit, we don't want to
        # restate this unit (it is probably mentioned in the text already)
        if obj2 is None or isinstance(obj2, (Dimension, Unit)):
            raise DimensionMismatchError(error_message, dim1)
        else:
            raise DimensionMismatchError(error_message, dim1, dim2)
    else:
        return dim1, dim2
def wrap_function_dimensionless(func):
    """
    Returns a new function that wraps the given function `func` so that it
    raises a DimensionMismatchError if the function is called on a quantity
    with dimensions (excluding dimensionless quantities). Quantities are
    transformed to unitless numpy arrays before calling `func`.

    These checks/transformations apply only to the very first argument, all
    other arguments are ignored/untouched.
    """
    def f(x, *args, **kwds):  # pylint: disable=C0111
        fail_for_dimension_mismatch(
            x,
            error_message=(
                "%s expects a dimensionless argument but got {value}" % func.__name__
            ),
            value=x,
        )
        # np.asarray instead of np.array(x, copy=False): identical
        # "copy only if needed" behaviour, but still valid under NumPy 2.0
        # where np.array(..., copy=False) raises if a copy is required.
        return func(np.asarray(x), *args, **kwds)
    # Metadata used by Brian's code-generation/unit machinery
    f._arg_units = [1]
    f._return_unit = 1
    f.__name__ = func.__name__
    f.__doc__ = func.__doc__
    f._do_not_run_doctests = True
    return f
def wrap_function_keep_dimensions(func):
    """
    Returns a new function that wraps the given function `func` so that it
    keeps the dimensions of its input. Quantities are transformed to
    unitless numpy arrays before calling `func`, the output is a quantity
    with the original dimensions re-attached.

    These transformations apply only to the very first argument, all
    other arguments are ignored/untouched, allowing to work functions like
    ``sum`` to work as expected with additional ``axis`` etc. arguments.
    """
    def f(x, *args, **kwds):  # pylint: disable=C0111
        # np.asarray instead of np.array(x, copy=False): same semantics, but
        # NumPy-2.0-safe (copy=False raises there when a copy is unavoidable).
        return Quantity(func(np.asarray(x), *args, **kwds), dim=x.dim)
    # Metadata used by Brian's code-generation/unit machinery
    f._arg_units = [None]
    f._return_unit = lambda u: u
    f.__name__ = func.__name__
    f.__doc__ = func.__doc__
    f._do_not_run_doctests = True
    return f
def wrap_function_change_dimensions(func, change_dim_func):
    """
    Returns a new function that wraps the given function `func` so that it
    changes the dimensions of its input. Quantities are transformed to
    unitless numpy arrays before calling `func`, the output is a quantity
    with the original dimensions passed through the function
    `change_dim_func`. A typical use would be a ``sqrt`` function that uses
    ``lambda d: d ** 0.5`` as ``change_dim_func``.

    These transformations apply only to the very first argument, all
    other arguments are ignored/untouched.
    """
    def f(x, *args, **kwds):  # pylint: disable=C0111
        # np.asarray instead of np.array(x, copy=False): same semantics, but
        # NumPy-2.0-safe (copy=False raises there when a copy is unavoidable).
        ar = np.asarray(x)
        return Quantity(func(ar, *args, **kwds), dim=change_dim_func(ar, x.dim))
    # Metadata used by Brian's code-generation/unit machinery
    f._arg_units = [None]
    f._return_unit = change_dim_func
    f.__name__ = func.__name__
    f.__doc__ = func.__doc__
    f._do_not_run_doctests = True
    return f
def wrap_function_remove_dimensions(func):
    """
    Returns a new function that wraps the given function `func` so that it
    removes any dimensions from its input. Useful for functions that are
    returning integers (indices) or booleans, irrespective of the datatype
    contained in the array.

    These transformations apply only to the very first argument, all
    other arguments are ignored/untouched.
    """
    def f(x, *args, **kwds):  # pylint: disable=C0111
        # np.asarray instead of np.array(x, copy=False): same semantics, but
        # NumPy-2.0-safe (copy=False raises there when a copy is unavoidable).
        return func(np.asarray(x), *args, **kwds)
    # Metadata used by Brian's code-generation/unit machinery
    f._arg_units = [None]
    f._return_unit = 1
    f.__name__ = func.__name__
    f.__doc__ = func.__doc__
    f._do_not_run_doctests = True
    return f
# SI dimensions (see table at the top of the file) and various descriptions,
# each description maps to an index i, and the power of each dimension
# is stored in the variable dims[i].
# Several aliases (full name, lower-case variant, singular/plural unit name,
# SI symbol) deliberately map to the same index so callers can spell a
# dimension in whichever form is convenient.
_di = {
    "Length": 0,
    "length": 0,
    "metre": 0,
    "metres": 0,
    "meter": 0,
    "meters": 0,
    "m": 0,
    "Mass": 1,
    "mass": 1,
    "kilogram": 1,
    "kilograms": 1,
    "kg": 1,
    "Time": 2,
    "time": 2,
    "second": 2,
    "seconds": 2,
    "s": 2,
    "Electric Current": 3,
    "electric current": 3,
    "Current": 3,
    "current": 3,
    "ampere": 3,
    "amperes": 3,
    "A": 3,
    "Temperature": 4,
    "temperature": 4,
    "kelvin": 4,
    "kelvins": 4,
    "K": 4,
    "Quantity of Substance": 5,
    "Quantity of substance": 5,
    "quantity of substance": 5,
    "Substance": 5,
    "substance": 5,
    "mole": 5,
    "moles": 5,
    "mol": 5,
    "Luminosity": 6,
    "luminosity": 6,
    "candle": 6,
    "candles": 6,
    "cd": 6,
}
#: Short SI base-unit symbols, indexed consistently with the values of `_di`.
_ilabel = ["m", "kg", "s", "A", "K", "mol", "cd"]
# The same labels with the names used for constructing them in Python code
_iclass_label = ["metre", "kilogram", "second", "amp", "kelvin", "mole", "candle"]
# SI unit _prefixes as integer exponents of 10, see table at end of file.
_siprefixes = {
    "y": -24,
    "z": -21,
    "a": -18,
    "f": -15,
    "p": -12,
    "n": -9,
    "u": -6,
    "m": -3,
    "c": -2,
    "d": -1,
    "": 0,
    "da": 1,
    "h": 2,
    "k": 3,
    "M": 6,
    "G": 9,
    "T": 12,
    "P": 15,
    "E": 18,
    "Z": 21,
    "Y": 24,
}
class Dimension:
    """
    Stores the indices of the 7 basic SI unit dimension (length, mass, etc.).

    Provides a subset of arithmetic operations appropriate to dimensions:
    multiplication, division and powers, and equality testing.

    Parameters
    ----------
    dims : sequence of `float`
        The dimension indices of the 7 basic SI unit dimensions.

    Notes
    -----
    Users shouldn't use this class directly, it is used internally in Quantity
    and Unit. Even internally, never use ``Dimension(...)`` to create a new
    instance, use `get_or_create_dimension` instead. This function makes
    sure that only one Dimension instance exists for every combination of
    indices, allowing for a very fast dimensionality check with ``is``.
    """

    # No per-instance __dict__: many singleton instances, fixed single slot.
    __slots__ = ["_dims"]

    # High priority makes numpy defer mixed operations to this class.
    __array_priority__ = 1000

    #### INITIALISATION ####
    def __init__(self, dims):
        self._dims = dims

    #### METHODS ####
    def get_dimension(self, d):
        """
        Return a specific dimension.

        Parameters
        ----------
        d : `str`
            A string identifying the SI basic unit dimension. Can be either a
            description like "length" or a basic unit like "m" or "metre".

        Returns
        -------
        dim : `float`
            The dimensionality of the dimension `d`.
        """
        # _di maps every accepted alias to its index in the 7-tuple
        return self._dims[_di[d]]

    @property
    def is_dimensionless(self):
        """
        Whether this Dimension is dimensionless.

        Notes
        -----
        Normally, instead one should check dimension for being identical to
        `DIMENSIONLESS`.
        """
        return all([x == 0 for x in self._dims])

    @property
    def dim(self):
        """
        Returns the `Dimension` object itself. This can be useful, because it
        allows to check for the dimension of an object by checking its ``dim``
        attribute -- this will return a `Dimension` object for `Quantity`,
        `Unit` and `Dimension`.
        """
        return self

    #### REPRESENTATION ####
    def _str_representation(self, python_code=False):
        """
        String representation in basic SI units, or ``"1"`` for dimensionless.
        Use ``python_code=False`` for display purposes and ``True`` for valid
        Python code.
        """
        if python_code:
            power_operator = " ** "
        else:
            power_operator = "^"
        parts = []
        for i in range(len(self._dims)):
            if self._dims[i]:
                if python_code:
                    s = _iclass_label[i]
                else:
                    s = _ilabel[i]
                if self._dims[i] != 1:
                    s += power_operator + str(self._dims[i])
                parts.append(s)
        if python_code:
            s = " * ".join(parts)
            if not len(s):
                # Dimensionless: valid constructor expression, e.g. "Dimension()"
                return f"{self.__class__.__name__}()"
        else:
            s = " ".join(parts)
            if not len(s):
                return "1"
        return s.strip()

    def _latex(self, *args):
        # LaTeX rendering of the dimensions, e.g. "m\,s^{-2}"
        parts = []
        for i in range(len(self._dims)):
            if self._dims[i]:
                s = _ilabel[i]
                if self._dims[i] != 1:
                    s += "^{%s}" % str(self._dims[i])
                parts.append(s)
        s = "\\,".join(parts)
        if not len(s):
            return "1"
        return s.strip()

    # NOTE(review): IPython's rich display protocol looks for `_repr_latex_`
    # (trailing underscore) -- confirm this method name is intentional.
    def _repr_latex(self):
        return f"${latex(self)}$"

    def __repr__(self):
        return self._str_representation(python_code=True)

    def __str__(self):
        return self._str_representation(python_code=False)

    #### ARITHMETIC ####
    # Note that none of the dimension arithmetic objects do sanity checking
    # on their inputs, although most will throw an exception if you pass the
    # wrong sort of input
    def __mul__(self, value):
        # Multiplying quantities adds the dimension exponents
        return get_or_create_dimension([x + y for x, y in zip(self._dims, value._dims)])

    def __div__(self, value):
        # Dividing quantities subtracts the dimension exponents
        # (__div__ is the Python 2 name; __truediv__ below delegates here)
        return get_or_create_dimension([x - y for x, y in zip(self._dims, value._dims)])

    def __truediv__(self, value):
        return self.__div__(value)

    def __pow__(self, value):
        # NOTE(review): np.array(value, copy=False) raises under NumPy 2.0 when
        # a copy is unavoidable; np.asarray(value) is the forward-compatible form.
        value = np.array(value, copy=False)
        if value.size > 1:
            raise TypeError("Too many exponents")
        return get_or_create_dimension([x * value for x in self._dims])

    # In-place operators are forbidden: instances are shared singletons and
    # mutating one would corrupt every Quantity referencing it.
    def __imul__(self, value):
        raise TypeError("Dimension object is immutable")

    def __idiv__(self, value):
        raise TypeError("Dimension object is immutable")

    def __itruediv__(self, value):
        raise TypeError("Dimension object is immutable")

    def __ipow__(self, value):
        raise TypeError("Dimension object is immutable")

    #### COMPARISON ####
    def __eq__(self, value):
        try:
            # allclose tolerates tiny floating-point differences in exponents
            return np.allclose(self._dims, value._dims)
        except AttributeError:
            # Only compare equal to another Dimensions object
            return False

    def __ne__(self, value):
        return not self.__eq__(value)

    def __hash__(self):
        # NOTE(review): hash uses the exact tuple while __eq__ uses allclose,
        # so two dimensions that compare equal within tolerance could hash
        # differently -- acceptable here because instances are singletons.
        return hash(self._dims)

    #### MAKE DIMENSION PICKABLE ####
    def __getstate__(self):
        return self._dims

    def __setstate__(self, state):
        self._dims = state

    def __reduce__(self):
        # Make sure that unpickling Dimension objects does not bypass the singleton system
        return (get_or_create_dimension, (self._dims,))

    ### Dimension objects are singletons and deepcopy is therefore not necessary
    def __deepcopy__(self, memodict):
        return self
#: The singleton object for dimensionless Dimensions.
DIMENSIONLESS = Dimension((0, 0, 0, 0, 0, 0, 0))

#: Registry of all existing Dimension singletons, keyed by their 7-tuple of
#: exponents; maintained by `get_or_create_dimension`.
_dimensions = {(0, 0, 0, 0, 0, 0, 0): DIMENSIONLESS}
def get_or_create_dimension(*args, **kwds):
    """
    Create a new Dimension object or get a reference to an existing one.
    This function takes care of only creating new objects if they were not
    created before and otherwise returning a reference to an existing object.
    This allows to compare dimensions very efficiently using ``is``.

    Parameters
    ----------
    args : sequence of `float`
        A sequence with the indices of the 7 elements of an SI dimension.
    kwds : keyword arguments
        a sequence of ``keyword=value`` pairs where the keywords are the names of
        the SI dimensions, or the standard unit.

    Examples
    --------
    The following are all definitions of the dimensions of force

    >>> from brian2 import *
    >>> get_or_create_dimension(length=1, mass=1, time=-2)
    metre * kilogram * second ** -2
    >>> get_or_create_dimension(m=1, kg=1, s=-2)
    metre * kilogram * second ** -2
    >>> get_or_create_dimension([1, 1, -2, 0, 0, 0, 0])
    metre * kilogram * second ** -2

    Notes
    -----
    The 7 units are (in order):
    Length, Mass, Time, Electric Current, Temperature,
    Quantity of Substance, Luminosity
    and can be referred to either by these names or their SI unit names,
    e.g. length, metre, and m all refer to the same thing here.
    """
    if len(args):
        # Positional style: a single sequence of the 7 exponents
        dims = args[0]
        try:
            has_seven_entries = len(dims) == 7
        except TypeError:
            raise TypeError("Need a sequence of exactly 7 items")
        if not has_seven_entries:
            raise TypeError("Need a sequence of exactly 7 items")
    else:
        # Keyword style: start dimensionless and fill in the named exponents
        dims = [0, 0, 0, 0, 0, 0, 0]
        for name, exponent in kwds.items():
            # _di stores the index of the dimension with name 'name'
            dims[_di[name]] = exponent
    dims = tuple(dims)
    # Return the existing singleton for this exponent combination, if any
    existing = _dimensions.get(dims)
    if existing is not None:
        return existing
    created = Dimension(dims)
    _dimensions[dims] = created
    return created
class DimensionMismatchError(Exception):
    """
    Exception class for attempted operations with inconsistent dimensions.

    For example, ``3*mvolt + 2*amp`` raises this exception. The purpose of this
    class is to help catch errors based on incorrect units. The exception will
    print a representation of the dimensions of the two inconsistent objects
    that were operated on.

    Parameters
    ----------
    description : ``str``
        A description of the type of operation being performed, e.g. Addition,
        Multiplication, etc.
    dims : `Dimension`
        The physical dimensions of the objects involved in the operation, any
        number of them is possible
    """
    def __init__(self, description, *dims):
        # Call the base class constructor to make Exception pickable, see:
        # http://bugs.python.org/issue1692335
        Exception.__init__(self, description, *dims)
        self.dims = dims
        self.desc = description

    def __repr__(self):
        formatted_dims = ", ".join(repr(d) for d in self.dims)
        return f"{self.__class__.__name__}({self.desc!r}, {formatted_dims})"

    def __str__(self):
        message = self.desc
        n_dims = len(self.dims)
        if n_dims == 1:
            message += f" (unit is {get_unit_for_display(self.dims[0])}"
        elif n_dims == 2:
            d1, d2 = self.dims
            message += (
                f" (units are {get_unit_for_display(d1)} and {get_unit_for_display(d2)}"
            )
        elif n_dims > 2:
            all_units = " ".join(f"({get_unit_for_display(d)})" for d in self.dims)
            message += f" (units are {all_units}"
        if n_dims:
            # Close the parenthesis opened by any of the branches above
            message += ")."
        return message
def is_scalar_type(obj):
    """
    Tells you if the object is a 1d number type.

    Parameters
    ----------
    obj : `object`
        The object to check.

    Returns
    -------
    scalar : `bool`
        ``True`` if `obj` is a scalar that can be interpreted as a
        dimensionless `Quantity`.
    """
    try:
        # Array-like path: zero-dimensional and carrying no physical units
        return obj.ndim == 0 and is_dimensionless(obj)
    except AttributeError:
        # Plain Python numbers have no .ndim; strings are scalars to numpy
        # but never valid quantities, so exclude them explicitly.
        plain_scalar = np.isscalar(obj)
        return plain_scalar and not isinstance(obj, str)
def get_dimensions(obj):
    """
    Return the dimensions of any object that has them.

    Slightly more general than `Quantity.dimensions` because it will
    return `DIMENSIONLESS` if the object is of number type but not a `Quantity`
    (e.g. a `float` or `int`).

    Parameters
    ----------
    obj : `object`
        The object to check.

    Returns
    -------
    dim : `Dimension`
        The physical dimensions of the `obj`.
    """
    try:
        return obj.dim
    except AttributeError:
        # The exact-type test below is an optimisation: it avoids the costly
        # isinstance check for the most common plain-number types.
        common_number_types = [
            int,
            float,
            np.int32,
            np.int64,
            np.float32,
            np.float64,
            np.ndarray,
        ]
        if type(obj) in common_number_types or isinstance(
            obj, (numbers.Number, np.number, np.ndarray)
        ):
            return DIMENSIONLESS
        try:
            return Quantity(obj).dim
        except TypeError:
            raise TypeError(f"Object of type {type(obj)} does not have dimensions")
def is_dimensionless(obj):
    """
    Test if a value is dimensionless or not.

    Parameters
    ----------
    obj : `object`
        The object to check.

    Returns
    -------
    dimensionless : `bool`
        ``True`` if `obj` is dimensionless.
    """
    # Identity comparison is safe because DIMENSIONLESS is a singleton
    dim = get_dimensions(obj)
    return dim is DIMENSIONLESS
def have_same_dimensions(obj1, obj2):
    """Test if two values have the same dimensions.

    Parameters
    ----------
    obj1, obj2 : {`Quantity`, array-like, number}
        The values of which to compare the dimensions.

    Returns
    -------
    same : `bool`
        ``True`` if `obj1` and `obj2` have the same dimensions.
    """
    if not unit_checking:
        return True  # ignore units when unit checking is disabled
    dim1 = get_dimensions(obj1)
    dim2 = get_dimensions(obj2)
    # Dimensions created via get_or_create_dimension are singletons, so the
    # fast "is" comparison normally suffices; the equality fallback guards
    # against dimension objects created elsewhere (e.g. Brian1 units) and
    # only adds cost on the (likely already failing) mismatch path.
    if dim1 is dim2:
        return True
    if dim1 == dim2:
        return True
    return dim1 is None or dim2 is None
def in_unit(x, u, precision=None):
    """
    Display a value in a certain unit with a given precision.

    Parameters
    ----------
    x : {`Quantity`, array-like, number}
        The value to display
    u : {`Quantity`, `Unit`}
        The unit to display the value `x` in.
    precision : `int`, optional
        The number of digits of precision (in the given unit, see Examples).
        If no value is given, numpy's `get_printoptions` value is used.

    Returns
    -------
    s : `str`
        A string representation of `x` in units of `u`.

    Examples
    --------
    >>> from brian2 import *
    >>> in_unit(3 * volt, mvolt)
    '3000. mV'
    >>> in_unit(123123 * msecond, second, 2)
    '123.12 s'
    >>> in_unit(10 * uA/cm**2, nA/um**2)
    '1.00000000e-04 nA/(um^2)'
    >>> in_unit(10 * mV, ohm * amp)
    '0.01 ohm A'
    >>> in_unit(10 * nS, ohm) # doctest: +NORMALIZE_WHITESPACE
    ...                       # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
        ...
    DimensionMismatchError: Non-matching unit for method "in_unit",
    dimensions were (m^-2 kg^-1 s^3 A^2) (m^2 kg s^-3 A^-2)

    See Also
    --------
    Quantity.in_unit
    """
    if is_dimensionless(x):
        fail_for_dimension_mismatch(x, u, 'Non-matching unit for function "in_unit"')
        # np.asarray instead of np.array(..., copy=False): identical
        # "copy only if needed" behaviour, but still valid under NumPy 2.0
        # where np.array(..., copy=False) raises if a copy is required.
        return str(np.asarray(x / u))
    else:
        # Quantities know how to render themselves in a target unit
        return x.in_unit(u, precision=precision)
def in_best_unit(x, precision=None):
    """
    Represent the value in the "best" unit.

    Parameters
    ----------
    x : {`Quantity`, array-like, number}
        The value to display
    precision : `int`, optional
        The number of digits of precision (in the best unit, see Examples).
        If no value is given, numpy's `get_printoptions` value is used.

    Returns
    -------
    representation : `str`
        A string representation of this `Quantity`.

    Examples
    --------
    >>> from brian2.units import *
    >>> in_best_unit(0.00123456 * volt)
    '1.23456 mV'
    >>> in_best_unit(0.00123456 * volt, 2)
    '1.23 mV'
    >>> in_best_unit(0.123456)
    '0.123456'
    >>> in_best_unit(0.123456, 2)
    '0.12'

    See Also
    --------
    Quantity.in_best_unit
    """
    if is_dimensionless(x):
        # No units to choose from: just round to the requested precision
        digits = np.get_printoptions()["precision"] if precision is None else precision
        return str(np.round(x, digits))
    best = x.get_best_unit()
    return x.in_unit(best, precision=precision)
def quantity_with_dimensions(floatval, dims):
    """
    Create a new `Quantity` with the given dimensions. Calls
    `get_or_create_dimensions` with the dimension tuple of the `dims`
    argument to make sure that unpickling (which calls this function) does not
    accidentally create new Dimension objects which should instead refer to
    existing ones.

    Parameters
    ----------
    floatval : `float`
        The floating point value of the quantity.
    dims : `Dimension`
        The physical dimensions of the quantity.

    Returns
    -------
    q : `Quantity`
        A quantity with the given dimensions.

    Examples
    --------
    >>> from brian2 import *
    >>> quantity_with_dimensions(0.001, volt.dim)
    1. * mvolt

    See Also
    --------
    get_or_create_dimensions
    """
    # Route through the registry so the resulting Quantity shares the
    # canonical Dimension singleton for this exponent combination.
    dim_singleton = get_or_create_dimension(dims._dims)
    return Quantity(floatval, dim_singleton)
class Quantity(np.ndarray):
"""
A number with an associated physical dimension. In most cases, it is not
necessary to create a Quantity object by hand, instead use multiplication
and division of numbers with the constant unit names ``second``,
``kilogram``, etc.
Notes
-----
The `Quantity` class defines arithmetic operations which check for
consistency of dimensions and raise the DimensionMismatchError exception
if they are inconsistent. It also defines default and other representations
for a number for printing purposes.
See the documentation on the Unit class for more details
about the available unit names like mvolt, etc.
*Casting rules*
The rules that define the casting operations for
Quantity object are:
1. Quantity op Quantity = Quantity
Performs dimension checking if appropriate
2. (Scalar or Array) op Quantity = Quantity
Assumes that the scalar or array is dimensionless
There is one exception to the above rule, the number ``0`` is interpreted
as having "any dimension".
Examples
--------
>>> from brian2 import *
>>> I = 3 * amp # I is a Quantity object
>>> R = 2 * ohm # same for R
>>> I * R
6. * volt
>>> (I * R).in_unit(mvolt)
'6000. mV'
>>> (I * R) / mvolt
6000.0
>>> X = I + R # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
DimensionMismatchError: Addition, dimensions were (A) (m^2 kg s^-3 A^-2)
>>> Is = np.array([1, 2, 3]) * amp
>>> Is * R
array([ 2., 4., 6.]) * volt
>>> np.asarray(Is * R) # gets rid of units
array([ 2., 4., 6.])
See also
--------
Unit
Attributes
----------
dimensions
is_dimensionless
dim : Dimensions
The physical dimensions of this quantity.
Methods
-------
with_dimensions
has_same_dimensions
in_unit
in_best_unit
"""
__slots__ = ["dim"]
__array_priority__ = 1000
# ==========================================================================
# Construction and handling of numpy ufuncs
# ==========================================================================
def __new__(cls, arr, dim=None, dtype=None, copy=False, force_quantity=False):
# Do not create dimensionless quantities, use pure numpy arrays instead
if dim is DIMENSIONLESS and not force_quantity:
arr = np.array(arr, dtype=dtype, copy=copy)
if arr.shape == ():
# For scalar values, return a simple Python object instead of
# a numpy scalar
return arr.item()
return arr
# All np.ndarray subclasses need something like this, see
# http://www.scipy.org/Subclasses
subarr = np.array(arr, dtype=dtype, copy=copy).view(cls)
# We only want numerical datatypes
if not np.issubclass_(np.dtype(subarr.dtype).type, (np.number, np.bool_)):
raise TypeError("Quantities can only be created from numerical data.")
# If a dimension is given, force this dimension
if dim is not None:
subarr.dim = dim
return subarr
# Use the given dimension or the dimension of the given array (if any)
try:
subarr.dim = arr.dim
except AttributeError:
if not isinstance(arr, (np.ndarray, np.number, numbers.Number)):
# check whether it is an iterable containing Quantity objects
try:
is_quantity = [isinstance(x, Quantity) for x in _flatten(arr)]
except TypeError:
# Not iterable
is_quantity = [False]
if len(is_quantity) == 0:
# Empty list
subarr.dim = DIMENSIONLESS
elif all(is_quantity):
dims = [x.dim for x in _flatten(arr)]
one_dim = dims[0]
for d in dims:
if d != one_dim:
raise DimensionMismatchError(
"Mixing quantities "
"with different "
"dimensions is not "
"allowed",
d,
one_dim,
)
subarr.dim = dims[0]
elif any(is_quantity):
raise TypeError(
"Mixing quantities and non-quantities is not allowed."
)
return subarr
def __array_finalize__(self, orig):
self.dim = getattr(orig, "dim", DIMENSIONLESS)
def __array_prepare__(self, array, context=None):
    """
    Check dimensions before a ufunc is applied (called by numpy).

    Depending on the ufunc's name, the arguments either need no check,
    must match each other's dimensions, or must be dimensionless.
    Raises `DimensionMismatchError` or `TypeError` for invalid input.
    """
    if context is None:
        return array
    uf, args, _ = context
    if uf.__name__ in (
        UFUNCS_PRESERVE_DIMENSIONS + UFUNCS_CHANGE_DIMENSIONS + UFUNCS_LOGICAL
    ):
        # always allowed
        pass
    elif uf.__name__ in UFUNCS_INTEGERS:
        # Numpy should already raise a TypeError by itself
        raise TypeError(f"{uf.__name__} cannot be used on quantities.")
    elif uf.__name__ in UFUNCS_MATCHING_DIMENSIONS + UFUNCS_COMPARISONS:
        # Ok if dimension of arguments match
        fail_for_dimension_mismatch(
            args[0],
            args[1],
            error_message=(
                "Cannot calculate {val1} %s {val2}, the units do not match"
            )
            % uf.__name__,
            val1=args[0],
            val2=args[1],
        )
    elif uf.__name__ in UFUNCS_DIMENSIONLESS:
        # Ok if argument is dimensionless
        fail_for_dimension_mismatch(
            args[0],
            error_message="%s expects a dimensionless argument but got {value}"
            % uf.__name__,
            value=args[0],
        )
    elif uf.__name__ in UFUNCS_DIMENSIONLESS_TWOARGS:
        # Ok if both arguments are dimensionless
        fail_for_dimension_mismatch(
            args[0],
            error_message=(
                "Both arguments for "
                '"%s" should be '
                "dimensionless but "
                "first argument was "
                "{value}"
            )
            % uf.__name__,
            value=args[0],
        )
        fail_for_dimension_mismatch(
            args[1],
            error_message=(
                "Both arguments for "
                '"%s" should be '
                "dimensionless but "
                "second argument was "
                "{value}"
            )
            % uf.__name__,
            value=args[1],
        )
    elif uf.__name__ == "power":
        # Exponents have to be dimensionless scalars
        fail_for_dimension_mismatch(
            args[1],
            error_message=(
                "The exponent for a "
                "power operation has to "
                "be dimensionless but "
                "was {value}"
            ),
            value=args[1],
        )
        if np.array(args[1], copy=False).size != 1:
            raise TypeError(
                "Only length-1 arrays can be used as an exponent for quantities."
            )
    elif uf.__name__ in ("sign", "ones_like"):
        # Result is unitless -- strip the Quantity type right away
        return np.array(array, copy=False)
    else:
        warn(f"Unknown ufunc '{uf.__name__}' in __array_prepare__")
    return array
def __array_wrap__(self, array, context=None):
    """
    Attach the correct dimensions to the result of a ufunc (called by
    numpy after the operation). Dimension *checking* already happened in
    `__array_prepare__`; here only the result's dimensions are computed.
    """
    dim = DIMENSIONLESS
    if context is not None:
        uf, args, _ = context
        if uf.__name__ in (UFUNCS_PRESERVE_DIMENSIONS + UFUNCS_MATCHING_DIMENSIONS):
            dim = self.dim
        elif uf.__name__ in (UFUNCS_DIMENSIONLESS + UFUNCS_DIMENSIONLESS_TWOARGS):
            # We should have been arrived here only for dimensionless
            # quantities
            dim = DIMENSIONLESS
        elif uf.__name__ in (
            UFUNCS_COMPARISONS + UFUNCS_LOGICAL + ["sign", "ones_like"]
        ):
            # Do not touch the return value (boolean or integer array)
            return array
        elif uf.__name__ == "sqrt":
            dim = self.dim**0.5
        elif uf.__name__ == "power":
            dim = get_dimensions(args[0]) ** np.array(args[1], copy=False)
        elif uf.__name__ == "square":
            dim = self.dim**2
        elif uf.__name__ in ("divide", "true_divide", "floor_divide"):
            dim = get_dimensions(args[0]) / get_dimensions(args[1])
        elif uf.__name__ == "reciprocal":
            dim = get_dimensions(args[0]) ** -1
        elif uf.__name__ in ("multiply", "dot", "matmul"):
            dim = get_dimensions(args[0]) * get_dimensions(args[1])
        else:
            warn(f"Unknown ufunc '{uf.__name__}' in __array_wrap__")
            # TODO: Remove units in this case?
    # This seems to be better than using type(self) instead of quantity
    # This may convert units to Quantities, e.g. np.square(volt) leads to
    # a 1 * volt ** 2 quantitiy instead of volt ** 2. But this should
    # rarely be an issue. The alternative leads to more confusing
    # behaviour: np.float64(3) * mV would result in a dimensionless float64
    result = array.view(Quantity)
    result.dim = dim
    return result
def __deepcopy__(self, memo):
    # A quantity holds only numerical data, so a copying construction is
    # already a full deep copy; ``memo`` needs no bookkeeping here.
    return Quantity(self, copy=True)
# ==============================================================================
# Quantity-specific functions (not existing in ndarray)
# ==============================================================================
@staticmethod
def with_dimensions(value, *args, **keywords):
    """
    Create a `Quantity` object with dim.
    Parameters
    ----------
    value : {array_like, number}
        The value of the dimension
    args : {`Dimension`, sequence of float}
        Either a single argument (a `Dimension`) or a sequence of 7 values.
    kwds
        Keywords defining the dim, see `Dimension` for details.
    Returns
    -------
    q : `Quantity`
        A `Quantity` object with the given dim
    Examples
    --------
    All of these define an equivalent `Quantity` object:
    >>> from brian2 import *
    >>> Quantity.with_dimensions(2, get_or_create_dimension(length=1))
    2. * metre
    >>> Quantity.with_dimensions(2, length=1)
    2. * metre
    >>> 2 * metre
    2. * metre
    """
    # Accept either a ready-made Dimension object or the keyword/sequence
    # forms understood by get_or_create_dimension.
    if len(args) and isinstance(args[0], Dimension):
        dimensions = args[0]
    else:
        dimensions = get_or_create_dimension(*args, **keywords)
    return Quantity(value, dim=dimensions)
### ATTRIBUTES ###
is_dimensionless = property(
    lambda self: self.dim.is_dimensionless,
    doc="Whether this is a dimensionless quantity.",
)
# "dimensions" is a read/write property alias for the "dim" attribute.
@property
def dimensions(self):
    """
    The physical dimensions of this quantity.
    """
    return self.dim
@dimensions.setter
def dimensions(self, dim):
    self.dim = dim
#### METHODS ####
def has_same_dimensions(self, other):
    """
    Return whether this object has the same dimensions as another.

    Parameters
    ----------
    other : {`Quantity`, array-like, number}
        The object to compare the dimensions against.

    Returns
    -------
    same : `bool`
        ``True`` if `other` has the same dimensions.
    """
    if not unit_checking:
        # Unit checking is globally disabled -- everything matches.
        return True
    dim_of_other = get_dimensions(other)
    # Try the cheap identity check first (dimensions are usually shared
    # singleton objects), then fall back to an equality comparison.
    if self.dim is dim_of_other:
        return True
    return self.dim == dim_of_other
def in_unit(self, u, precision=None, python_code=False):
    """
    Represent the quantity in a given unit. If `python_code` is ``True``,
    this will return valid python code, i.e. a string like ``5.0 * um ** 2``
    instead of ``5.0 um^2``
    Parameters
    ----------
    u : {`Quantity`, `Unit`}
        The unit in which to show the quantity.
    precision : `int`, optional
        The number of digits of precision (in the given unit, see Examples).
        If no value is given, numpy's `get_printoptions` value is used.
    python_code : `bool`, optional
        Whether to return valid python code (``True``) or a human readable
        string (``False``, the default).
    Returns
    -------
    s : `str`
        String representation of the object in unit `u`.
    Examples
    --------
    >>> from brian2.units import *
    >>> from brian2.units.stdunits import *
    >>> x = 25.123456 * mV
    >>> x.in_unit(volt)
    '0.02512346 V'
    >>> x.in_unit(volt, 3)
    '0.025 V'
    >>> x.in_unit(mV, 3)
    '25.123 mV'
    See Also
    --------
    in_unit
    """
    fail_for_dimension_mismatch(self, u, 'Non-matching unit for method "in_unit"')
    # Dividing by the unit yields the dimensionless numerical value(s).
    value = np.array(self / u, copy=False)
    # numpy uses the printoptions setting only in arrays, not in array
    # scalars, so we use this hackish way of turning the scalar first into
    # an array, then removing the square brackets from the output
    if value.shape == ():
        s = np.array_str(np.array([value]), precision=precision)
        s = s.replace("[", "").replace("]", "").strip()
    else:
        if python_code:
            s = np.array_repr(value, precision=precision)
        else:
            s = np.array_str(value, precision=precision)
    # Append the unit (or its dimensions, for a plain Quantity).
    if not u.is_dimensionless:
        if isinstance(u, Unit):
            if python_code:
                s += f" * {repr(u)}"
            else:
                s += f" {str(u)}"
        else:
            if python_code:
                s += f" * {repr(u.dim)}"
            else:
                s += f" {str(u.dim)}"
    elif python_code:  # Make a quantity without unit recognisable
        return f"{self.__class__.__name__}({s.strip()})"
    return s.strip()
def get_best_unit(self, *regs):
    """
    Return the best unit for this `Quantity`.

    Parameters
    ----------
    regs : any number of `UnitRegistry` objects
        The registries that are searched for units. If none are provided, it
        will check the standard, user and additional unit registers in turn.

    Returns
    -------
    u : `Quantity` or `Unit`
        The best-fitting unit for the quantity `x`.
    """
    if self.is_dimensionless:
        return Unit(1)
    if not regs:
        # Default search order when no registries were given explicitly.
        regs = (
            standard_unit_register,
            user_unit_register,
            additional_unit_register,
        )
    for registry in regs:
        try:
            return registry[self]
        except KeyError:
            continue
    # No registry knows a matching unit -- fall back to the raw dimensions.
    return Quantity(1, self.dim)
def _get_best_unit(self, *regs):
    # Deprecated alias, kept so that old user code keeps working.
    warn(
        "Quantity._get_best_unit has been renamed to Quantity.get_best_unit.",
        VisibleDeprecationWarning,
    )
    return self.get_best_unit(*regs)
def in_best_unit(self, precision=None, python_code=False, *regs):
    """
    Represent the quantity in the "best" unit.
    Parameters
    ----------
    python_code : `bool`, optional
        If set to ``False`` (the default), will return a string like
        ``5.0 um^2`` which is not a valid Python expression. If set to
        ``True``, it will return ``5.0 * um ** 2`` instead.
    precision : `int`, optional
        The number of digits of precision (in the best unit, see
        Examples). If no value is given, numpy's
        `get_printoptions` value is used.
    regs : `UnitRegistry` objects
        The registries where to search for units. If none are given, the
        standard, user-defined and additional registries are searched in
        that order.
    Returns
    -------
    representation : `str`
        A string representation of this `Quantity`.
    Examples
    --------
    >>> from brian2.units import *
    >>> x = 0.00123456 * volt
    >>> x.in_best_unit()
    '1.23456 mV'
    >>> x.in_best_unit(3)
    '1.235 mV'
    See Also
    --------
    in_best_unit
    """
    # Delegate: find the best-fitting unit, then format in that unit.
    u = self.get_best_unit(*regs)
    return self.in_unit(u, precision=precision, python_code=python_code)
# ==============================================================================
# Overwritten ndarray methods
# ==============================================================================
#### Setting/getting items ####
def __getitem__(self, key):
    """Overwritten to assure that single elements (i.e., indexed with a
    single integer or a tuple of integers) retain their unit.
    """
    return Quantity(np.ndarray.__getitem__(self, key), self.dim)
def item(self, *args):
    """Overwritten to assure that the returned element retains its unit."""
    return Quantity(np.ndarray.item(self, *args), self.dim)
def __setitem__(self, key, value):
    # Assigning values with non-matching units is an error.
    fail_for_dimension_mismatch(self, value, "Inconsistent units in assignment")
    return super().__setitem__(key, value)
#### ARITHMETIC ####
def _binary_operation(
    self,
    other,
    operation,
    dim_operation=lambda a, b: a,
    fail_for_mismatch=False,
    operator_str=None,
    inplace=False,
):
    """
    General implementation for binary operations.
    Parameters
    ----------
    other : {`Quantity`, `ndarray`, scalar}
        The object with which the operation should be performed.
    operation : function of two variables
        The function with which the two objects are combined. For example,
        `operator.mul` for a multiplication.
    dim_operation : function of two variables, optional
        The function with which the dimension of the resulting object is
        calculated (as a function of the dimensions of the two involved
        objects). For example, `operator.mul` for a multiplication. If not
        specified, the dimensions of `self` are used for the resulting
        object.
    fail_for_mismatch : bool, optional
        Whether to fail for a dimension mismatch between `self` and `other`
        (defaults to ``False``)
    operator_str : str, optional
        The string to use for the operator in an error message.
    inplace: bool, optional
        Whether to do the operation in-place (defaults to ``False``).
    Notes
    -----
    For in-place operations on scalar values, a copy of the original object
    is returned, i.e. it rather works like a fundamental Python type and
    not like a numpy array scalar, preventing weird effects when a reference
    to the same value was stored in another variable. See github issue #469.
    """
    other_dim = None
    if fail_for_mismatch:
        # fail_for_dimension_mismatch also returns the dimensions, saving
        # a second get_dimensions call below.
        if inplace:
            message = (
                "Cannot calculate ... %s {value}, units do not match" % operator_str
            )
            _, other_dim = fail_for_dimension_mismatch(
                self, other, message, value=other
            )
        else:
            message = (
                "Cannot calculate {value1} %s {value2}, units do not match"
                % operator_str
            )
            _, other_dim = fail_for_dimension_mismatch(
                self, other, message, value1=self, value2=other
            )
    if other_dim is None:
        # No mismatch check was requested -- look up the dimensions now.
        other_dim = get_dimensions(other)
    if inplace:
        if self.shape == ():
            # Copy scalars instead of mutating them in place (see Notes).
            self_value = Quantity(self, copy=True)
        else:
            self_value = self
        operation(self_value, other)
        self_value.dim = dim_operation(self.dim, other_dim)
        return self_value
    else:
        newdims = dim_operation(self.dim, other_dim)
        # Operate on plain arrays to avoid recursing into Quantity ops.
        self_arr = np.array(self, copy=False)
        other_arr = np.array(other, copy=False)
        result = operation(self_arr, other_arr)
        return Quantity(result, newdims)
def __mul__(self, other):
    # Multiplication multiplies the dimensions of the operands.
    return self._binary_operation(other, operator.mul, operator.mul)
def __rmul__(self, other):
    # Multiplication is commutative.
    return self.__mul__(other)
def __imul__(self, other):
    return self._binary_operation(
        other, np.ndarray.__imul__, operator.mul, inplace=True
    )
def __div__(self, other):
    # Division divides the dimensions of the operands.
    return self._binary_operation(other, operator.truediv, operator.truediv)
def __truediv__(self, other):
    return self.__div__(other)
def __rdiv__(self, other):
    # division with swapped arguments
    rdiv = lambda a, b: operator.truediv(b, a)
    return self._binary_operation(other, rdiv, rdiv)
def __rtruediv__(self, other):
    return self.__rdiv__(other)
def __idiv__(self, other):
    return self._binary_operation(
        other, np.ndarray.__itruediv__, operator.truediv, inplace=True
    )
def __itruediv__(self, other):
    return self._binary_operation(
        other, np.ndarray.__itruediv__, operator.truediv, inplace=True
    )
def __mod__(self, other):
    # Modulo requires matching units; the result keeps self's dimensions.
    return self._binary_operation(
        other, operator.mod, fail_for_mismatch=True, operator_str=r"%"
    )
def __add__(self, other):
    # Addition and subtraction require matching units.
    return self._binary_operation(
        other, operator.add, fail_for_mismatch=True, operator_str="+"
    )
def __radd__(self, other):
    return self.__add__(other)
def __iadd__(self, other):
    return self._binary_operation(
        other,
        np.ndarray.__iadd__,
        fail_for_mismatch=True,
        operator_str="+=",
        inplace=True,
    )
def __sub__(self, other):
    return self._binary_operation(
        other, operator.sub, fail_for_mismatch=True, operator_str="-"
    )
def __rsub__(self, other):
    # We allow operations with 0 even for dimension mismatches, e.g.
    # 0 - 3*mV is allowed. In this case, the 0 is not represented by a
    # Quantity object so we cannot simply call Quantity.__sub__
    if (not isinstance(other, Quantity) or other.dim is DIMENSIONLESS) and np.all(
        other == 0
    ):
        return self.__neg__()
    else:
        return Quantity(other, copy=False, force_quantity=True).__sub__(self)
def __isub__(self, other):
    return self._binary_operation(
        other,
        np.ndarray.__isub__,
        fail_for_mismatch=True,
        operator_str="-=",
        inplace=True,
    )
def __pow__(self, other):
    # The exponent must be a dimensionless scalar; the result's dimensions
    # are this quantity's dimensions raised to that power.
    if isinstance(other, np.ndarray) or is_scalar_type(other):
        fail_for_dimension_mismatch(
            other,
            error_message=(
                "Cannot calculate "
                "{base} ** {exponent}, "
                "the exponent has to be "
                "dimensionless"
            ),
            base=self,
            exponent=other,
        )
        other = np.array(other, copy=False)
        return Quantity(np.array(self, copy=False) ** other, self.dim**other)
    else:
        return NotImplemented
def __rpow__(self, other):
    """
    Reflected power, i.e. ``other ** self``.

    Only allowed when this quantity (the exponent) is dimensionless; the
    result is dimensionless as well. Raises `DimensionMismatchError`
    otherwise.
    """
    if self.is_dimensionless:
        # BUG FIX: the second check used to be a duplicate of the first
        # ("isinstance(other, np.ndarray)" twice), which wrongly rejected
        # plain scalar bases; it has to accept scalars like __pow__ does.
        if isinstance(other, np.ndarray) or is_scalar_type(other):
            new_array = np.array(other, copy=False) ** np.array(self, copy=False)
            return Quantity(new_array, DIMENSIONLESS)
        else:
            return NotImplemented
    else:
        base = _short_str(other)
        exponent = _short_str(self)
        raise DimensionMismatchError(
            f"Cannot calculate {base} ** {exponent}, "
            "the exponent has to be dimensionless.",
            self.dim,
        )
def __ipow__(self, other):
    # In-place power: exponent must be dimensionless; dimensions of self
    # are updated to reflect the new power.
    if isinstance(other, np.ndarray) or is_scalar_type(other):
        fail_for_dimension_mismatch(
            other,
            error_message=(
                "Cannot calculate "
                "... **= {exponent}, "
                "the exponent has to be "
                "dimensionless"
            ),
            exponent=other,
        )
        other = np.array(other, copy=False)
        super().__ipow__(other)
        self.dim = self.dim**other
        return self
    else:
        return NotImplemented
def __neg__(self):
    # Negation and absolute value keep the dimensions.
    return Quantity(-np.array(self, copy=False), self.dim)
def __pos__(self):
    # Unary plus is a no-op.
    return self
def __abs__(self):
    return Quantity(abs(np.array(self, copy=False)), self.dim)
def tolist(self):
    """
    Convert the array into a list.
    Returns
    -------
    l : list of `Quantity`
        A (possibly nested) list equivalent to the original array.
    """
    def replace_with_quantity(seq, dim):
        """
        Replace all the elements in the list with an equivalent `Quantity`
        with the given `dim`.
        """
        # No recursion needed for single values
        if not isinstance(seq, list):
            return Quantity(seq, dim)
        def top_replace(s):
            """
            Recursivley descend into the list.
            """
            for i in s:
                if not isinstance(i, list):
                    yield Quantity(i, dim)
                else:
                    yield type(i)(top_replace(i))
        return type(seq)(top_replace(seq))
    # Convert to a plain nested list first, then re-wrap every leaf value
    # as a Quantity carrying this array's dimensions.
    return replace_with_quantity(np.array(self, copy=False).tolist(), self.dim)
#### COMPARISONS ####
def _comparison(self, other, operator_str, operation):
    """
    Shared implementation for all comparison operators: check that the
    units match (comparisons against +/-inf are always allowed), then
    compare the plain numerical values.
    """
    is_scalar = is_scalar_type(other)
    if not is_scalar and not isinstance(other, np.ndarray):
        return NotImplemented
    # Comparisons with infinity are meaningful regardless of units, so
    # the dimension check is skipped for scalar +/-inf.
    if not is_scalar or not np.isinf(other):
        message = (
            "Cannot perform comparison {value1} %s {value2}, units do not match"
            % operator_str
        )
        fail_for_dimension_mismatch(self, other, message, value1=self, value2=other)
    return operation(np.array(self, copy=False), np.array(other, copy=False))
# All rich comparisons delegate to _comparison, which enforces unit
# consistency before comparing the numerical values.
def __lt__(self, other):
    return self._comparison(other, "<", operator.lt)
def __le__(self, other):
    return self._comparison(other, "<=", operator.le)
def __gt__(self, other):
    return self._comparison(other, ">", operator.gt)
def __ge__(self, other):
    return self._comparison(other, ">=", operator.ge)
def __eq__(self, other):
    return self._comparison(other, "==", operator.eq)
def __ne__(self, other):
    return self._comparison(other, "!=", operator.ne)
#### MAKE QUANTITY PICKABLE ####
def __reduce__(self):
    # Pickle as (plain array, dimensions); quantity_with_dimensions
    # reconstructs the Quantity on unpickling.
    return quantity_with_dimensions, (np.array(self, copy=False), self.dim)
#### REPRESENTATION ####
def __repr__(self):
    # Valid Python code, e.g. "2. * metre"
    return self.in_best_unit(python_code=True)
def _latex(self, expr):
    """
    Translates a scalar, 1-d or 2-d array into a LaTeX representation. Will be called
    by ``sympy``'s `~sympy.latex` function and used as a "rich representation" in e.g.
    jupyter notebooks.
    The values in the array will be formatted with `numpy.array2string` and will
    therefore observe ``numpy``'s "print options" such as ``precision``. Including
    all numbers in the LaTeX output will rarely be useful for large arrays; this
    function will therefore apply a ``threshold`` value divided by 100 (the default
    ``threshold`` value is 1000, this function hence applies 10). Note that the
    ``max_line_width`` print option is ignored.
    """
    best_unit = self.get_best_unit()
    if isinstance(best_unit, Unit):
        best_unit_latex = latex(best_unit)
    else:  # A quantity
        best_unit_latex = latex(best_unit.dimensions)
    # Strip the unit -- only the numbers are formatted below.
    unitless = np.array(self / best_unit, copy=False)
    threshold = np.get_printoptions()["threshold"] // 100
    if unitless.ndim == 0:
        sympy_quantity = float(unitless)
    elif unitless.ndim == 1:
        array_str = np.array2string(
            unitless,
            separator=" & ",
            threshold=threshold,
            max_line_width=sys.maxsize,
        )
        # Replace [ and ]
        sympy_quantity = (
            r"\left[\begin{matrix}"
            + array_str[1:-1].replace("...", r"\dots")
            + r"\end{matrix}\right]"
        )
    elif unitless.ndim == 2:
        array_str = np.array2string(
            unitless,
            separator=" & ",
            threshold=threshold,
            max_line_width=sys.maxsize,
        )
        # Rewrite numpy's nested-bracket output into LaTeX matrix rows:
        # row separators become \\, elided rows become a \vdots row.
        array_str = array_str[1:-1].replace("...", r"\dots")
        array_str = (
            array_str.replace("[", "").replace("] &", r"\\").replace("]", "\n")
        )
        lines = array_str.split("\n")
        n_cols = lines[0].count("&") + 1
        new_lines = []
        for line in lines:
            if line.strip() == r"\dots &":
                new_lines.append(" & ".join([r"\vdots"] * n_cols) + r"\\")
            else:
                new_lines.append(line)
        sympy_quantity = (
            r"\left[\begin{matrix}"
            + "\n"
            + "\n".join(new_lines)
            + r"\end{matrix}\right]"
        )
    else:
        raise NotImplementedError(
            f"Cannot create a LaTeX representation for a {unitless.ndim}-d matrix."
        )
    return f"{sympy_quantity}\\,{best_unit_latex}"
def _repr_latex_(self):
    # Rich display hook used by Jupyter notebooks.
    return f"${latex(self)}$"
def __str__(self):
    # Human-readable representation, e.g. "2. mV"
    return self.in_best_unit()
def __format__(self, format_spec):
    # Avoid that formatted strings like f"{q}" use floating point formatting for the
    # quantity, i.e. discard the unit
    if format_spec == "":
        return str(self)
    else:
        return super().__format__(format_spec)
#### Mathematic methods ####
# The wrap_function_* helpers turn the corresponding ndarray methods into
# unit-aware versions: "keep" preserves the dimensions, "change" transforms
# them, "remove" returns plain (unitless) results.
cumsum = wrap_function_keep_dimensions(np.ndarray.cumsum)
diagonal = wrap_function_keep_dimensions(np.ndarray.diagonal)
max = wrap_function_keep_dimensions(np.ndarray.max)
mean = wrap_function_keep_dimensions(np.ndarray.mean)
min = wrap_function_keep_dimensions(np.ndarray.min)
ptp = wrap_function_keep_dimensions(np.ndarray.ptp)
# To work around an issue in matplotlib 1.3.1 (see
# https://github.com/matplotlib/matplotlib/pull/2591), we make `ravel`
# return a unitless array and emit a warning explaining the issue.
use_matplotlib_units_fix = False
try:
    import matplotlib
    if matplotlib.__version__ == "1.3.1":
        use_matplotlib_units_fix = True
except ImportError:
    pass
if use_matplotlib_units_fix:
    def ravel(self, *args, **kwds):
        # Note that we don't use Brian's logging system here as we don't want
        # the unit system to depend on other parts of Brian
        warn(
            "As a workaround for a bug in matplotlib 1.3.1, calling "
            '"ravel()" on a quantity will return unit-less values. If you '
            "get this warning during plotting, consider removing the units "
            "before plotting, e.g. by dividing by the unit. If you are "
            'explicitly calling "ravel()", consider using "flatten()" '
            "instead."
        )
        return np.array(self, copy=False).ravel(*args, **kwds)
    ravel._arg_units = [None]
    ravel._return_unit = 1
    ravel.__name__ = np.ndarray.ravel.__name__
    ravel.__doc__ = np.ndarray.ravel.__doc__
else:
    ravel = wrap_function_keep_dimensions(np.ndarray.ravel)
round = wrap_function_keep_dimensions(np.ndarray.round)
std = wrap_function_keep_dimensions(np.ndarray.std)
sum = wrap_function_keep_dimensions(np.ndarray.sum)
trace = wrap_function_keep_dimensions(np.ndarray.trace)
# Variance has squared dimensions.
var = wrap_function_change_dimensions(np.ndarray.var, lambda ar, d: d**2)
all = wrap_function_remove_dimensions(np.ndarray.all)
any = wrap_function_remove_dimensions(np.ndarray.any)
nonzero = wrap_function_remove_dimensions(np.ndarray.nonzero)
argmax = wrap_function_remove_dimensions(np.ndarray.argmax)
argmin = wrap_function_remove_dimensions(np.ndarray.argmin)
argsort = wrap_function_remove_dimensions(np.ndarray.argsort)
def fill(self, values):  # pylint: disable=C0111
    # Filling with a value of different units is an error.
    fail_for_dimension_mismatch(self, values, "fill")
    super().fill(values)
fill.__doc__ = np.ndarray.fill.__doc__
fill._do_not_run_doctests = True
def put(self, indices, values, *args, **kwds):  # pylint: disable=C0111
    # Unit-aware version of ndarray.put: the assigned values must have
    # matching units.
    # BUG FIX: the error context string used to say "fill" (copy-paste
    # from the method above); it now correctly names this operation.
    fail_for_dimension_mismatch(self, values, "put")
    super().put(indices, values, *args, **kwds)
put.__doc__ = np.ndarray.put.__doc__
put._do_not_run_doctests = True
def clip(self, a_min, a_max, *args, **kwds):  # pylint: disable=C0111
    # Both bounds have to carry the same units as the clipped quantity.
    fail_for_dimension_mismatch(self, a_min, "clip")
    fail_for_dimension_mismatch(self, a_max, "clip")
    return Quantity(
        np.clip(
            np.array(self, copy=False),
            np.array(a_min, copy=False),
            np.array(a_max, copy=False),
            *args,
            **kwds,
        ),
        self.dim,
    )
clip.__doc__ = np.ndarray.clip.__doc__
clip._do_not_run_doctests = True
def dot(self, other, **kwds):  # pylint: disable=C0111
    # Dot product multiplies the dimensions of both operands.
    return Quantity(
        np.array(self).dot(np.array(other), **kwds),
        self.dim * get_dimensions(other),
    )
dot.__doc__ = np.ndarray.dot.__doc__
dot._do_not_run_doctests = True
def searchsorted(self, v, **kwds):  # pylint: disable=C0111
    fail_for_dimension_mismatch(self, v, "searchsorted")
    return super().searchsorted(np.array(v, copy=False), **kwds)
searchsorted.__doc__ = np.ndarray.searchsorted.__doc__
searchsorted._do_not_run_doctests = True
def prod(self, *args, **kwds):  # pylint: disable=C0111
    prod_result = super().prod(*args, **kwds)
    # Calculating the correct dimensions is not completly trivial (e.g.
    # like doing self.dim**self.size) because prod can be called on
    # multidimensional arrays along a certain axis.
    # Our solution: Use a "dummy matrix" containing a 1 (without units) at
    # each entry and sum it, using the same keyword arguments as provided.
    # The result gives the exponent for the dimensions.
    # This relies on sum and prod having the same arguments, which is true
    # now and probably remains like this in the future
    dim_exponent = np.ones_like(self).sum(*args, **kwds)
    # The result is possibly multidimensional but all entries should be
    # identical
    if dim_exponent.size > 1:
        dim_exponent = dim_exponent[0]
    return Quantity(np.array(prod_result, copy=False), self.dim**dim_exponent)
prod.__doc__ = np.ndarray.prod.__doc__
prod._do_not_run_doctests = True
def cumprod(self, *args, **kwds):  # pylint: disable=C0111
    # A cumulative product would mix entries of different dimensions, so
    # it is only defined for dimensionless quantities.
    if not self.is_dimensionless:
        raise TypeError(
            "cumprod over array elements on quantities "
            "with dimensions is not possible."
        )
    return Quantity(np.array(self, copy=False).cumprod(*args, **kwds))
cumprod.__doc__ = np.ndarray.cumprod.__doc__
cumprod._do_not_run_doctests = True
class Unit(Quantity):
r"""
A physical unit.
Normally, you do not need to worry about the implementation of
units. They are derived from the `Quantity` object with
some additional information (name and string representation).
Basically, a unit is just a number with given dimensions, e.g.
mvolt = 0.001 with the dimensions of voltage. The units module
defines a large number of standard units, and you can also define
your own (see below).
The unit class also keeps track of various things that were used
to define it so as to generate a nice string representation of it.
See below.
When creating scaled units, you can use the following prefixes:
====== ====== ==============
Factor Name Prefix
====== ====== ==============
10^24 yotta Y
10^21 zetta Z
10^18 exa E
10^15 peta P
10^12 tera T
10^9 giga G
10^6 mega M
10^3 kilo k
10^2 hecto h
10^1 deka da
1
10^-1 deci d
10^-2 centi c
10^-3 milli m
10^-6 micro u (\mu in SI)
10^-9 nano n
10^-12 pico p
10^-15 femto f
10^-18 atto a
10^-21 zepto z
10^-24 yocto y
====== ====== ==============
**Defining your own**
It can be useful to define your own units for printing
purposes. So for example, to define the newton metre, you
write
>>> from brian2 import *
>>> from brian2.units.allunits import newton
>>> Nm = newton * metre
You can then do
>>> (1*Nm).in_unit(Nm)
'1. N m'
New "compound units", i.e. units that are composed of other units will be
automatically registered and from then on used for display. For example,
imagine you define total conductance for a membrane, and the total area of
that membrane:
>>> conductance = 10.*nS
>>> area = 20000*um**2
If you now ask for the conductance density, you will get an "ugly" display
in basic SI dimensions, as Brian does not know of a corresponding unit:
>>> conductance/area
0.5 * metre ** -4 * kilogram ** -1 * second ** 3 * amp ** 2
By using an appropriate unit once, it will be registered and from then on
used for display when appropriate:
>>> usiemens/cm**2
usiemens / (cmetre ** 2)
>>> conductance/area # same as before, but now Brian knows about uS/cm^2
50. * usiemens / (cmetre ** 2)
Note that user-defined units cannot override the standard units (`volt`,
`second`, etc.) that are predefined by Brian. For example, the unit
``Nm`` has the dimensions "length²·mass/time²", and therefore the same
dimensions as the standard unit `joule`. The latter will be used for display
purposes:
>>> 3*joule
3. * joule
>>> 3*Nm
3. * joule
"""
__slots__ = ["dim", "scale", "_dispname", "_name", "_latexname", "iscompound"]
__array_priority__ = 100
automatically_register_units = True
#### CONSTRUCTION ####
def __new__(
    cls,
    arr,
    dim=None,
    scale=0,
    name=None,
    dispname=None,
    latexname=None,
    iscompound=False,
    dtype=None,
    copy=False,
):
    """
    Allocate the underlying Quantity. ``force_quantity=True`` is needed
    because a Unit must be a real Quantity object even when dimensionless.
    The name/scale attributes are set later in `__init__`.
    """
    if dim is None:
        dim = DIMENSIONLESS
    obj = super().__new__(
        cls, arr, dim=dim, dtype=dtype, copy=copy, force_quantity=True
    )
    return obj
def __array_finalize__(self, orig):
    """
    Copy the unit metadata when numpy creates a new array from this one
    (views, slices, copies), with sensible defaults for non-Unit sources.
    """
    self.dim = getattr(orig, "dim", DIMENSIONLESS)
    self.scale = getattr(orig, "scale", 0)
    self._name = getattr(orig, "_name", "")
    self._dispname = getattr(orig, "_dispname", "")
    self._latexname = getattr(orig, "_latexname", "")
    # BUG FIX: the attribute is stored as "iscompound" (see __init__); the
    # previous lookup of "_iscompound" never matched, so the compound flag
    # was silently reset to False on every view/copy.
    self.iscompound = getattr(orig, "iscompound", False)
    return self
def __init__(
    self,
    value,
    dim=None,
    scale=0,
    name=None,
    dispname=None,
    latexname="",
    iscompound=False,
):
    """
    Initialize the unit's metadata (dimensions, scale and the various
    name representations) and register the unit for best-unit display.
    The numerical value must equal ``10 ** scale`` by construction.
    """
    if value != 10.0**scale:
        raise AssertionError(
            f"Unit value has to be 10**scale (scale={scale}, value={value})"
        )
    if dim is None:
        dim = DIMENSIONLESS
    self.dim = dim  #: The Dimensions of this unit
    #: The scale for this unit (as the integer exponent of 10), i.e.
    #: a scale of 3 means 10^3, e.g. for a "k" prefix.
    self.scale = scale
    # Fall back to representations derived from the dimensions when no
    # explicit names were given.
    if name is None:
        if dim is DIMENSIONLESS:
            name = "Unit(1)"
        else:
            name = repr(dim)
    if dispname is None:
        if dim is DIMENSIONLESS:
            dispname = "1"
        else:
            dispname = str(dim)
    #: The full name of this unit.
    self._name = name
    #: The display name of this unit.
    self._dispname = dispname
    #: A LaTeX expression for the name of this unit.
    self._latexname = latexname
    #: Whether this unit is a combination of other units.
    self.iscompound = iscompound
    if Unit.automatically_register_units:
        register_new_unit(self)
@staticmethod
def create(dim, name, dispname, latexname=None, scale=0):
    """
    Create a new named unit.
    Parameters
    ----------
    dim : `Dimension`
        The dimensions of the unit.
    name : `str`
        The full name of the unit, e.g. ``'volt'``
    dispname : `str`
        The display name, e.g. ``'V'``
    latexname : str, optional
        The name as a LaTeX expression (math mode is assumed, do not add
        $ signs or similar), e.g. ``'\\omega'``. If no `latexname` is
        specified, `dispname` will be used.
    scale : int, optional
        The scale of this unit as an exponent of 10, e.g. -3 for a unit that
        is 1/1000 of the base scale. Defaults to 0 (i.e. a base unit).
    Returns
    -------
    u : `Unit`
        The new unit.
    """
    name = str(name)
    dispname = str(dispname)
    if latexname is None:
        # Default LaTeX name: upright text rendering of the display name.
        latexname = f"\\mathrm{{{dispname}}}"
    u = Unit(
        10.0**scale,
        dim=dim,
        scale=scale,
        name=name,
        dispname=dispname,
        latexname=latexname,
    )
    return u
@staticmethod
def create_scaled_unit(baseunit, scalefactor):
    """
    Create a scaled unit from a base unit.
    Parameters
    ----------
    baseunit : `Unit`
        The unit of which to create a scaled version, e.g. ``volt``,
        ``amp``.
    scalefactor : `str`
        The scaling factor, e.g. ``"m"`` for mvolt, mamp
    Returns
    -------
    u : `Unit`
        The new unit.
    """
    name = scalefactor + baseunit.name
    dispname = scalefactor + baseunit.dispname
    scale = _siprefixes[scalefactor] + baseunit.scale
    # "micro" is written as a Greek mu in LaTeX output.
    if scalefactor == "u":
        scalefactor = r"\mu"
    latexname = f"\\mathrm{{{scalefactor}}}{baseunit.latexname}"
    u = Unit(
        10.0**scale,
        dim=baseunit.dim,
        name=name,
        dispname=dispname,
        latexname=latexname,
        scale=scale,
    )
    return u
#### METHODS ####
# The three setters below only exist to give a helpful error message:
# unit names are immutable after creation.
def set_name(self, name):
    """Sets the name for the unit.
    .. deprecated:: 2.1
        Create a new unit with `Unit.create` instead.
    """
    raise NotImplementedError(
        "Setting the name for a unit after"
        "its creation is no longer supported, use"
        "'Unit.create' to create a new unit."
    )
def set_display_name(self, name):
    """Sets the display name for the unit.
    .. deprecated:: 2.1
        Create a new unit with `Unit.create` instead.
    """
    raise NotImplementedError(
        "Setting the display name for a unit after"
        "its creation is no longer supported, use"
        "'Unit.create' to create a new unit."
    )
def set_latex_name(self, name):
    """Sets the LaTeX name for the unit.
    .. deprecated:: 2.1
        Create a new unit with `Unit.create` instead.
    """
    raise NotImplementedError(
        "Setting the LaTeX name for a unit after"
        "its creation is no longer supported, use"
        "'Unit.create' to create a new unit."
    )
# Read-only in practice: the fset functions raise NotImplementedError.
name = property(
    fget=lambda self: self._name, fset=set_name, doc="The name of the unit"
)
dispname = property(
    fget=lambda self: self._dispname,
    fset=set_display_name,
    doc="The display name of the unit",
)
latexname = property(
    fget=lambda self: self._latexname,
    fset=set_latex_name,
    doc="The LaTeX name of the unit",
)
#### REPRESENTATION ####
def __repr__(self):
    # Full name, valid as Python code (e.g. "usiemens / (cmetre ** 2)").
    return self.name
def __str__(self):
    # Short display name (e.g. "uS/cm^2").
    return self.dispname
def _latex(self, *args):
    return self.latexname
def _repr_latex_(self):
    # Rich display hook used by Jupyter notebooks.
    return f"${latex(self)}$"
#### ARITHMETIC ####
def __mul__(self, other):
    # Unit * Unit yields a new compound unit with combined names,
    # dimensions and scales; anything else falls back to Quantity.
    if isinstance(other, Unit):
        name = f"{self.name} * {other.name}"
        dispname = f"{self.dispname} {other.dispname}"
        latexname = f"{self.latexname}\\,{other.latexname}"
        scale = self.scale + other.scale
        u = Unit(
            10.0**scale,
            dim=self.dim * other.dim,
            name=name,
            dispname=dispname,
            latexname=latexname,
            iscompound=True,
            scale=scale,
        )
        return u
    else:
        return super().__mul__(other)
def __rmul__(self, other):
    return self.__mul__(other)
def __div__(self, other):
if isinstance(other, Unit):
if self.iscompound:
dispname = f"({self.dispname})"
name = f"({self.name})"
else:
dispname = self.dispname
name = self.name
dispname += "/"
name += " / "
if other.iscompound:
dispname += f"({other.dispname})"
name += f"({other.name})"
else:
dispname += other.dispname
name += other.name
latexname = rf"\frac{{{self.latexname}}}{{{other.latexname}}}"
scale = self.scale - other.scale
u = Unit(
10.0**scale,
dim=self.dim / other.dim,
name=name,
dispname=dispname,
latexname=latexname,
scale=scale,
iscompound=True,
)
return u
else:
return super().__div__(other)
def __rdiv__(self, other):
    """Reflected division ``other / self``.

    ``Unit / Unit`` is delegated to `__div__`.  The special case
    ``1 / unit`` (a dimensionless ``other`` equal to 1) is turned into
    ``unit ** -1`` so that the result is again a proper `Unit`.
    """
    if isinstance(other, Unit):
        return other.__div__(self)
    else:
        try:
            if is_dimensionless(other) and other == 1:
                return self**-1
        except (ValueError, TypeError, DimensionMismatchError):
            # `other` could not be interpreted as a plain dimensionless
            # number -- fall through to generic Quantity division.
            pass
        return super().__rdiv__(other)
def __pow__(self, other):
    """Raise a unit to a power.

    For a scalar exponent the result is a new compound `Unit`; compound
    bases are parenthesized in the generated names.  Non-scalar exponents
    are delegated to the base-class implementation.

    NOTE(review): ``self.scale * other`` assumes an exponent for which a
    power-of-ten scale stays meaningful (typically an integer) -- confirm
    the expected exponent types.
    """
    if is_scalar_type(other):
        if self.iscompound:
            dispname = f"({self.dispname})"
            name = f"({self.name})"
            latexname = r"\left(%s\right)" % self.latexname
        else:
            dispname = self.dispname
            name = self.name
            latexname = self.latexname
        dispname += f"^{str(other)}"
        name += f" ** {repr(other)}"
        latexname += "^{%s}" % latex(other)
        # The power-of-ten scale is multiplied by the exponent.
        scale = self.scale * other
        u = Unit(
            10.0**scale,
            dim=self.dim**other,
            name=name,
            dispname=dispname,
            latexname=latexname,
            scale=scale,
            iscompound=True,
        )  # To avoid issues with units like (second ** -1) ** -1
        return u
    else:
        return super().__pow__(other)
# Units are meant to be immutable (see the disabled set_* methods above),
# so every in-place arithmetic operator is rejected: e.g. ``volt *= 2``
# must fail loudly instead of silently altering a unit object.
def __iadd__(self, other):
    raise TypeError("Units cannot be modified in-place")

def __isub__(self, other):
    raise TypeError("Units cannot be modified in-place")

def __imul__(self, other):
    raise TypeError("Units cannot be modified in-place")

def __idiv__(self, other):
    raise TypeError("Units cannot be modified in-place")

def __itruediv__(self, other):
    raise TypeError("Units cannot be modified in-place")

def __ifloordiv__(self, other):
    raise TypeError("Units cannot be modified in-place")

def __imod__(self, other):
    raise TypeError("Units cannot be modified in-place")

def __ipow__(self, other, modulo=None):
    raise TypeError("Units cannot be modified in-place")
def __eq__(self, other):
    """Two units are equal iff they share the same dimensions and scale.

    Comparison with non-`Unit` objects is delegated to `Quantity`.
    """
    if isinstance(other, Unit):
        return other.dim is self.dim and other.scale == self.scale
    else:
        return Quantity.__eq__(self, other)

def __ne__(self, other):
    """Inequality consistent with `__eq__` for Unit-vs-Unit comparisons.

    Fix: the class previously only defined ``__neq__``, which is not a
    special method name Python recognizes, so ``unit != unit`` fell back
    to the (elementwise) base-class ``__ne__`` -- inconsistent with the
    boolean result of ``__eq__``.  Non-Unit operands keep the previous
    base-class behavior.
    """
    if isinstance(other, Unit):
        return not self.__eq__(other)
    return Quantity.__ne__(self, other)

# Kept for backward compatibility with callers that invoked it directly;
# the operator hook Python actually uses is ``__ne__`` above.
def __neq__(self, other):
    return not self.__eq__(other)

def __hash__(self):
    # Consistent with __eq__: equal units hash equally.
    return hash((self.dim, self.scale))
class UnitRegistry:
    """
    Stores known units for printing in best units.

    All a user needs to do is to use the `register_new_unit`
    function.

    Default registries:
    The units module defines three registries, the standard units,
    user units, and additional units. Finding best units is done
    by first checking standard, then user, then additional. New
    user units are added by using the `register_new_unit` function.
    Standard units includes all the basic non-compound unit names
    built in to the module, including volt, amp, etc. Additional
    units defines some compound units like newton metre (Nm) etc.

    Methods
    -------
    add
    __getitem__
    """

    def __init__(self):
        # Units keyed by their ``repr`` name, in registration order.
        self.units = collections.OrderedDict()
        # Maps a Dimension to a dict {scale factor (float): Unit}.
        self.units_for_dimensions = collections.defaultdict(dict)

    def add(self, u):
        """Add a unit to the registry"""
        self.units[repr(u)] = u
        self.units_for_dimensions[u.dim][float(u)] = u

    def __getitem__(self, x):
        """Returns the best unit for quantity x

        The algorithm is to consider the value:
        m=abs(x/u)
        for all matching units u. We select the unit where this ratio is the
        closest to 10 (if it is an array with several values, we select the
        unit where the deviations from that are the smallest. More precisely,
        the unit that minimizes the sum of (log10(m)-1)**2 over all entries).
        """
        matching = self.units_for_dimensions.get(x.dim, {})
        if len(matching) == 0:
            raise KeyError("Unit not found in registry.")
        # Fix: was ``np.array(..., copy=False)``.  Under NumPy >= 2.0,
        # ``copy=False`` means "never copy, raise if a copy is required"
        # (always the case for a list); np.asarray keeps the old
        # "copy only if needed" semantics.
        matching_values = np.asarray(list(matching.keys()))
        print_opts = np.get_printoptions()
        edgeitems, threshold = print_opts["edgeitems"], print_opts["threshold"]
        if x.size > threshold:
            # Only care about optimizing the units for the values that will
            # actually be shown later
            # The code looks a bit complex, but should return the same numbers
            # that are shown by numpy's string conversion
            slices = []
            for shape in x.shape:
                if shape > 2 * edgeitems:
                    slices.append((slice(0, edgeitems), slice(-edgeitems, None)))
                else:
                    slices.append((slice(None),))
            x_flat = np.hstack(
                [x[use_slices].flatten() for use_slices in itertools.product(*slices)]
            )
        else:
            # Same NumPy 2.0 compatibility fix as above.
            x_flat = np.asarray(x).flatten()
        floatreps = np.tile(np.abs(x_flat), (len(matching), 1)).T / matching_values
        # ignore zeros, they are well represented in any unit
        floatreps[floatreps == 0] = np.nan
        if np.all(np.isnan(floatreps)):
            return matching[1.0]  # all zeros, use the base unit
        deviations = np.nansum((np.log10(floatreps) - 1) ** 2, axis=0)
        return list(matching.values())[deviations.argmin()]
def register_new_unit(u):
    """Register a new unit for automatic displaying of quantities

    Parameters
    ----------
    u : `Unit`
        The unit that should be registered.

    Examples
    --------
    >>> from brian2 import *
    >>> 2.0*farad/metre**2
    2. * metre ** -4 * kilogram ** -1 * second ** 4 * amp ** 2
    >>> register_new_unit(pfarad / mmetre**2)
    >>> 2.0*farad/metre**2
    2000000. * pfarad / (mmetre ** 2)
    """
    # User-defined units go into their own registry so the best-unit
    # lookup can consult them separately from the built-in units.
    user_unit_register.add(u)
# Module-level registries; `get_unit` searches them in the order
# standard -> user -> additional.
#: `UnitRegistry` containing all the standard units (metre, kilogram, um2...)
standard_unit_register = UnitRegistry()
#: `UnitRegistry` containing additional units (newton*metre, farad / metre, ...)
additional_unit_register = UnitRegistry()
#: `UnitRegistry` containing all units defined by the user
user_unit_register = UnitRegistry()
def get_unit(d):
    """
    Find an unscaled unit (e.g. `volt` but not `mvolt`) for a `Dimension`.

    Parameters
    ----------
    d : `Dimension`
        The dimension to find a unit for.

    Returns
    -------
    u : `Unit`
        A registered unscaled `Unit` for the dimensions ``d``, or a new `Unit`
        if no unit was found.
    """
    # Search order matters: standard units take precedence over user units,
    # which take precedence over additional (compound) units.
    for registry in (
        standard_unit_register,
        user_unit_register,
        additional_unit_register,
    ):
        # A scale factor of 1.0 marks the unscaled variant of a unit.
        candidates = registry.units_for_dimensions[d]
        if 1.0 in candidates:
            return candidates[1.0]
    # Nothing registered for these dimensions: synthesize a plain unit.
    return Unit(1.0, dim=d)
def get_unit_for_display(d):
    """
    Return a string representation of an appropriate unscaled unit or ``'1'``
    for a dimensionless quantity.

    Parameters
    ----------
    d : `Dimension` or int
        The dimension to find a unit for.

    Returns
    -------
    s : str
        A string representation of the respective unit or the string ``'1'``.
    """
    # Dimensionless quantities can be given either as the DIMENSIONLESS
    # singleton or as the plain integer 1; both are displayed as "1".
    dimensionless = d is DIMENSIONLESS or (isinstance(d, int) and d == 1)
    return "1" if dimensionless else str(get_unit(d))
#### DECORATORS
def check_units(**au):
    """Decorator to check units of arguments passed to a function

    Examples
    --------
    >>> from brian2.units import *
    >>> @check_units(I=amp, R=ohm, wibble=metre, result=volt)
    ... def getvoltage(I, R, **k):
    ...     return I*R

    You don't have to check the units of every variable in the function, and
    you can define what the units should be for variables that aren't
    explicitly named in the definition of the function. For example, the code
    above checks that the variable wibble should be a length, so writing

    >>> getvoltage(1*amp, 1*ohm, wibble=1)  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    DimensionMismatchError: Function "getvoltage" variable "wibble" has wrong dimensions, dimensions were (1) (m)

    fails, but

    >>> getvoltage(1*amp, 1*ohm, wibble=1*metre)
    1. * volt

    passes. String arguments or ``None`` are not checked

    >>> getvoltage(1*amp, 1*ohm, wibble='hello')
    1. * volt

    By using the special name ``result``, you can check the return value of the
    function.

    You can also use ``1`` or ``bool`` as a special value to check for a
    unitless number or a boolean value, respectively:

    >>> @check_units(value=1, absolute=bool, result=bool)
    ... def is_high(value, absolute=False):
    ...     if absolute:
    ...         return abs(value) >= 5
    ...     else:
    ...         return value >= 5

    This will then again raise an error if the argument if not of the expected
    type:

    >>> is_high(7)
    True
    >>> is_high(-7, True)
    True
    >>> is_high(3, 4)  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    TypeError: Function "is_high" expected a boolean value for argument "absolute" but got 4.

    If the return unit depends on the unit of an argument, you can also pass
    a function that takes the units of all the arguments as its inputs (in the
    order specified in the function header):

    >>> @check_units(result=lambda d: d**2)
    ... def square(value):
    ...     return value**2

    If several arguments take arbitrary units but they have to be
    consistent among each other, you can state the name of another argument as
    a string to state that it uses the same unit as that argument.

    >>> @check_units(summand_1=None, summand_2='summand_1')
    ... def multiply_sum(multiplicand, summand_1, summand_2):
    ...     "Calculates multiplicand*(summand_1 + summand_2)"
    ...     return multiplicand*(summand_1 + summand_2)
    >>> multiply_sum(3, 4*mV, 5*mV)
    27. * mvolt
    >>> multiply_sum(3*nA, 4*mV, 5*mV)
    27. * pwatt
    >>> multiply_sum(3*nA, 4*mV, 5*nA)  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    brian2.units.fundamentalunits.DimensionMismatchError: Function 'multiply_sum' expected the same arguments for arguments 'summand_1', 'summand_2', but argument 'summand_1' has unit V, while argument 'summand_2' has unit A.

    Raises
    ------
    DimensionMismatchError
        In case the input arguments or the return value do not have the
        expected dimensions.
    TypeError
        If an input argument or return value was expected to be a boolean but
        is not.

    Notes
    -----
    This decorator will destroy the signature of the original function, and
    replace it with the signature ``(*args, **kwds)``. Other decorators will
    do the same thing, and this decorator critically needs to know the signature
    of the function it is acting on, so it is important that it is the first
    decorator to act on a function. It cannot be used in combination with
    another decorator that also needs to know the signature of the function.

    Note that the ``bool`` type is "strict", i.e. it expects a proper
    boolean value and does not accept 0 or 1. This is not the case the other
    way round, declaring an argument or return value as "1" *does* allow for a
    ``True`` or ``False`` value.
    """

    def do_check_units(f):
        def new_f(*args, **kwds):
            # Collect all checkable arguments in one dict: start with the
            # keyword arguments, then merge in the positional ones by name.
            newkeyset = kwds.copy()
            arg_names = f.__code__.co_varnames[0 : f.__code__.co_argcount]
            for n, v in zip(arg_names, args[0 : f.__code__.co_argcount]):
                # Quantities, strings, booleans and None pass through
                # unchanged; other values for declared arguments are coerced
                # to Quantity so their dimensions can be checked below.
                if (
                    not isinstance(v, (Quantity, str, bool))
                    and v is not None
                    and n in au
                ):
                    try:
                        # allow e.g. to pass a Python list of values
                        v = Quantity(v)
                    except TypeError:
                        if have_same_dimensions(au[n], 1):
                            raise TypeError(
                                f"Argument {n} is not a unitless value/array."
                            )
                        else:
                            raise TypeError(
                                f"Argument '{n}' is not a quantity, "
                                "expected a quantity with dimensions "
                                f"{au[n]}"
                            )
                newkeyset[n] = v
            for k in newkeyset:
                # string variables are allowed to pass, the presumption is they
                # name another variable. None is also allowed, useful for
                # default parameters
                if (
                    k in au
                    and not isinstance(newkeyset[k], str)
                    and not newkeyset[k] is None
                    and not au[k] is None
                ):
                    if au[k] == bool:
                        # Strict boolean check: 0/1 are not accepted.
                        if not isinstance(newkeyset[k], bool):
                            value = newkeyset[k]
                            error_message = (
                                f"Function '{f.__name__}' "
                                "expected a boolean value "
                                f"for argument '{k}' but got "
                                f"'{value}'"
                            )
                            raise TypeError(error_message)
                    elif isinstance(au[k], str):
                        # A string declaration means: "same units as the
                        # argument of that name".
                        if not au[k] in newkeyset:
                            error_message = (
                                f"Function '{f.__name__}' "
                                "expected its argument to have the "
                                f"same units as argument '{k}', but "
                                "there is no argument of that name"
                            )
                            raise TypeError(error_message)
                        if not have_same_dimensions(newkeyset[k], newkeyset[au[k]]):
                            d1 = get_dimensions(newkeyset[k])
                            d2 = get_dimensions(newkeyset[au[k]])
                            error_message = (
                                f"Function '{f.__name__}' expected "
                                f"the argument '{k}' to have the same "
                                f"units as argument '{au[k]}', but "
                                f"argument '{k}' has "
                                f"unit {get_unit_for_display(d1)}, "
                                f"while argument '{au[k]}' "
                                f"has unit {get_unit_for_display(d2)}."
                            )
                            raise DimensionMismatchError(error_message)
                    elif not have_same_dimensions(newkeyset[k], au[k]):
                        # Declared with a concrete unit (or 1): dimensions
                        # must match exactly.
                        unit = repr(au[k])
                        value = newkeyset[k]
                        error_message = (
                            f"Function '{f.__name__}' "
                            "expected a quantitity with unit "
                            f"{unit} for argument '{k}' but got "
                            f"'{value}'"
                        )
                        raise DimensionMismatchError(
                            error_message, get_dimensions(newkeyset[k])
                        )
            result = f(*args, **kwds)
            # Optionally check the return value, declared under the special
            # name "result" (bool, a unit, or a callable mapping the argument
            # dimensions to the expected result dimensions).
            if "result" in au:
                if isinstance(au["result"], Callable) and au["result"] != bool:
                    expected_result = au["result"](*[get_dimensions(a) for a in args])
                else:
                    expected_result = au["result"]
                if au["result"] == bool:
                    if not isinstance(result, bool):
                        error_message = (
                            "The return value of function "
                            f"'{f.__name__}' was expected to be "
                            "a boolean value, but was of type "
                            f"{type(result)}"
                        )
                        raise TypeError(error_message)
                elif not have_same_dimensions(result, expected_result):
                    unit = get_unit_for_display(expected_result)
                    error_message = (
                        "The return value of function "
                        f"'{f.__name__}' was expected to have "
                        f"unit {unit} but was "
                        f"'{result}'"
                    )
                    raise DimensionMismatchError(error_message, get_dimensions(result))
            return result

        new_f._orig_func = f
        new_f.__doc__ = f.__doc__
        new_f.__name__ = f.__name__
        # store the information in the function, necessary when using the
        # function in expressions or equations
        if hasattr(f, "_orig_arg_names"):
            arg_names = f._orig_arg_names
        else:
            arg_names = f.__code__.co_varnames[: f.__code__.co_argcount]
        new_f._arg_names = arg_names
        new_f._arg_units = [au.get(name, None) for name in arg_names]
        return_unit = au.get("result", None)
        if return_unit is None:
            new_f._return_unit = None
        else:
            new_f._return_unit = return_unit
        if return_unit == bool:
            new_f._returns_bool = True
        else:
            new_f._returns_bool = False
        new_f._orig_arg_names = arg_names
        # copy any annotation attributes
        if hasattr(f, "_annotation_attributes"):
            for attrname in f._annotation_attributes:
                setattr(new_f, attrname, getattr(f, attrname))
        new_f._annotation_attributes = getattr(f, "_annotation_attributes", []) + [
            "_arg_units",
            "_arg_names",
            "_return_unit",
            "_orig_func",
            "_returns_bool",
        ]
        return new_f

    return do_check_units
/DI_engine-0.4.9-py3-none-any.whl/ding/framework/middleware/functional/data_processor.py | import os
from typing import TYPE_CHECKING, Callable, List, Union, Tuple, Dict, Optional
from easydict import EasyDict
from ditk import logging
import torch
from ding.data import Buffer, Dataset, DataLoader, offline_data_save_type
from ding.data.buffer.middleware import PriorityExperienceReplay
from ding.framework import task
from ding.utils import get_rank
if TYPE_CHECKING:
from ding.framework import OnlineRLContext, OfflineRLContext
def data_pusher(cfg: EasyDict, buffer_: Buffer, group_by_env: Optional[bool] = None):
    """
    Overview:
        Push episodes or trajectories into the buffer.
    Arguments:
        - cfg (:obj:`EasyDict`): Config.
        - buffer (:obj:`Buffer`): Buffer to push the data in.
        - group_by_env (:obj:`Optional[bool]`): Whether to tag each pushed \
            transition with the id of the environment it came from (read from \
            ``t.env_data_id``); only applies to trajectories.
    """
    # In distributed mode only the learner process needs to fill the buffer.
    if task.router.is_active and not task.has_role(task.role.LEARNER):
        return task.void()

    def _push(ctx: "OnlineRLContext"):
        """
        Overview:
            In ctx, either `ctx.trajectories` or `ctx.episodes` should not be None.
        Input of ctx:
            - trajectories (:obj:`List[Dict]`): Trajectories.
            - episodes (:obj:`List[Dict]`): Episodes.
        """
        if ctx.trajectories is not None:  # each data in buffer is a transition
            if group_by_env:
                # Fix: the index from `enumerate` was never used.
                for t in ctx.trajectories:
                    buffer_.push(t, {'env': t.env_data_id.item()})
            else:
                for t in ctx.trajectories:
                    buffer_.push(t)
            ctx.trajectories = None
        elif ctx.episodes is not None:  # each data in buffer is a episode
            for t in ctx.episodes:
                buffer_.push(t)
            ctx.episodes = None
        else:
            raise RuntimeError("Either ctx.trajectories or ctx.episodes should be not None.")

    return _push
def buffer_saver(cfg: EasyDict, buffer_: Buffer, every_envstep: int = 1000, replace: bool = False):
    """
    Overview:
        Save current buffer data.
    Arguments:
        - cfg (:obj:`EasyDict`): Config.
        - buffer (:obj:`Buffer`): Buffer to push the data in.
        - every_envstep (:obj:`int`): save at every env step.
        - replace (:obj:`bool`): Whether replace the last file.
    """
    # Start negative so that the very first call to _save already triggers a dump.
    buffer_saver_env_counter = -every_envstep

    def _save(ctx: "OnlineRLContext"):
        """
        Overview:
            In ctx, `ctx.env_step` should not be None.
        Input of ctx:
            - env_step (:obj:`int`): env step.
        """
        nonlocal buffer_saver_env_counter
        if ctx.env_step is not None:
            # Only dump once at least `every_envstep` env steps have elapsed
            # since the previous save.
            if ctx.env_step >= every_envstep + buffer_saver_env_counter:
                buffer_saver_env_counter = ctx.env_step
                if replace:
                    # Overwrite a single "latest" snapshot file.
                    buffer_.save_data(os.path.join(cfg.exp_name, "replaybuffer", "data_latest.hkl"))
                else:
                    # Keep one snapshot file per env-step milestone.
                    buffer_.save_data(
                        os.path.join(cfg.exp_name, "replaybuffer", "data_envstep_{}.hkl".format(ctx.env_step))
                    )
        else:
            raise RuntimeError("buffer_saver only supports collecting data by step rather than episode.")

    return _save
def offpolicy_data_fetcher(
    cfg: EasyDict,
    buffer_: Union[Buffer, List[Tuple[Buffer, float]], Dict[str, Buffer]],
    data_shortage_warning: bool = False,
) -> Callable:
    """
    Overview:
        The return function is a generator which meanly fetch a batch of data from a buffer, \
        a list of buffers, or a dict of buffers.
    Arguments:
        - cfg (:obj:`EasyDict`): Config which should contain the following keys: `cfg.policy.learn.batch_size`.
        - buffer (:obj:`Union[Buffer, List[Tuple[Buffer, float]], Dict[str, Buffer]]`): \
            The buffer where the data is fetched from. \
            ``Buffer`` type means a buffer.\
            ``List[Tuple[Buffer, float]]`` type means a list of tuple. In each tuple there is a buffer and a float. \
            The float defines, how many batch_size is the size of the data \
            which is sampled from the corresponding buffer.\
            ``Dict[str, Buffer]`` type means a dict in which the value of each element is a buffer. \
            For each key-value pair of dict, batch_size of data will be sampled from the corresponding buffer \
            and assigned to the same key of `ctx.train_data`.
        - data_shortage_warning (:obj:`bool`): Whether to output warning when data shortage occurs in fetching.
    """

    def _fetch(ctx: "OnlineRLContext"):
        """
        Input of ctx:
            - train_output (:obj:`Union[Dict, Deque[Dict]]`): This attribute should exist \
                if `buffer_` is of type Buffer and if `buffer_` use the middleware `PriorityExperienceReplay`. \
                The meta data `priority` of the sampled data in the `buffer_` will be updated \
                to the `priority` attribute of `ctx.train_output` if `ctx.train_output` is a dict, \
                or the `priority` attribute of `ctx.train_output`'s popped element \
                if `ctx.train_output` is a deque of dicts.
        Output of ctx:
            - train_data (:obj:`Union[List[Dict], Dict[str, List[Dict]]]`): The fetched data. \
                ``List[Dict]`` type means a list of data.
                `train_data` is of this type if the type of `buffer_` is Buffer or List.
                ``Dict[str, List[Dict]]]`` type means a dict, in which the value of each key-value pair
                is a list of data. `train_data` is of this type if the type of `buffer_` is Dict.
        """
        try:
            unroll_len = cfg.policy.collect.unroll_len
            if isinstance(buffer_, Buffer):
                if unroll_len > 1:
                    # Sample whole unrolled sequences, grouped per environment.
                    buffered_data = buffer_.sample(
                        cfg.policy.learn.batch_size, groupby="env", unroll_len=unroll_len, replace=True
                    )
                    ctx.train_data = [[t.data for t in d] for d in buffered_data]  # B, unroll_len
                else:
                    buffered_data = buffer_.sample(cfg.policy.learn.batch_size)
                    ctx.train_data = [d.data for d in buffered_data]
            elif isinstance(buffer_, List):  # like sqil, r2d3
                assert unroll_len == 1, "not support"
                buffered_data = []
                for buffer_elem, p in buffer_:
                    # Each buffer contributes a fraction `p` of the batch size.
                    data_elem = buffer_elem.sample(int(cfg.policy.learn.batch_size * p))
                    assert data_elem is not None
                    buffered_data.append(data_elem)
                buffered_data = sum(buffered_data, [])
                ctx.train_data = [d.data for d in buffered_data]
            elif isinstance(buffer_, Dict):  # like ppg_offpolicy
                assert unroll_len == 1, "not support"
                buffered_data = {k: v.sample(cfg.policy.learn.batch_size) for k, v in buffer_.items()}
                ctx.train_data = {k: [d.data for d in v] for k, v in buffered_data.items()}
            else:
                raise TypeError("not support buffer argument type: {}".format(type(buffer_)))
            assert buffered_data is not None
        except (ValueError, AssertionError):
            # Sampling failed, typically because the buffer does not hold
            # enough data yet; skip this training iteration.
            if data_shortage_warning:
                # You can modify data collect config to avoid this warning, e.g. increasing n_sample, n_episode.
                # Fetcher will skip this attempt.
                logging.warning(
                    "Replay buffer's data is not enough to support training, so skip this training to wait more data."
                )
            ctx.train_data = None
            return
        yield
        # Resumed after the training step: write updated meta information
        # (e.g. per-sample priorities) back into the buffer.
        if isinstance(buffer_, Buffer):
            if any([isinstance(m, PriorityExperienceReplay) for m in buffer_._middleware]):
                index = [d.index for d in buffered_data]
                meta = [d.meta for d in buffered_data]
                # such as priority
                if isinstance(ctx.train_output, List):
                    priority = ctx.train_output.pop()['priority']
                else:
                    priority = ctx.train_output['priority']
                for idx, m, p in zip(index, meta, priority):
                    m['priority'] = p
                    buffer_.update(index=idx, data=None, meta=m)

    return _fetch
def offline_data_fetcher_from_mem(cfg: EasyDict, dataset: Dataset) -> Callable:
    """
    Overview:
        Fetch offline training batches via a background producer thread that
        stacks samples into batches and moves them to the target device ahead
        of time, buffering them through a bounded queue.
    Arguments:
        - cfg (:obj:`EasyDict`): Config; uses ``cfg.policy.batch_size`` and \
            ``cfg.policy.cuda``.
        - dataset (:obj:`Dataset`): Map-style dataset the batches are read from.
    """
    from threading import Thread
    from queue import Queue
    import time
    # NOTE(review): the CUDA stream is created unconditionally when this
    # wrapper is called, even when ``cfg.policy.cuda`` is False -- confirm
    # this is safe on CPU-only machines.
    stream = torch.cuda.Stream()

    def producer(queue, dataset, batch_size, device):
        torch.set_num_threads(4)
        nonlocal stream
        idx_iter = iter(range(len(dataset)))
        with torch.cuda.stream(stream):
            while True:
                if queue.full():
                    # Consumer is behind; back off briefly.
                    time.sleep(0.1)
                else:
                    try:
                        start_idx = next(idx_iter)
                    except StopIteration:
                        # Dataset exhausted: restart from the beginning.
                        del idx_iter
                        idx_iter = iter(range(len(dataset)))
                        start_idx = next(idx_iter)
                    # NOTE(review): start indices advance by 1 per batch, so
                    # consecutive batches overlap heavily, and indices near the
                    # end may run past len(dataset) -- confirm this is intended.
                    data = [dataset.__getitem__(idx) for idx in range(start_idx, start_idx + batch_size)]
                    # Transpose list-of-samples into list-of-fields, then stack
                    # each field into a tensor on the target device.
                    data = [[i[j] for i in data] for j in range(len(data[0]))]
                    data = [torch.stack(x).to(device) for x in data]
                    queue.put(data)

    queue = Queue(maxsize=50)
    device = 'cuda:{}'.format(get_rank() % torch.cuda.device_count()) if cfg.policy.cuda else 'cpu'
    producer_thread = Thread(
        target=producer, args=(queue, dataset, cfg.policy.batch_size, device), name='cuda_fetcher_producer'
    )

    def _fetch(ctx: "OfflineRLContext"):
        # Lazily start the producer on first use, then block until a batch
        # is available.
        # NOTE(review): if the producer thread ever terminates, `start()`
        # would be invoked again on an already-started Thread and raise
        # RuntimeError -- confirm this path cannot occur.
        nonlocal queue, producer_thread
        if not producer_thread.is_alive():
            time.sleep(5)
            producer_thread.start()
        while queue.empty():
            time.sleep(0.001)
        ctx.train_data = queue.get()

    return _fetch
def offline_data_fetcher(cfg: EasyDict, dataset: Dataset) -> Callable:
    """
    Overview:
        The outer function transforms a Pytorch `Dataset` to `DataLoader`. \
        The return function is a generator which each time fetches a batch of data from the previous `DataLoader`.\
        Please refer to the link https://pytorch.org/tutorials/beginner/basics/data_tutorial.html \
        and https://pytorch.org/docs/stable/data.html for more details.
    Arguments:
        - cfg (:obj:`EasyDict`): Config which should contain the following keys: `cfg.policy.learn.batch_size`.
        - dataset (:obj:`Dataset`): The dataset of type `torch.utils.data.Dataset` which stores the data.
    """
    # collate_fn is executed in policy now
    dataloader = DataLoader(dataset, batch_size=cfg.policy.learn.batch_size, shuffle=True, collate_fn=lambda x: x)
    dataloader = iter(dataloader)

    def _fetch(ctx: "OfflineRLContext"):
        """
        Overview:
            Every time this generator is iterated, the fetched data will be assigned to ctx.train_data. \
            After the dataloader is empty, the attribute `ctx.train_epoch` will be incremented by 1.
        Input of ctx:
            - train_epoch (:obj:`int`): Number of `train_epoch`.
        Output of ctx:
            - train_data (:obj:`List[Tensor]`): The fetched data batch.
        """
        nonlocal dataloader
        try:
            ctx.train_data = next(dataloader)  # noqa
        except StopIteration:
            # One full pass over the dataset finished: bump the epoch counter
            # and rebuild a freshly shuffled dataloader.
            ctx.train_epoch += 1
            del dataloader
            dataloader = DataLoader(
                dataset, batch_size=cfg.policy.learn.batch_size, shuffle=True, collate_fn=lambda x: x
            )
            dataloader = iter(dataloader)
            ctx.train_data = next(dataloader)
        # TODO apply data update (e.g. priority) in offline setting when necessary

    return _fetch
def offline_data_saver(data_path: str, data_type: str = 'hdf5') -> Callable:
    """
    Overview:
        Build a middleware that dumps the expert data of offline RL to disk.
    Arguments:
        - data_path (:obj:`str`): File path where the expert data will be written into, which is usually ./expert.pkl'.
        - data_type (:obj:`str`): Define the type of the saved data. \
            The type of saved data is pkl if `data_type == 'naive'`. \
            The type of saved data is hdf5 if `data_type == 'hdf5'`.
    """

    def _save(ctx: "OnlineRLContext"):
        """
        Input of ctx:
            - trajectories (:obj:`List[Tensor]`): The expert data to be saved.
        """
        trajectories = ctx.trajectories
        offline_data_save_type(trajectories, data_path, data_type)
        # Hand-off complete: clear the context so the data is not saved twice.
        ctx.trajectories = None

    return _save
def sqil_data_pusher(cfg: EasyDict, buffer_: Buffer, expert: bool) -> Callable:
    """
    Overview:
        Push trajectories into the buffer in the sqil learning pipeline,
        relabeling rewards on the way: expert transitions get reward 1,
        agent transitions get reward 0.
    Arguments:
        - cfg (:obj:`EasyDict`): Config.
        - buffer (:obj:`Buffer`): Buffer to push the data in.
        - expert (:obj:`bool`): Whether the pushed data is expert data or not. \
            In each element of the pushed data, the reward will be set to 1 if this attribute is `True`, otherwise 0.
    """
    # `expert` is fixed for the lifetime of this pusher, so pick the reward
    # relabeling function once instead of branching per transition.
    relabel = torch.ones_like if expert else torch.zeros_like

    def _pusher(ctx: "OnlineRLContext"):
        """
        Input of ctx:
            - trajectories (:obj:`List[Dict]`): The trajectories to be pushed.
        """
        for transition in ctx.trajectories:
            transition.reward = relabel(transition.reward)
            buffer_.push(transition)
        ctx.trajectories = None

    return _pusher
/BRAILS-3.0.1.tar.gz/BRAILS-3.0.1/brails/modules/FoundationClassifier/csail_segmentation_tool/csail_seg/utils.py | import sys
import os
import logging
import re
import functools
import fnmatch
import numpy as np
def setup_logger(distributed_rank=0, filename="log.txt"):
    """Create (or fetch) the shared "Logger" and attach a stdout handler.

    :param distributed_rank: Rank of the current process; only rank 0 gets a
        handler, so non-master processes stay silent.
    :param filename: NOTE(review): currently unused -- no file handler is
        ever attached.  Confirm whether file logging was intended.
    :return: The configured ``logging.Logger`` instance.
    """
    logger = logging.getLogger("Logger")
    logger.setLevel(logging.DEBUG)
    # don't log results for the non-master process
    if distributed_rank > 0:
        return logger
    # NOTE(review): a new StreamHandler is appended on every rank-0 call, so
    # calling this repeatedly duplicates log lines -- confirm callers invoke
    # it only once.
    ch = logging.StreamHandler(stream=sys.stdout)
    ch.setLevel(logging.DEBUG)
    fmt = "[%(asctime)s %(levelname)s %(filename)s line %(lineno)d %(process)d] %(message)s"
    ch.setFormatter(logging.Formatter(fmt))
    logger.addHandler(ch)
    return logger
def find_recursive(root_dir, ext='.jpg'):
    """Return the paths of all files under *root_dir* whose names end in *ext*."""
    matches = []
    pattern = '*' + ext
    for dirpath, _subdirs, names in os.walk(root_dir):
        matches.extend(os.path.join(dirpath, name)
                       for name in fnmatch.filter(names, pattern))
    return matches
class AverageMeter(object):
    """Tracks the most recent value plus a running (weighted) mean."""

    def __init__(self):
        self.initialized = False
        self.val = None    # most recently recorded value
        self.avg = None    # running weighted mean
        self.sum = None    # weighted sum of all recorded values
        self.count = None  # total weight seen so far

    def initialize(self, val, weight):
        """Seed the statistics from the first observation."""
        self.val = val
        self.avg = val
        self.sum = val * weight
        self.count = weight
        self.initialized = True

    def update(self, val, weight=1):
        """Record one observation with the given weight."""
        if self.initialized:
            self.add(val, weight)
        else:
            self.initialize(val, weight)

    def add(self, val, weight):
        """Fold one observation into the running statistics."""
        self.val = val
        self.sum += val * weight
        self.count += weight
        self.avg = self.sum / self.count

    def value(self):
        """Return the most recently recorded value."""
        return self.val

    def average(self):
        """Return the running weighted mean."""
        return self.avg
def unique(ar, return_index=False, return_inverse=False, return_counts=False):
    """Find the sorted unique elements of an array.

    Mirrors ``np.unique``: returns the unique values, optionally together
    with the first-occurrence indices, the inverse mapping and the counts
    (appended to the result tuple in that order).

    Fix: the empty-input branch previously built its optional outputs with
    ``np.bool`` -- that alias was removed in NumPy 1.24, and index-like
    outputs should be integer anyway, so ``np.intp`` is used here.
    """
    ar = np.asanyarray(ar).flatten()
    optional_indices = return_index or return_inverse
    optional_returns = optional_indices or return_counts
    if ar.size == 0:
        # Degenerate case: produce matching empty outputs.
        if not optional_returns:
            ret = ar
        else:
            ret = (ar,)
            if return_index:
                ret += (np.empty(0, np.intp),)
            if return_inverse:
                ret += (np.empty(0, np.intp),)
            if return_counts:
                ret += (np.empty(0, np.intp),)
        return ret
    if optional_indices:
        # Stable sort so that return_index reports the *first* occurrence.
        perm = ar.argsort(kind='mergesort' if return_index else 'quicksort')
        aux = ar[perm]
    else:
        ar.sort()
        aux = ar
    # flag marks the first element of every run of equal values.
    flag = np.concatenate(([True], aux[1:] != aux[:-1]))
    if not optional_returns:
        ret = aux[flag]
    else:
        ret = (aux[flag],)
        if return_index:
            ret += (perm[flag],)
        if return_inverse:
            # Map every original position to the index of its unique value.
            iflag = np.cumsum(flag) - 1
            inv_idx = np.empty(ar.shape, dtype=np.intp)
            inv_idx[perm] = iflag
            ret += (inv_idx,)
        if return_counts:
            # Run lengths = differences between run start positions.
            idx = np.concatenate(np.nonzero(flag) + ([ar.size],))
            ret += (np.diff(idx),)
    return ret
def colorEncode(labelmap, colors, mode='RGB'):
    """Map an integer label map of shape (H, W) to a color image (H, W, 3).

    :param labelmap: 2-D array of integer class labels; negative labels are
        treated as "unlabeled" and left black.
    :param colors: per-class color triples, indexed by label.
    :param mode: 'RGB' (default) or 'BGR' channel order of the output.
    :return: uint8 array of shape (H, W, 3).
    """
    labelmap = labelmap.astype('int')
    labelmap_rgb = np.zeros((labelmap.shape[0], labelmap.shape[1], 3),
                            dtype=np.uint8)
    for label in unique(labelmap):
        if label < 0:
            # Negative labels mean "unlabeled": leave those pixels black.
            continue
        # Fix: cast the per-label contribution to uint8 before accumulating.
        # When `colors` holds plain Python ints the product is int64, and an
        # in-place += into the uint8 buffer violates NumPy's 'same_kind'
        # casting rule and raises on modern NumPy.
        labelmap_rgb += ((labelmap == label)[:, :, np.newaxis] *
                         np.tile(colors[label],
                                 (labelmap.shape[0], labelmap.shape[1], 1))
                         ).astype(np.uint8)
    if mode == 'BGR':
        return labelmap_rgb[:, :, ::-1]
    else:
        return labelmap_rgb
def accuracy(preds, label):
    """Pixel accuracy over positions carrying a non-negative label.

    Returns ``(accuracy, num_valid)`` where ``num_valid`` is the number of
    labeled positions; a tiny epsilon keeps the division safe when no
    position is labeled.
    """
    valid_mask = label >= 0
    num_valid = valid_mask.sum()
    num_correct = np.logical_and(valid_mask, preds == label).sum()
    return float(num_correct) / (num_valid + 1e-10), num_valid
def intersectionAndUnion(imPred, imLab, numClass):
    """Per-class intersection and union pixel counts for a prediction vs.
    a ground-truth label map.

    Labels are shifted by +1 so histogram bins 1..numClass cover the classes
    and 0 marks unlabeled pixels.  Predictions at unlabeled positions are
    zeroed out first, so detections there are neither rewarded nor penalized.
    Returns ``(area_intersection, area_union)``, each of length ``numClass``.
    """
    pred = np.asarray(imPred).copy() + 1
    lab = np.asarray(imLab).copy() + 1
    # Remove predictions wherever the ground truth is unlabeled.
    pred = pred * (lab > 0)
    hist_args = dict(bins=numClass, range=(1, numClass))
    # Pixels where prediction and label agree keep their (shifted) class id.
    matched = pred * (pred == lab)
    (area_intersection, _) = np.histogram(matched, **hist_args)
    (area_pred, _) = np.histogram(pred, **hist_args)
    (area_lab, _) = np.histogram(lab, **hist_args)
    return (area_intersection, area_pred + area_lab - area_intersection)
class NotSupportedCliException(Exception):
    """Raised when a device specification string cannot be parsed."""
    pass
def process_range(xpu, inp):
    """Expand a (start, end) string pair into an iterator of device names
    '<xpu><start>', ..., '<xpu><end>' (inclusive, order-insensitive)."""
    lo, hi = sorted(map(int, inp))
    return map(lambda n: '{}{}'.format(xpu, n), range(lo, hi + 1))
# Recognized device-specification token forms; each pattern maps a user
# token to a list of canonical 'gpu<N>' names (tried in order).
REGEX = [
    # 'gpu3'  -> ['gpu3']
    (re.compile(r'^gpu(\d+)$'), lambda x: ['gpu%s' % x[0]]),
    # '3'     -> ['gpu3']
    (re.compile(r'^(\d+)$'), lambda x: ['gpu%s' % x[0]]),
    # 'gpu0-gpu3' or 'gpu0-3' -> ['gpu0', 'gpu1', 'gpu2', 'gpu3']
    (re.compile(r'^gpu(\d+)-(?:gpu)?(\d+)$'),
     functools.partial(process_range, 'gpu')),
    # '0-3'   -> ['gpu0', 'gpu1', 'gpu2', 'gpu3']
    (re.compile(r'^(\d+)-(\d+)$'),
     functools.partial(process_range, 'gpu')),
]
def parse_devices(input_devices):
    """Parse user's devices input str to standard format.
    e.g. [gpu0, gpu1, ...]

    :param input_devices: comma-separated device tokens, e.g. "gpu0,1-3".
    :return: list of unique 'gpu<N>' names, in first-seen order.
    :raises NotSupportedCliException: if a token matches none of the
        patterns in REGEX.
    """
    ret = []
    for d in input_devices.split(','):
        # Try each known token pattern; the first match wins.
        for regex, func in REGEX:
            m = regex.match(d.lower().strip())
            if m:
                tmp = func(m.groups())
                # prevent duplicate
                for x in tmp:
                    if x not in ret:
                        ret.append(x)
                break
        else:
            # for/else: executed only when no pattern matched this token.
            raise NotSupportedCliException(
                'Can not recognize device: "{}"'.format(d))
    return ret
def add_function(x, y, b, m=None):
    """Add two numbers given as strings in base ``b`` (2 <= b <= 16),
    optionally reducing the result modulo ``m``.

    Digits above 9 use lowercase 'a'..'f'.  Negative operands are handled by
    delegating to ``subtract_function``; modular reduction in non-decimal
    bases is delegated to ``simple_division``.  The result is returned as a
    string in base ``b``.
    """
    # Digit-character -> value lookup for bases up to 16.
    base = {'0': 0, '1': 1, '2': 2, '3': 3,
            '4': 4, '5': 5, '6': 6, '7': 7,
            '8': 8, '9': 9, 'a': 10, 'b': 11,
            'c': 12, 'd': 13, 'e': 14, 'f': 15}
    X = str(x)
    Y = str(y)
    carry = 0
    # Trivial cases: adding zero.
    if x == '0' and m is None:
        return y
    if y == '0' and m is None:
        return x
    if y == '0' and x == '0':
        return '0'
    if b == 10:
        # Decimal: Python's int arithmetic handles signs and modulo directly.
        if m is None:
            return str(int(x) + int(y))
        else:
            # Fix: this path previously returned an int while every other
            # path returns a string.
            return str((int(x) + int(y)) % int(m))
    if X[0] == '-' and Y[0] == '-':
        # (-x) + (-y) == -(x + y).
        # Fix: the minus sign of the result was previously dropped.
        X = X.replace('-', '')
        Y = Y.replace('-', '')
        return '-' + add_function(X, Y, b)
    if X[0] == '-':
        # (-x) + y == y - x
        X = X.replace('-', '')
        return subtract_function(Y, X, b)
    if Y[0] == '-':
        # x + (-y) == x - y
        Y = Y.replace('-', '')
        return subtract_function(X, Y, b)
    if b >= 2 and b <= 16:
        result = []
        maxL = max(len(X), len(Y))
        X = X.zfill(maxL)
        Y = Y.zfill(maxL)
        X2 = list(X)
        Y2 = list(Y)
        # make sure the modulo has the same length as x and y
        if m is not None:
            m = m.zfill(maxL)
        # convert digit characters (0-9, a-f) into integer values
        for i in range(maxL):
            X2[i] = base.get(X2[i])
            Y2[i] = base.get(Y2[i])
        # primary-school addition, least significant digit first
        for i in range(1, maxL + 1):
            dig = X2[-i] + Y2[-i] + carry
            if dig >= b:
                carry = 1
                dig %= b
            else:
                carry = 0
            result.append(dig)
        if carry == 1:
            result.append(str(carry))
        # Map integer digit values back to characters (values >= 10 -> a..f).
        # Fix: the inverse map was rebuilt on every loop iteration.
        invMap = {v: k for k, v in base.items()}
        for i in range(maxL):
            result[i] = invMap.get(result[i])
        answer = ''.join(result[::-1])
        # if m is given, reduce modulo m and keep the remainder
        if m is not None:
            answer = simple_division(answer, m, b)[1]
        # Fix: compare strings with '==', not the identity operator 'is'.
        if answer[0] == "0":
            answer = answer[1:]
        return answer
/DeFiLlama-1.1.0.tar.gz/DeFiLlama-1.1.0/defillama/defillama.py | import requests
# --------- Constants --------- #
# Root URL of the public DeFi Llama REST API; endpoint paths are appended to it.
BASE_URL = "https://api.llama.fi"
# --------- Constants --------- #
class DefiLlama:
    """Client for the public DeFi Llama HTTP API.

    All requests can be made through this class; a single
    ``requests.Session`` is reused for every call.
    """

    def __init__(self):
        """Create the client and its underlying HTTP session."""
        self.session = requests.Session()

    def _send_message(self, method, endpoint, params=None, data=None):
        """Perform one HTTP request against the API and decode the JSON body.

        :param method: HTTP method (get, post, delete, etc.)
        :param endpoint: Endpoint path appended to ``BASE_URL``.
        :param params: Optional query-string parameters.
        :param data: Optional JSON-encoded string payload for POST.
        :return: dict/list: decoded JSON response.
        """
        return self.session.request(
            method, BASE_URL + endpoint, params=params, data=data, timeout=30
        ).json()

    def _get(self, endpoint, params=None):
        """Shorthand for a GET request to *endpoint*.

        :param endpoint: Endpoint path appended to ``BASE_URL``.
        :param params: Optional query-string parameters.
        :return: decoded JSON response.
        """
        return self._send_message('GET', endpoint, params=params)

    def get_all_protocols(self):
        """List every protocol with its current TVL and recent changes.

        Endpoint: GET /protocols

        :return: JSON response
        """
        return self._get('/protocols')

    def get_protocol(self, name):
        """Historical TVL data plus basic metadata for one protocol.

        Endpoint: GET /protocol/{name}

        :param name: Protocol id (eg: uniswap, WBTC...), as listed by the
            /protocols endpoint.
        :return: JSON response
        """
        return self._get(f'/protocol/{name}')

    def get_historical_tvl(self):
        """Historical values of the total TVL summed over all protocols.

        Endpoint: GET /charts

        :return: JSON response
        """
        return self._get('/charts')

    def get_protocol_tvl(self, name):
        """Historical total-TVL series for a single protocol; convenient
        for spreadsheet imports.

        Endpoint: GET /tvl/{name}

        :param name: Protocol id (eg: uniswap, WBTC...), as listed by the
            /protocols endpoint.
        :return: JSON response
        """
        return self._get(f'/tvl/{name}')
/FreeClimb-4.5.0-py3-none-any.whl/freeclimb/model/incoming_number_result_all_of.py | import re # noqa: F401
import sys # noqa: F401
from freeclimb.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from freeclimb.exceptions import ApiAttributeError
def lazy_import():
    """Import Capabilities lazily and publish it into module globals.

    Deferring the import avoids circular-import problems at module load
    time; openapi_types()/additional_properties_type() call this first.
    """
    from freeclimb.model.capabilities import Capabilities

    globals()["Capabilities"] = Capabilities
class IncomingNumberResultAllOf(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model declares no enum-restricted attributes.
    allowed_values = {
    }

    # This model declares no length/range/regex validations.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Resolve Capabilities without a circular import at module load time.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # A JSON null is not accepted in place of this model itself.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'capabilities': (Capabilities,),  # noqa: E501
            'campaign_id': (str, none_type,),  # noqa: E501
            'phone_number_id': (str, none_type,),  # noqa: E501
            'account_id': (str, none_type,),  # noqa: E501
            'application_id': (str, none_type,),  # noqa: E501
            'phone_number': (str, none_type,),  # noqa: E501
            'alias': (str, none_type,),  # noqa: E501
            'region': (str, none_type,),  # noqa: E501
            'country': (str, none_type,),  # noqa: E501
            'voice_enabled': (bool, none_type,),  # noqa: E501
            'sms_enabled': (bool, none_type,),  # noqa: E501
            'offnet': (bool, none_type,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This schema uses no oneOf/anyOf discriminator.
        return None

    # Maps pythonic (snake_case) attribute names to their JSON (camelCase) keys.
    attribute_map = {
        'capabilities': 'capabilities',  # noqa: E501
        'campaign_id': 'campaignId',  # noqa: E501
        'phone_number_id': 'phoneNumberId',  # noqa: E501
        'account_id': 'accountId',  # noqa: E501
        'application_id': 'applicationId',  # noqa: E501
        'phone_number': 'phoneNumber',  # noqa: E501
        'alias': 'alias',  # noqa: E501
        'region': 'region',  # noqa: E501
        'country': 'country',  # noqa: E501
        'voice_enabled': 'voiceEnabled',  # noqa: E501
        'sms_enabled': 'smsEnabled',  # noqa: E501
        'offnet': 'offnet',  # noqa: E501
    }

    # No attributes are declared read-only in the OpenAPI document.
    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """IncomingNumberResultAllOf - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            capabilities (Capabilities): [optional]  # noqa: E501
            campaign_id (str, none_type): The campaign ID generated by the campaign registry. [optional]  # noqa: E501
            phone_number_id (str, none_type): String that uniquely identifies this phone number resource.. [optional]  # noqa: E501
            account_id (str, none_type): ID of the account that owns this phone number.. [optional]  # noqa: E501
            application_id (str, none_type): ID of the Application that FreeClimb should contact if a Call or SMS arrives for this phone number or a Call from this number is placed. An incoming phone number is not useful until associated with an applicationId.. [optional]  # noqa: E501
            phone_number (str, none_type): Phone number in E.164 format.. [optional]  # noqa: E501
            alias (str, none_type): Description for this phone number. Typically the conventionally-formatted version of the phone number.. [optional]  # noqa: E501
            region (str, none_type): State or province of this phone number.. [optional]  # noqa: E501
            country (str, none_type): Country of this phone number.. [optional]  # noqa: E501
            voice_enabled (bool, none_type): Indicates whether the phone number can handle Calls. Typically set to true for all numbers.. [optional]  # noqa: E501
            sms_enabled (bool, none_type): Indication of whether the phone number can handle sending and receiving SMS messages. Typically set to true for all numbers.. [optional]  # noqa: E501
            offnet (bool, none_type): The offnet field is a boolean representing whether the number is offnet registered or not. This field will be rendered only for requests to the IncomingPhone number resource.. [optional]  # noqa: E501
        """

        # Framework bookkeeping kwargs are stripped before attribute assignment.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass OpenApiModel.__new__ so deserialization can build the
        # instance directly.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            # Unknown keys are silently dropped only when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes that are always stored directly on the instance,
    # never treated as model properties.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """IncomingNumberResultAllOf - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            capabilities (Capabilities): [optional]  # noqa: E501
            campaign_id (str, none_type): The campaign ID generated by the campaign registry. [optional]  # noqa: E501
            phone_number_id (str, none_type): String that uniquely identifies this phone number resource.. [optional]  # noqa: E501
            account_id (str, none_type): ID of the account that owns this phone number.. [optional]  # noqa: E501
            application_id (str, none_type): ID of the Application that FreeClimb should contact if a Call or SMS arrives for this phone number or a Call from this number is placed. An incoming phone number is not useful until associated with an applicationId.. [optional]  # noqa: E501
            phone_number (str, none_type): Phone number in E.164 format.. [optional]  # noqa: E501
            alias (str, none_type): Description for this phone number. Typically the conventionally-formatted version of the phone number.. [optional]  # noqa: E501
            region (str, none_type): State or province of this phone number.. [optional]  # noqa: E501
            country (str, none_type): Country of this phone number.. [optional]  # noqa: E501
            voice_enabled (bool, none_type): Indicates whether the phone number can handle Calls. Typically set to true for all numbers.. [optional]  # noqa: E501
            sms_enabled (bool, none_type): Indication of whether the phone number can handle sending and receiving SMS messages. Typically set to true for all numbers.. [optional]  # noqa: E501
            offnet (bool, none_type): The offnet field is a boolean representing whether the number is offnet registered or not. This field will be rendered only for requests to the IncomingPhone number resource.. [optional]  # noqa: E501
        """

        # Framework bookkeeping kwargs are stripped before attribute assignment.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            # Unknown keys are silently dropped only when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be populated from server data
            # via _from_openapi_data, never by user construction.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
/Client_API_VN-2.11.1.tar.gz/Client_API_VN-2.11.1/README.rst | =============
Client_API_VN
=============
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/psf/black
.. image:: https://img.shields.io/pypi/status/Client-API-VN
:alt: PyPI - Status
.. image:: https://img.shields.io/pypi/pyversions/Client-API-VN
:alt: PyPI - Python Version
.. image:: https://img.shields.io/pypi/l/Client-API-VN
:alt: PyPI - License
.. image:: https://codecov.io/gh/dthonon/Client_API_VN/branch/develop/graph/badge.svg
:target: https://codecov.io/gh/dthonon/Client_API_VN
Presentation
============
Python applications that use Biolovision/VisioNature (VN) API to:
- download data from VN sites and stores it to a Postgresql database.
- update sightings directly in VN site
Applications are available either as:
- Python modules from PyPI
- Docker images from Docker Hub
They are tested under Linux Ubuntu >20 or Debian 10. Other Linux
distributions could work. Windows is not tested at all and will
probably not work.
See `Documentation <https://client-api-vn1.readthedocs.io/en/stable/>`_
for more information.
A thin Python layer on top of Biolovision API is provided, as described in
`API Manual <https://client-api-vn1.readthedocs.io/en/stable/api/modules.html>`_.
Installation - Python
---------------------
These instructions present the steps required to install the
Python applications.
Windows:
Install Python from Microsoft store
Add python script directory to Path, as described in
`How to add Python to Windows PATH <https://datatofish.com/add-python-to-windows-path/>`_.
Linux: add the following debian packages::
sudo apt -y install build-essential python3-dev python3-venv
Create a python virtual environment, activate it and update basic tools::
python3 -m venv env_VN
source env_VN/bin/activate
python -m pip install --upgrade pip
Install from PyPI::
pip install Client-API-VN
Installation - Docker
---------------------
These instructions present the steps required to install the
Docker applications::
docker pull dthonon/client-api-vn
docker run --name xfer_vn \
--mount source=xfer_vn,target=/root \
--workdir /root \
--tty --interactive \
dthonon/client-api-vn bash
The following steps are the common to both Python and Docker installation.
Getting Started - transfer_vn
-----------------------------
See `Documentation <https://client-api-vn1.readthedocs.io/en/latest/apps/transfer_vn.html>`__
for more information.
Getting Started - update_vn
---------------------------
See `Documentation <https://client-api-vn1.readthedocs.io/en/latest/apps/update_vn.html>`__
for more information.
Prerequisites
-------------
For Linux and Postgresql installation, refer to
`Server installation <https://client-api-vn1.readthedocs.io/en/latest/apps/server_install.html>`_.
Installation requires the following python module::
pip
All other python dependencies are managed by pip install.
| PypiClean |
/NeodroidVision-0.3.0-py36-none-any.whl/neodroidvision/segmentation/gmm/visualisation.py | import os
import numpy
from matplotlib import cm, patches, pyplot
__all__ = ["visualise_3d_gmm", "visualise_2D_gmm"]
def plot_sphere(
    w=0, c=(0, 0, 0), r=(1, 1, 1), sub_divisions=10, ax=None, sigma_multiplier=3
):
    """Draw one ellipsoid surface on a 3D axis.

    Input:
        w: scalar, mapped through the 'jet' colormap to colour the surface
        c: 3 elements list, sphere center
        r: 3 element list, sphere original scale in each axis (allows ellipsoids)
        sub_divisions: scalar, number of subdivisions (sub_divisions^2 surface samples)
        ax: optional pyplot axis object to plot the sphere in
        sigma_multiplier: additional scale (choosing an std value when plotting gaussians)
    Output:
        ax: pyplot axis object
    """
    if ax is None:
        ax = pyplot.figure().add_subplot(111, projection="3d")
    # Sample the unit sphere on a (phi, theta) grid.
    phi, theta = numpy.mgrid[
        0.0 : numpy.pi : complex(0, sub_divisions),
        0.0 : 2.0 * numpy.pi : complex(0, sub_divisions),
    ]
    # Scale each axis by sigma_multiplier * r and translate to the centre c.
    xs = sigma_multiplier * r[0] * numpy.sin(phi) * numpy.cos(theta) + c[0]
    ys = sigma_multiplier * r[1] * numpy.sin(phi) * numpy.sin(theta) + c[1]
    zs = sigma_multiplier * r[2] * numpy.cos(phi) + c[2]
    # Colour the whole surface by the weight w via the 'jet' colormap.
    mappable = cm.ScalarMappable()
    mappable.set_cmap("jet")
    surface_colour = mappable.to_rgba(w)
    ax.plot_surface(xs, ys, zs, color=surface_colour, alpha=0.2, linewidth=1)
    return ax
def visualise_3d_gmm(points, w, mu, std_dev, export=False):
    """Plot 3D samples together with the GMM ellipsoids that describe them.

    Input:
        points: N X 3, sampled points (assumed grouped by component in
            equal-sized contiguous runs)
        w: n_components, gmm weights
        mu: 3 X n_components, gmm means
        std_dev: 3 X n_components, gmm standard deviation (diagonal covariance)
        export: when True, also save the figure to images/3D_GMM_demonstration.png
    Output:
        None
    """
    n_components = mu.shape[1]
    per_component = int(numpy.round(points.shape[0] / n_components))
    # Visualize data
    fig = pyplot.figure(figsize=(8, 8))
    axes = fig.add_subplot(111, projection="3d")
    axes.set_xlim([-1, 1])
    axes.set_ylim([-1, 1])
    axes.set_zlim([-1, 1])
    pyplot.set_cmap("Set1")
    colours = cm.Set1(numpy.linspace(0, 1, n_components))
    for component in range(n_components):
        lo = component * per_component
        hi = lo + per_component
        axes.scatter(
            points[lo:hi, 0],
            points[lo:hi, 1],
            points[lo:hi, 2],
            alpha=0.3,
            c=colours[component],
        )
        plot_sphere(w=w[component], c=mu[:, component], r=std_dev[:, component], ax=axes)
    pyplot.title("3D GMM")
    axes.set_xlabel("X")
    axes.set_ylabel("Y")
    axes.set_zlabel("Z")
    axes.view_init(35.246, 45)
    if export:
        if not os.path.exists("images/"):
            os.mkdir("images/")
        pyplot.savefig("images/3D_GMM_demonstration.png", dpi=100, format="png")
    pyplot.show()
def visualise_2D_gmm(points, w, mu, std_dev, export=False):
    """Plot 2D samples together with concentric GMM covariance ellipses.

    Input:
        points: N X 2, sampled points (assumed grouped by component in
            equal-sized contiguous runs)
        w: n_components, gmm weights
        mu: 2 X n_components, gmm means
        std_dev: 2 X n_components, gmm standard deviation (diagonal covariance)
        export: when True, also save the figure to images/2D_GMM_demonstration.png
    Output:
        None
    """
    n_components = mu.shape[1]
    per_component = int(numpy.round(points.shape[0] / n_components))
    # Visualize data
    pyplot.figure(figsize=(8, 8))
    axes = pyplot.gca()
    axes.set_xlim([-1, 1])
    axes.set_ylim([-1, 1])
    pyplot.set_cmap("Set1")
    colours = cm.Set1(numpy.linspace(0, 1, n_components))
    for component in range(n_components):
        lo = component * per_component
        hi = lo + per_component
        pyplot.scatter(points[lo:hi, 0], points[lo:hi, 1], alpha=0.3, c=colours[component])
        # Eight nested unfilled ellipses of growing size and fading alpha,
        # one per multiple of the component's standard deviation.
        for ring in range(8):
            axes.add_patch(
                patches.Ellipse(
                    mu[:, component],
                    width=(ring + 1) * std_dev[0, component],
                    height=(ring + 1) * std_dev[1, component],
                    fill=False,
                    color=[0.0, 0.0, 1.0, 1.0 / (0.5 * ring + 1)],
                )
            )
    pyplot.title("GMM")
    pyplot.xlabel("X")
    pyplot.ylabel("Y")
    if export:
        if not os.path.exists("images/"):
            os.mkdir("images/")
        pyplot.savefig("images/2D_GMM_demonstration.png", dpi=100, format="png")
    pyplot.show()
/OGN_Flogger-0.3.2a14.tar.gz/OGN_Flogger-0.3.2a14/src/flarm_db.py | import string
import requests
import sqlite3
import time
import flogger_settings
from flogger_OGN_db import ogndb
# import unicodedata
# def flarmdb (flarmnet, flogger_db, flarm_data):
def flarmdb (flarmnet, cursor, database, flarm_data, settings):
#
#-----------------------------------------------------------------
# Initialise flarm table in local db with Flarm ID to registration mappings.
# This can be from Flarmnet or OGN, use OGN by default
#-----------------------------------------------------------------
#
#
print "flarmdb use: ", settings.FLOGGER_OGN_DB_URL
if settings.FLOGGER_OGN_DB_URL <> "":
print "Use OGN database"
ogndb(settings.FLOGGER_OGN_DB_URL, cursor, database, flarm_data, settings)
return True
print "Use Flarmnet database"
dbflarm = database
try:
# flarmnet_db is at "http://www.flarmnet.org/files/data.fln"
flarmnet_db = flarmnet
r = requests.get(flarmnet_db)
except:
print "Failed to connect to flarmnet db, exit"
exit()
print "flarm_data is: ", flarm_data
data = r.content
# flm = open("flarm_data", "w")
# flm_txt = open("flarm_data_txt", "w")
flm = open(flarm_data, "w")
flm_ln = len(r.content) - 1
print "Flarm db length: ", flm_ln
try:
for i in range(0, flm_ln, 1):
c = "%c" % data[i]
flm.write(c)
except :
print "Error writing flarm_data"
exit()
flm.close()
# db = open("data.fln", 'r')
db = open(flarm_data, 'r')
# Read first line and convert to number
x = db.readline()
val = int(x, 16)
print "First line from FlarmNet data is : ", val
try:
print "flarm_db.py: Create flarm_db table"
cursor.execute('''CREATE TABLE IF NOT EXISTS
flarm_db(id INTEGER PRIMARY KEY, flarm_id TEXT, airport STRING, type TEXT, registration TEXT, radio TEXT)''')
print "flarm_db table created"
except Exception as e:
# Roll back any change if something goes wrong
print "Failed to create flarm_db"
dbflarm.rollback()
raise e
i = 1
line = ""
nos_lines = val
while True:
try:
line = db.readline()
line_lng = len(line)
# print "Line length is: ", line_lng
string = ""
# print "read: ", i, " returns: ", line
for j in range(0, 172, 2):
c = chr(int(line[j:j + 2], 16))
string = string + c
i = i + 1
# print "FlarmBD line: ", string
# v.decode("iso-8859-15").encode("utf-8")
ID = str(string[0:6]).decode("iso-8859-15").encode("utf-8")
# Airport = str(string[27:47]).decode("iso-8859-15").encode("utf-8", errors="replace")
try:
Airport = str(string[27:47]).decode("utf-8").encode("iso-8859-15")
# Airport = str(string[27:47]).decode("iso-8858-15").encode("iso-8859-15")
Airport = Airport.rstrip()
except:
print "Code error ", str(string[27:47]), " Row: ", i
Type = str(string[48:69]).decode("iso-8859-15").encode("utf-8")
Registration = str(string[69:75]).decode("iso-8859-15").encode("utf-8")
Radio = str(string[79:86]).decode("iso-8859-15").encode("utf-8")
# print "Line: ", i-1, " ID: ", ID, " Airport: ", Airport, " Type: ", Type, " Registration: ", Registration, " Radio: ", Radio
# row = "%s__%s__%s__%s__%s\n" % (ID, Airport, Type, Registration, Radio)
# flm_txt.write(row)
#
# Start Add aircraft type for tug logging
#
aircraft_type = 0
try:
aircraft_type_val = settings.FLOGGER_FLEET_LIST[Registration]
if aircraft_type_val >= 1 and aircraft_type_val < 100:
aircraft_type = 1
if aircraft_type_val >= 100 and aircraft_type_val < 200:
aircraft_type = 2
if aircraft_type_val >= 200 and aircraft_type_val < 300:
aircraft_type = 1
# print "Fleet list aircraft: ", Registration, " Type: ", str(aircraft_type)
except:
aircraft_type = 1 # Since it's not in the fleet list can't be a tug hence assume it's a glider
# print "Aircraft not in fleet list: ", Registration, " Type: ", str(aircraft_type)
aircraft_type = str(aircraft_type)
#
# End Add aircraft type for tug logging
#
# Note 'type': "F" is used as it means: Flarm device. Since this is taken from the Flarmnet db seems reasonable default
# since no value is supplied, to be compatible with OGN db
try:
cursor.execute('''INSERT INTO flarm_db(type, flarm_id, airport, aircraft_model, registration, aircraft_type)
VALUES(:type, :flarm_id, :airport, :aircraft_model, :registration, :aircraft_type)''',
{'type': "F", 'flarm_id': ID, 'airport': Airport, 'aircraft_model': Type, 'registration': Registration, 'aircraft_type': aircraft_type})
# try:
# cursor.execute('''INSERT INTO flarm_db(flarm_id, airport, type, registration, radio, aircraft_type)
# VALUES(:flarm_id, :airport, :type, :registration, :radio, :aircraft_type)''',
# {'flarm_id': ID, 'airport': Airport, 'type': Type, 'registration': Registration, 'radio': Radio, 'aircraft_type': aircraft_type})
# dbflarm.commit()
except :
print "Flarm_db insert failed ", Airport
dbflarm.commit()
return False
except:
print "Number of rows is: ", i - 1
# dbflarm.commit()
dbflarm.commit()
return True
return True
# dbflarm.commit()
# print "Start build Flarm DB: Test"
# t1 = time.time()
# flarmdb("http://www.flarmnet.org/files/data.fln", 'flogger.sql3', "flarm_data")
# t2 = time.time()
# print "End build Flarm DB in ", t2 - t1 , " seconds" | PypiClean |
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/overview.py |
from aqt.utils import openLink, shortcut, tooltip
from anki.utils import isMac
import aqt
from anki.sound import clearAudioQueue
class Overview(object):
    "Deck overview."

    def __init__(self, mw):
        # mw is the AnkiQt main window; the web views are shared with it.
        self.mw = mw
        self.web = mw.web
        self.bottom = aqt.toolbar.BottomBar(mw, mw.bottomWeb)

    def show(self):
        """Make this screen active: install handlers and render the page."""
        # Stop any audio still playing from the previous screen.
        clearAudioQueue()
        self.web.setLinkHandler(self._linkHandler)
        self.web.setKeyHandler(None)
        self.mw.keyHandler = self._keyHandler
        self.mw.web.setFocus()
        self.refresh()

    def refresh(self):
        """Recompute the scheduler counts and redraw both page areas."""
        self.mw.col.reset()
        self._renderPage()
        self._renderBottom()

    # Handlers
    ############################################################

    def _linkHandler(self, url):
        # Dispatch clicks on the links/buttons rendered by this screen.
        if url == "study":
            self.mw.col.startTimebox()
            self.mw.moveToState("review")
            # Still on the overview after the move => nothing was due.
            if self.mw.state == "overview":
                tooltip(_("No cards are due yet."))
        elif url == "anki":
            # NOTE(review): appears to be a stub - only prints to stdout.
            print "anki menu"
        elif url == "opts":
            self.mw.onDeckConf()
        elif url == "cram":
            deck = self.mw.col.decks.current()
            self.mw.onCram("'deck:%s'" % deck['name'])
        elif url == "refresh":
            # Rebuild the contents of the current filtered (dynamic) deck.
            self.mw.col.sched.rebuildDyn()
            self.mw.reset()
        elif url == "empty":
            # Return all cards in the filtered deck to their home decks.
            self.mw.col.sched.emptyDyn(self.mw.col.decks.selected())
            self.mw.reset()
        elif url == "decks":
            self.mw.moveToState("deckBrowser")
        elif url == "review":
            # NOTE(review): self.sidVer is only assigned when self.sid is
            # truthy; the "review" link is only rendered in that case
            # (see _renderPage).
            openLink(aqt.appShared+"info/%s?v=%s"%(self.sid, self.sidVer))
        elif url == "studymore":
            self.onStudyMore()
        elif url == "unbury":
            self.mw.col.sched.unburyCardsForDeck()
            self.mw.reset()
        elif url.lower().startswith("http"):
            openLink(url)

    def _keyHandler(self, evt):
        # Keyboard shortcuts; r/e only apply to filtered (cram) decks,
        # c only to regular decks.
        cram = self.mw.col.decks.current()['dyn']
        key = unicode(evt.text())
        if key == "o":
            self.mw.onDeckConf()
        if key == "r" and cram:
            self.mw.col.sched.rebuildDyn()
            self.mw.reset()
        if key == "e" and cram:
            self.mw.col.sched.emptyDyn(self.mw.col.decks.selected())
            self.mw.reset()
        if key == "c" and not cram:
            self.onStudyMore()
        if key == "u":
            self.mw.col.sched.unburyCardsForDeck()
            self.mw.reset()

    # HTML
    ############################################################

    def _renderPage(self):
        """Render the main overview HTML into the central web view."""
        but = self.mw.button  # (unused here; kept as-is)
        deck = self.mw.col.decks.current()
        # sid/sidVer identify a deck downloaded from AnkiWeb's shared list.
        self.sid = deck.get("sharedFrom")
        if self.sid:
            self.sidVer = deck.get("ver", None)
            shareLink = '<a class=smallLink href="review">Reviews and Updates</a>'
        else:
            shareLink = ""
        self.web.stdHtml(self._body % dict(
            deck=deck['name'],
            shareLink=shareLink,
            desc=self._desc(deck),
            table=self._table()
            ), self.mw.sharedCSS + self._css)

    def _desc(self, deck):
        """Return the deck-description HTML fragment for the page body."""
        if deck['dyn']:
            # Filtered decks get a fixed explanatory blurb.
            desc = _("""\
This is a special deck for studying outside of the normal schedule.""")
            desc += " " + _("""\
Cards will be automatically returned to their original decks after you review \
them.""")
            desc += " " + _("""\
Deleting this deck from the deck list will return all remaining cards \
to their original deck.""")
        else:
            desc = deck.get("desc", "")
        if not desc:
            return "<p>"
        if deck['dyn']:
            dyn = "dyn"
        else:
            dyn = ""
        return '<div class="descfont descmid description %s">%s</div>' % (
                dyn, desc)

    def _table(self):
        """Return the HTML for the due-counts table, or the finished message."""
        counts = list(self.mw.col.sched.counts())
        finished = not sum(counts)
        # Cap displayed counts so huge queues render compactly.
        for n in range(len(counts)):
            if counts[n] >= 1000:
                counts[n] = "1000+"
        but = self.mw.button
        if finished:
            return '<div style="white-space: pre-wrap;">%s</div>' % (
                self.mw.col.sched.finishedMsg())
        else:
            return '''
<table width=300 cellpadding=5>
<tr><td align=center valign=top>
<table cellspacing=5>
<tr><td>%s:</td><td><b><font color=#00a>%s</font></b></td></tr>
<tr><td>%s:</td><td><b><font color=#C35617>%s</font></b></td></tr>
<tr><td>%s:</td><td><b><font color=#0a0>%s</font></b></td></tr>
</table>
</td><td align=center>
%s</td></tr></table>''' % (
    _("New"), counts[0],
    _("Learning"), counts[1],
    _("To Review"), counts[2],
    but("study", _("Study Now"), id="study"))

    # Page template filled in by _renderPage via %-formatting.
    _body = """
<center>
<h3>%(deck)s</h3>
%(shareLink)s
%(desc)s
%(table)s
</center>
<script>$(function () { $("#study").focus(); });</script>
"""

    # Screen-specific CSS appended to the application-wide stylesheet.
    _css = """
.smallLink { font-size: 10px; }
h3 { margin-bottom: 0; }
.descfont {
padding: 1em; color: #333;
}
.description {
white-space: pre-wrap;
}
#fulldesc {
display:none;
}
.descmid {
width: 70%;
margin: 0 auto 0;
text-align: left;
}
.dyn {
text-align: center;
}
"""

    # Bottom area
    ######################################################################

    def _renderBottom(self):
        """Render the bottom toolbar buttons for the current deck type."""
        links = [
            ["O", "opts", _("Options")],
        ]
        # Filtered decks get Rebuild/Empty; regular decks get Custom Study.
        if self.mw.col.decks.current()['dyn']:
            links.append(["R", "refresh", _("Rebuild")])
            links.append(["E", "empty", _("Empty")])
        else:
            links.append(["C", "studymore", _("Custom Study")])
            #links.append(["F", "cram", _("Filter/Cram")])
        if self.mw.col.sched.haveBuried():
            links.append(["U", "unbury", _("Unbury")])
        buf = ""
        for b in links:
            if b[0]:
                # Turn the shortcut letter into a tooltip string.
                b[0] = _("Shortcut key: %s") % shortcut(b[0])
            buf += """
<button title="%s" onclick='py.link(\"%s\");'>%s</button>""" % tuple(b)
        self.bottom.draw(buf)
        if isMac:
            size = 28
        else:
            size = 36 + self.mw.fontHeightDelta*3
        self.bottom.web.setFixedHeight(size)
        self.bottom.web.setLinkHandler(self._linkHandler)

    # Studying more
    ######################################################################

    def onStudyMore(self):
        """Open the Custom Study dialog for the current deck."""
        import aqt.customstudy
        aqt.customstudy.CustomStudy(self.mw)
/EthTx-0.3.22.tar.gz/EthTx-0.3.22/ethtx/providers/static/tracer.js | {
// callstack is the current recursive call stack of the EVM execution.
callstack: [{}],
// descended tracks whether we've just descended from an outer transaction into
// an inner call.
descended: false,
returnData: undefined,
// step is invoked for every opcode that the VM executes.
	step: function(log, db) {
		// Capture any errors immediately
		var error = log.getError();
		if (error !== undefined) {
			this.fault(log, db);
			return;
		}
		// We only care about system opcodes, faster if we pre-check once.
		// All call-family opcodes (CREATE*, CALL*, RETURN, REVERT, SELFDESTRUCT) sit in the 0xf0 range.
		var syscall = (log.op.toNumber() & 0xf0) == 0xf0;
		if (syscall) {
			// 'var' is function-scoped, so 'op' remains visible (possibly undefined) below this block.
			var op = log.op.toString();
		}
		// Memorize the latest RETURN payload so the parent frame can expose it as its output.
		if (op === "RETURN") {
			var outOff = log.stack.peek(0).valueOf();
			var outEnd = outOff + log.stack.peek(1).valueOf();
			this.returnData = toHex(log.memory.slice(outOff, outEnd));
		}
		// If a new contract is being created, add to the call stack
		if (syscall && (op == 'CREATE' || op == "CREATE2")) {
			var inOff = log.stack.peek(1).valueOf();
			var inEnd = inOff + log.stack.peek(2).valueOf();
			// Assemble the internal call report and store for completion
			var call = {
				type: op,
				from: toHex(log.contract.getAddress()),
				input: toHex(log.memory.slice(inOff, inEnd)),
				gasIn: log.getGas(),
				gasCost: log.getCost(),
				value: '0x' + log.stack.peek(0).toString(16)
			};
			this.callstack.push(call);
			this.descended = true
			return;
		}
		// If a contract is being self destructed, gather that as a subcall too
		if (syscall && op == 'SELFDESTRUCT') {
			var left = this.callstack.length;
			if (this.callstack[left-1].calls === undefined) {
				this.callstack[left-1].calls = [];
			}
			// The destructed contract's entire remaining balance is the transferred value.
			this.callstack[left-1].calls.push({
				type: op,
				from: toHex(log.contract.getAddress()),
				to: toHex(toAddress(log.stack.peek(0).toString(16))),
				gasIn: log.getGas(),
				gasCost: log.getCost(),
				value: '0x' + db.getBalance(log.contract.getAddress()).toString(16)
			});
			return
		}
		// If a new method invocation is being done, add to the call stack
		if (syscall && (op == 'CALL' || op == 'CALLCODE' || op == 'DELEGATECALL' || op == 'STATICCALL')) {
			// Skip any pre-compile invocations, those are just fancy opcodes
			var to = toAddress(log.stack.peek(1).toString(16));
			if (isPrecompiled(to)) {
				return
			}
			// DELEGATECALL/STATICCALL carry no value argument, shifting the stack layout by one slot.
			var off = (op == 'DELEGATECALL' || op == 'STATICCALL' ? 0 : 1);
			var inOff = log.stack.peek(2 + off).valueOf();
			var inEnd = inOff + log.stack.peek(3 + off).valueOf();
			// Assemble the internal call report and store for completion
			var call = {
				type: op,
				from: toHex(log.contract.getAddress()),
				to: toHex(to),
				input: toHex(log.memory.slice(inOff, inEnd)),
				gasIn: log.getGas(),
				gasCost: log.getCost(),
				outOff: log.stack.peek(4 + off).valueOf(),
				outLen: log.stack.peek(5 + off).valueOf(),
			};
			if (op != 'DELEGATECALL' && op != 'STATICCALL') {
				call.value = '0x' + log.stack.peek(2).toString(16);
			}
			this.callstack.push(call);
			this.descended = true
			return;
		}
		// If we've just descended into an inner call, retrieve it's true allowance. We
		// need to extract if from within the call as there may be funky gas dynamics
		// with regard to requested and actually given gas (2300 stipend, 63/64 rule).
		if (this.descended) {
			if (log.getDepth() >= this.callstack.length) {
				this.callstack[this.callstack.length - 1].gas = log.getGas();
			} else {
				// TODO(karalabe): The call was made to a plain account. We currently don't
				// have access to the true gas amount inside the call and so any amount will
				// mostly be wrong since it depends on a lot of input args. Skip gas for now.
			}
			this.descended = false;
		}
		// If an existing call is returning, pop off the call stack
		if (syscall && op == 'REVERT') {
			this.callstack[this.callstack.length - 1].error = "execution reverted";
			return;
		}
		// The depth dropped below the stack height: the frame on top of our stack just ended.
		if (log.getDepth() == this.callstack.length - 1) {
			// Pop off the last call and get the execution results
			var call = this.callstack.pop();
			if (call.type == 'CREATE' || call.type == "CREATE2") {
				// If the call was a CREATE, retrieve the contract address and output code
				call.gasUsed = '0x' + bigInt(call.gasIn - call.gasCost - log.getGas()).toString(16);
				delete call.gasIn; delete call.gasCost;
				// A non-zero value on the stack is the address of the freshly deployed contract.
				var ret = log.stack.peek(0);
				if (!ret.equals(0)) {
					call.to = toHex(toAddress(ret.toString(16)));
					call.output = toHex(db.getCode(toAddress(ret.toString(16))));
				} else if (call.error === undefined) {
					call.error = "internal failure"; // TODO(karalabe): surface these faults somehow
				}
			} else {
				// If the call was a contract call, retrieve the gas usage and output
				if (call.gas !== undefined) {
					call.gasUsed = '0x' + bigInt(call.gasIn - call.gasCost + call.gas - log.getGas()).toString(16);
				}
				// A non-zero value on the stack signals success; surface the memorized RETURN data.
				var ret = log.stack.peek(0);
				if (!ret.equals(0)) {
					call.output = this.returnData;
				} else if (call.error === undefined) {
					call.error = "internal failure"; // TODO(karalabe): surface these faults somehow
				}
				delete call.gasIn; delete call.gasCost;
				delete call.outOff; delete call.outLen;
			}
			if (call.gas !== undefined) {
				call.gas = '0x' + bigInt(call.gas).toString(16);
			}
			// Inject the call into the previous one
			var left = this.callstack.length;
			if (this.callstack[left-1].calls === undefined) {
				this.callstack[left-1].calls = [];
			}
			this.callstack[left-1].calls.push(call);
			this.returnData = undefined;
		}
	},
// fault is invoked when the actual execution of an opcode fails.
	fault: function(log, db) {
		// (Invoked directly by step() as soon as log.getError() reports a failed opcode.)
		// If the topmost call already reverted, don't handle the additional fault again
		if (this.callstack[this.callstack.length - 1].error !== undefined) {
			return;
		}
		// Pop off the just failed call
		var call = this.callstack.pop();
		call.error = log.getError();
		// Consume all available gas and clean any leftovers
		if (call.gas !== undefined) {
			call.gas = '0x' + bigInt(call.gas).toString(16);
			call.gasUsed = call.gas
		}
		// The raw accounting fields are internal only; drop them from the report.
		delete call.gasIn; delete call.gasCost;
		delete call.outOff; delete call.outLen;
		// Flatten the failed call into its parent
		var left = this.callstack.length;
		if (left > 0) {
			if (this.callstack[left-1].calls === undefined) {
				this.callstack[left-1].calls = [];
			}
			this.callstack[left-1].calls.push(call);
			return;
		}
		// Last call failed too, leave it in the stack
		this.callstack.push(call);
	},
// result is invoked when all the opcodes have been iterated over and returns
// the final result of the tracing.
	result: function(ctx, db) {
		// Assemble the top-level frame from the context handed in by the tracer harness.
		var result = {
			type: ctx.type,
			from: toHex(ctx.from),
			to: toHex(ctx.to),
			value: '0x' + ctx.value.toString(16),
			gas: '0x' + bigInt(ctx.gas).toString(16),
			gasUsed: '0x' + bigInt(ctx.gasUsed).toString(16),
			input: toHex(ctx.input),
			output: toHex(ctx.output),
			time: ctx.time,
		};
		// Graft the collected sub-calls and any recorded error onto the root frame.
		if (this.callstack[0].calls !== undefined) {
			result.calls = this.callstack[0].calls;
		}
		if (this.callstack[0].error !== undefined) {
			result.error = this.callstack[0].error;
		} else if (ctx.error !== undefined) {
			result.error = ctx.error;
		}
		// On failure drop the output, unless it is non-empty revert data (kept so callers can decode the reason).
		if (result.error !== undefined && (result.error !== "execution reverted" || result.output ==="0x")) {
			delete result.output;
		}
		return this.finalize(result);
	},
// finalize recreates a call object using the final desired field oder for json
// serialization. This is a nicety feature to pass meaningfully ordered results
// to users who don't interpret it, just display it.
finalize: function(call) {
var sorted = {
type: call.type,
from: call.from,
to: call.to,
value: call.value,
gas: call.gas,
gasUsed: call.gasUsed,
input: call.input,
output: call.output,
error: call.error,
time: call.time,
calls: call.calls,
}
for (var key in sorted) {
if (sorted[key] === undefined) {
delete sorted[key];
}
}
if (sorted.calls !== undefined) {
for (var i=0; i<sorted.calls.length; i++) {
sorted.calls[i] = this.finalize(sorted.calls[i]);
}
}
return sorted;
}
} | PypiClean |
/DFHypercode-0.0.1-py3-none-any.whl/Hypercode/reading/callable_decorators.py | import collections
import inspect
import typing
from ..classes import CallableBlock, Tag, JSONData, Arguments, Item
from ..enums import BlockType, CallableAction
from ..utils import remove_u200b_from_doc
from .reader import DFReader
class Function(CallableBlock):
    """Decorator defining a code line invocable through a Call Function block.

    Example usage::

        @Function(name="My Function")  # 'hidden' defaults to False
        def my_function():
            # code

        @Function(name="Other Function", hidden=True)
        def other_function():
            # code

        @Function  # default name is the function's name as Capitalized Words;
        def cool_function():  # here, the name will be 'Cool Function'
            # code

    Parameters
    ----------\u200b
    name_or_func : Union[:class:`str`, Callable]
        Either the proper Python function containing the code to be run when this Function is called,
        or the name of this Function (its :attr:`data`).
    hidden : :class:`bool`, optional
        Whether this Function is hidden in the Call Function menu. Defaults to ``False``.
    item_icon : Optional[:class:`~py2df.classes.mc_types.Item`], optional
        An optional item representing this Function in the Call Function menu.

    Attributes
    ----------\u200b
    block : :attr:`~py2df.enums.parameters.BlockType.FUNCTION`
        The type of this codeblock (Function).
    args : :attr:`~py2df.classes.collections.Arguments`
        The block's arguments: the item icon, if given, plus the "Is Hidden" tag.
    action : ``None``
        ('Function' codeblocks have no action.)
    sub_action : ``None``
        ('Function' codeblocks have no sub-actions.)
    function : Callable
        The Python function holding this Function's code.
    item_icon : Optional[:class:`~py2df.classes.mc_types.Item`]
        An optional item representing this Function in the Call Function menu.
    length : :class:`int`
        Length of a Function codeblock, in blocks; always 2.
    name : :class:`str`
        The Function's name.
    data : :class:`str`
        The Function's name (same as :attr:`Function.name`).
    hidden : :class:`bool`
        Whether this Function is hidden in the Call Function menu. Defaults to ``False``.
    target : ``None``
        ('Function' codeblocks have no targets.)
    """
    __slots__ = ("data", "hidden", "item_icon", "function")
    # Codeblock metadata: a Function spans 2 blocks and carries no
    # action, sub-action or target; its name lives in `data`.
    block: BlockType = BlockType.FUNCTION
    args: Arguments
    action: None = None
    sub_action: None = None
    function: typing.Callable
    length: int = 2
    data: str  # Name
    item_icon: typing.Optional[Item]
    hidden: bool
    target: None = None
    @property
    def args(self) -> Arguments:
        """The block's arguments: the optional menu icon plus the "Is Hidden" tag."""
        if self.item_icon:
            icon_args = [(self.item_icon.to_item(),)]
        else:
            icon_args = []
        hidden_tag = Tag(
            "Is Hidden", option=self.hidden, action=CallableAction.DYNAMIC, block=BlockType.FUNCTION
        )
        return Arguments(*icon_args, tags=[hidden_tag])
    def __call__(self, func: typing.Callable) -> "Function":  # decorate
        """Store this Function's line of code and register it.

        Parameters
        ----------
        func : Callable
            The Python function with the code run when this Function is
            invoked by a Call Function block.

        Returns
        -------
        :class:`Function`
            self

        Notes
        -----
        Appends the Function to the list of lines in the
        :class:`~py2df.reading.reader.DFReader` singleton.
        """
        self.function = func
        DFReader().append_function(self)
        return self
class Process(CallableBlock):
    """Used to define a line of code that can be called with a Start Process block. Decorator. Example usage::
        @Process(name="My Process")  # 'hidden' is False by default
        def my_process():
            # code
        @Process(name="Other Process", hidden=True)
        def other_process():
            # code
        @Process  # default name is the process name as Capitalized Words.
        def cool_process():  # in this case, the name will be 'Cool Process'
            # code
    Parameters
    ----------\u200b
    name_or_func : Union[:class:`str`, Callable]
        Can be either the proper Python function containing the code to be run when this Process is started,
        or the name of this Process (its :attr:`data`).
    hidden : :class:`bool`, optional
        Whether or not this Process is hidden in the Start Process menu. Defaults to ``False``.
    item_icon : Optional[:class:`~py2df.classes.mc_types.Item`], optional
        An optional item that represents this Process in the Start Process menu.
    Attributes
    ----------\u200b
    block : :attr:`~py2df.enums.parameters.BlockType.PROCESS`
        The type of this codeblock (Process).
    args : :attr:`~py2df.classes.collections.Arguments`
        The arguments of the Process (containing the Hidden tag, i.e., whether or not the Process is hidden in
        the Start Process menu).
    action : ``None``
        ('Process' codeblocks have no action.)
    sub_action : ``None``
        ('Process' codeblocks have no sub-actions.)
    function : Callable
        The Python function containing this Process's code.
    item_icon : Optional[:class:`~py2df.classes.mc_types.Item`]
        An optional item that represents this Process in the Start Process menu.
    length : :class:`int`
        The length of a Process codeblock, in blocks. This is always 2.
    name : :class:`str`
        The name of this Process.
    data : :class:`str`
        The name of this Process (same as :attr:`Process.name`).
    hidden : :class:`bool`
        Whether or not this Process is hidden in the Start Process menu. Defaults to ``False``.
    target : ``None``
        ('Process' codeblocks have no targets.)
    """
    __slots__ = ("data", "hidden", "item_icon", "function")
    block: BlockType = BlockType.PROCESS
    args: Arguments
    action: None = None
    sub_action: None = None
    function: typing.Callable
    length: int = 2
    data: str # Name
    item_icon: typing.Optional[Item]
    hidden: bool
    target: None = None
    @property
    def args(self) -> Arguments:
        """The arguments of the Process (containing the item icon, if specified, and the Hidden tag, i.e., whether \
or not the Process is hidden in the Start Process menu)."""
        return Arguments(
            *([(self.item_icon.to_item(),)] if self.item_icon else []),
            # NOTE(review): block=BlockType.FUNCTION mirrors the Function class above;
            # confirm whether BlockType.PROCESS was intended for a Process tag.
            tags=[Tag("Is Hidden", option=self.hidden, action=CallableAction.DYNAMIC, block=BlockType.FUNCTION)]
        )
    def __call__(self, func: typing.Callable) -> "Process":  # decorate
        """Decorator for storing this Process and its line of code.
        Parameters
        ----------
        func : Callable
            The Python function containing the code that will be run when this Process is invoked
            by a Start Process block.
        Returns
        -------
        :class:`Process`
            self
        Notes
        -----
        This appends the Process to the list of lines in the :class:`~py2df.reading.reader.DFReader` singleton.
        """
        self.function = func
        DFReader().append_function(self)
        return self
remove_u200b_from_doc(Function, Process) | PypiClean |
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/xmpp/bosh.js | if(!dojo._hasResource["dojox.xmpp.bosh"]){
dojo._hasResource["dojox.xmpp.bosh"]=true;
dojo.provide("dojox.xmpp.bosh");
dojo.require("dojo.io.script");
dojo.require("dojo.io.iframe");
dojo.require("dojox.xml.parser");
// BOSH transport for dojox.xmpp (minified dojo build output).
// Long-lived HTTP requests are issued as <script> tags inside a pool of hidden
// iframes; each iframe's injected _BOSH_() callback relays the server reply back
// to the parent page via dojox.xmpp.bosh.handle().
// initialize: (re)creates the iframe pool; fires config.load() once iframe 0 loads.
dojox.xmpp.bosh={transportIframes:[],initialize:function(_1){
this.transportIframes=[];
var _2=dojox._scopeName+".xmpp.bosh";
var c=dojo.connect(dojo.getObject(_2),"_iframeOnload",this,function(_3){
if(_3==0){
_1.load();
dojo.disconnect(c);
}
});
for(var i=0;i<_1.iframes;i++){
var _4="xmpp-transport-"+i;
var _5=dojo.byId("xmpp-transport-"+i);
// Tear down any leftover iframe (and its window references) before recreating it.
if(_5){
if(window[_4]){
window[_4]=null;
}
if(window.frames[_4]){
window.frames[_4]=null;
}
dojo.destroy(_5);
}
_5=dojo.io.iframe.create("xmpp-transport-"+i,_2+"._iframeOnload("+i+");");
this.transportIframes.push(_5);
}
// _iframeOnload: bootstraps a transport iframe with the state flags and the
// _BOSH_() callback that forwards server messages to handle() in the parent.
},_iframeOnload:function(_6){
var _7=dojo.io.iframe.doc(dojo.byId("xmpp-transport-"+_6));
_7.write("<script>var isLoaded=true; var rid=0; var transmiting=false; function _BOSH_(msg) { transmiting=false; parent.dojox.xmpp.bosh.handle(msg, rid); } </script>");
// findOpenIframe: returns the first loaded, idle transport iframe, or false if all are busy.
},findOpenIframe:function(){
for(var i=0;i<this.transportIframes.length;i++){
var _8=this.transportIframes[i];
var _9=_8.contentWindow;
if(_9.isLoaded&&!_9.transmiting){
return _8;
}
}
return false;
// handle: entry point for iframe callbacks; parses the reply XML and resolves
// the deferred registered under this["rid"+rid].
},handle:function(_a,_b){
var _c=this["rid"+_b];
var _d=dojox.xml.parser.parse(_a,"text/xml");
if(_d){
_c.ioArgs.xmppMessage=_d;
}else{
_c.errback(new Error("Recieved bad document from server: "+_a));
}
// get: issues one BOSH request by attaching a <script> tag inside an idle iframe;
// returns a dojo Deferred tracked via dojo._ioWatch.
},get:function(_e){
var _f=this.findOpenIframe();
var _10=dojo.io.iframe.doc(_f);
_e.frameDoc=_10;
var dfd=this._makeScriptDeferred(_e);
var _11=dfd.ioArgs;
_f.contentWindow.rid=_11.rid;
_f.contentWindow.transmiting=true;
dojo._ioAddQueryToUrl(_11);
dojo._ioNotifyStart(dfd);
dojo.io.script.attach(_11.id,_11.url,_10);
dojo._ioWatch(dfd,this._validCheck,this._ioCheck,this._resHandle);
return dfd;
// remove: deletes a finished request's <script> node and its deferred registration.
},remove:function(id,_12){
dojo.destroy(dojo.byId(id,_12));
if(this[id]){
delete this[id];
}
// _makeScriptDeferred: wraps the request args in a Deferred keyed by "rid"+rid
// so handle() can find it when the iframe calls back.
},_makeScriptDeferred:function(_13){
var dfd=dojo._ioSetArgs(_13,this._deferredCancel,this._deferredOk,this._deferredError);
var _14=dfd.ioArgs;
_14.id="rid"+_13.rid;
_14.rid=_13.rid;
_14.canDelete=true;
_14.frameDoc=_13.frameDoc;
this[_14.id]=dfd;
return dfd;
// _deferredCancel/_deferredOk/_deferredError: Deferred lifecycle hooks; dead
// script nodes are queued for cleanup rather than removed immediately.
},_deferredCancel:function(dfd){
dfd.canceled=true;
if(dfd.ioArgs.canDelete){
dojox.xmpp.bosh._addDeadScript(dfd.ioArgs);
}
},_deferredOk:function(dfd){
var _15=dfd.ioArgs;
if(_15.canDelete){
dojox.xmpp.bosh._addDeadScript(_15);
}
return _15.xmppMessage||_15;
},_deferredError:function(_16,dfd){
if(dfd.ioArgs.canDelete){
if(_16.dojoType=="timeout"){
dojox.xmpp.bosh.remove(dfd.ioArgs.id,dfd.ioArgs.frameDoc);
}else{
dojox.xmpp.bosh._addDeadScript(dfd.ioArgs);
}
}
return _16;
// _addDeadScript: queues a finished script node for deferred removal and drops
// the document reference to avoid leaks.
},_deadScripts:[],_addDeadScript:function(_17){
dojox.xmpp.bosh._deadScripts.push({id:_17.id,frameDoc:_17.frameDoc});
_17.frameDoc=null;
// _validCheck: _ioWatch poll hook; sweeps the dead-script queue and always returns true.
},_validCheck:function(dfd){
var _18=dojox.xmpp.bosh;
var _19=_18._deadScripts;
if(_19&&_19.length>0){
for(var i=0;i<_19.length;i++){
_18.remove(_19[i].id,_19[i].frameDoc);
_19[i].frameDoc=null;
}
dojox.xmpp.bosh._deadScripts=[];
}
return true;
// _ioCheck: _ioWatch completion hook; a request is done once handle() stored xmppMessage.
},_ioCheck:function(dfd){
var _1a=dfd.ioArgs;
if(_1a.xmppMessage){
return true;
}
return false;
// _resHandle: _ioWatch resolution hook; fires the Deferred's callback or errback.
},_resHandle:function(dfd){
if(dojox.xmpp.bosh._ioCheck(dfd)){
dfd.callback(dfd);
}else{
dfd.errback(new Error("inconceivable dojox.xmpp.bosh._resHandle error"));
}
}};
}
/KalturaApiClient-19.3.0.tar.gz/KalturaApiClient-19.3.0/KalturaClient/Plugins/ElasticSearch.py | from __future__ import absolute_import
from .Core import *
from ..Base import (
getXmlNodeBool,
getXmlNodeFloat,
getXmlNodeInt,
getXmlNodeText,
KalturaClientPlugin,
KalturaEnumsFactory,
KalturaObjectBase,
KalturaObjectFactory,
KalturaParams,
KalturaServiceBase,
)
########## enums ##########
# @package Kaltura
# @subpackage Client
class KalturaESearchItemType(object):
    """Enum-like constants: how an eSearch item matches its field (exact, partial, prefix, exists, range)."""
    EXACT_MATCH = 1
    PARTIAL = 2
    STARTS_WITH = 3
    EXISTS = 4
    RANGE = 5
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchOperatorType(object):
    """Enum-like constants: boolean operator joining eSearch sub-items (AND, OR, NOT)."""
    AND_OP = 1
    OR_OP = 2
    NOT_OP = 3
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCaptionFieldName(object):
    """Enum-like string constants: searchable caption-asset fields."""
    CAPTION_ASSET_ID = "caption_asset_id"
    CONTENT = "content"
    END_TIME = "end_time"
    LABEL = "label"
    LANGUAGE = "language"
    START_TIME = "start_time"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryAggregateByFieldName(object):
    """Enum-like string constants: category fields available for aggregation."""
    CATEGORY_NAME = "category_name"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryEntryFieldName(object):
    """Enum-like string constants: searchable fields of an entry's category association."""
    ANCESTOR_ID = "ancestor_id"
    ANCESTOR_NAME = "ancestor_name"
    FULL_IDS = "full_ids"
    ID = "id"
    NAME = "name"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryFieldName(object):
    """Enum-like string constants: searchable category fields."""
    CONTRIBUTION_POLICY = "contribution_policy"
    CREATED_AT = "created_at"
    DEPTH = "depth"
    DESCRIPTION = "description"
    DIRECT_ENTRIES_COUNT = "direct_entries_count"
    DIRECT_SUB_CATEGORIES_COUNT = "direct_sub_categories_count"
    DISPLAY_IN_SEARCH = "display_in_search"
    ENTRIES_COUNT = "entries_count"
    FULL_IDS = "full_ids"
    FULL_NAME = "full_name"
    ID = "id"
    INHERITANCE_TYPE = "inheritance_type"
    INHERITED_PARENT_ID = "inherited_parent_id"
    MEMBERS_COUNT = "members_count"
    MODERATION = "moderation"
    NAME = "name"
    PARENT_ID = "parent_id"
    PENDING_ENTRIES_COUNT = "pending_entries_count"
    PENDING_MEMBERS_COUNT = "pending_members_count"
    PRIVACY = "privacy"
    PRIVACY_CONTEXT = "privacy_context"
    PRIVACY_CONTEXTS = "privacy_contexts"
    REFERENCE_ID = "reference_id"
    TAGS = "tags"
    UPDATED_AT = "updated_at"
    USER_ID = "user_id"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryOrderByFieldName(object):
    """Enum-like string constants: category fields usable for ordering results."""
    CREATED_AT = "created_at"
    ENTRIES_COUNT = "entries_count"
    MEMBERS_COUNT = "members_count"
    NAME = "name"
    UPDATED_AT = "updated_at"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryUserFieldName(object):
    """Enum-like string constants: searchable category-user fields."""
    USER_ID = "user_id"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCuePointAggregateByFieldName(object):
    """Enum-like string constants: cue-point fields available for aggregation."""
    TAGS = "tags"
    TYPE = "type"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchCuePointFieldName(object):
    """Enum-like string constants: searchable cue-point fields."""
    ANSWERS = "answers"
    END_TIME = "end_time"
    EXPLANATION = "explanation"
    HINT = "hint"
    ID = "id"
    NAME = "name"
    QUESTION = "question"
    START_TIME = "start_time"
    SUB_TYPE = "sub_type"
    TAGS = "tags"
    TEXT = "text"
    TYPE = "type"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryAggregateByFieldName(object):
    """Enum-like string constants: entry fields available for aggregation."""
    ACCESS_CONTROL_PROFILE = "access_control_profile_id"
    ENTRY_TYPE = "entry_type"
    MEDIA_TYPE = "media_type"
    TAGS = "tags"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryFieldName(object):
    """Enum-like string constants: searchable entry fields."""
    ACCESS_CONTROL_ID = "access_control_id"
    ADMIN_TAGS = "admin_tags"
    CAPTIONS_CONTENT = "captions_content"
    CONVERSION_PROFILE_ID = "conversion_profile_id"
    CREATED_AT = "created_at"
    CREATOR_ID = "creator_kuser_id"
    CREDIT = "credit"
    DESCRIPTION = "description"
    END_DATE = "end_date"
    ENTITLED_USER_EDIT = "entitled_kusers_edit"
    ENTITLED_USER_PUBLISH = "entitled_kusers_publish"
    ENTITLED_USER_VIEW = "entitled_kusers_view"
    ENTRY_TYPE = "entry_type"
    EXTERNAL_SOURCE_TYPE = "external_source_type"
    ID = "id"
    IS_LIVE = "is_live"
    IS_QUIZ = "is_quiz"
    USER_ID = "kuser_id"
    LAST_PLAYED_AT = "last_played_at"
    LENGTH_IN_MSECS = "length_in_msecs"
    MEDIA_TYPE = "media_type"
    MODERATION_STATUS = "moderation_status"
    NAME = "name"
    PARENT_ENTRY_ID = "parent_id"
    PARTNER_SORT_VALUE = "partner_sort_value"
    PLAYS = "plays"
    PUSH_PUBLISH = "push_publish"
    RANK = "rank"
    RECORDED_ENTRY_ID = "recorded_entry_id"
    REDIRECT_ENTRY_ID = "redirect_entry_id"
    REFERENCE_ID = "reference_id"
    ROOT_ID = "root_id"
    SITE_URL = "site_url"
    SOURCE_TYPE = "source_type"
    START_DATE = "start_date"
    TAGS = "tags"
    TEMPLATE_ENTRY_ID = "template_entry_id"
    UPDATED_AT = "updated_at"
    USER_NAMES = "user_names"
    VOTES = "votes"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryOrderByFieldName(object):
    """Enum-like string constants: entry fields usable for ordering results."""
    CREATED_AT = "created_at"
    END_DATE = "end_date"
    LAST_PLAYED_AT = "last_played_at"
    NAME = "name"
    PLAYS = "plays"
    PLAYS_LAST_1_DAY = "plays_last_1_day"
    PLAYS_LAST_30_DAYS = "plays_last_30_days"
    PLAYS_LAST_7_DAYS = "plays_last_7_days"
    RANK = "rank"
    START_DATE = "start_date"
    UPDATED_AT = "updated_at"
    VIEWS = "views"
    VIEWS_LAST_1_DAY = "views_last_1_day"
    VIEWS_LAST_30_DAYS = "views_last_30_days"
    VIEWS_LAST_7_DAYS = "views_last_7_days"
    VOTES = "votes"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchMetadataAggregateByFieldName(object):
    """Enum-like string constants: metadata aggregation fields (none defined in this schema version)."""
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchSortOrder(object):
    """Enum-like string constants: sort direction, ascending or descending."""
    ORDER_BY_ASC = "asc"
    ORDER_BY_DESC = "desc"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchUserFieldName(object):
    """Enum-like string constants: searchable user fields."""
    COMPANY = "company"
    COUNTRY = "country"
    CREATED_AT = "created_at"
    EMAIL = "email"
    EXTERNAL_ID = "external_id"
    FIRST_NAME = "first_name"
    GROUP_IDS = "group_ids"
    IS_ADMIN = "is_admin"
    IS_HASHED = "is_hashed"
    LAST_NAME = "last_name"
    LOGIN_ENABLED = "login_enabled"
    PERMISSION_NAMES = "permission_names"
    ROLE_IDS = "role_ids"
    SCREEN_NAME = "screen_name"
    TAGS = "tags"
    TITLE = "title"
    UPDATED_AT = "updated_at"
    USER_ID = "user_id"
    TYPE = "user_type"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaESearchUserOrderByFieldName(object):
    """Enum-like string constants: user fields usable for ordering results."""
    CREATED_AT = "created_at"
    USER_ID = "puser_id"
    SCREEN_NAME = "screen_name"
    UPDATED_AT = "updated_at"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
# @package Kaltura
# @subpackage Client
class KalturaEsearchGroupUserFieldName(object):
    """Enum-like string constants: searchable group-user fields."""
    GROUP_IDS = "group_ids"
    def __init__(self, value):
        self.value = value
    def getValue(self):
        return self.value
########## classes ##########
# @package Kaltura
# @subpackage Client
class KalturaESearchBaseItem(KalturaObjectBase):
    """Base class for all eSearch query items; carries no fields of its own."""
    def __init__(self):
        KalturaObjectBase.__init__(self)
    # Maps XML child names to loader callables used by fromXmlImpl (none here).
    PROPERTY_LOADERS = {
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchBaseItem.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchBaseItem")
        return kparams
# @package Kaltura
# @subpackage Client
class KalturaBeaconScheduledResourceBaseItem(KalturaESearchBaseItem):
    """Marker subclass for beacon scheduled-resource search items; adds no fields."""
    def __init__(self):
        KalturaESearchBaseItem.__init__(self)
    PROPERTY_LOADERS = {
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaESearchBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaBeaconScheduledResourceBaseItem.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaESearchBaseItem.toParams(self)
        kparams.put("objectType", "KalturaBeaconScheduledResourceBaseItem")
        return kparams
# @package Kaltura
# @subpackage Client
class KalturaESearchOrderByItem(KalturaObjectBase):
    """One ordering criterion for eSearch results: a sort direction (asc/desc)."""
    def __init__(self,
            sortOrder=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var KalturaESearchSortOrder
        self.sortOrder = sortOrder
    PROPERTY_LOADERS = {
        'sortOrder': (KalturaEnumsFactory.createString, "KalturaESearchSortOrder"),
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchOrderByItem.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchOrderByItem")
        kparams.addStringEnumIfDefined("sortOrder", self.sortOrder)
        return kparams
    def getSortOrder(self):
        return self.sortOrder
    def setSortOrder(self, newSortOrder):
        self.sortOrder = newSortOrder
# @package Kaltura
# @subpackage Client
class KalturaESearchAggregationItem(KalturaObjectBase):
    """One aggregation request; `size` caps the number of buckets returned."""
    def __init__(self,
            size=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var int
        self.size = size
    PROPERTY_LOADERS = {
        'size': getXmlNodeInt,
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchAggregationItem.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchAggregationItem")
        kparams.addIntIfDefined("size", self.size)
        return kparams
    def getSize(self):
        return self.size
    def setSize(self, newSize):
        self.size = newSize
# @package Kaltura
# @subpackage Client
class KalturaESearchAggregation(KalturaObjectBase):
    """Container for the list of aggregation requests attached to a search."""
    def __init__(self,
            aggregations=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var array of KalturaESearchAggregationItem
        self.aggregations = aggregations
    PROPERTY_LOADERS = {
        'aggregations': (KalturaObjectFactory.createArray, 'KalturaESearchAggregationItem'),
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchAggregation.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchAggregation")
        kparams.addArrayIfDefined("aggregations", self.aggregations)
        return kparams
    def getAggregations(self):
        return self.aggregations
    def setAggregations(self, newAggregations):
        self.aggregations = newAggregations
# @package Kaltura
# @subpackage Client
class KalturaESearchAggregationBucket(KalturaObjectBase):
    """One aggregation bucket: a field value and how many documents carry it."""
    def __init__(self,
            value=NotImplemented,
            count=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var string
        self.value = value
        # @var int
        self.count = count
    PROPERTY_LOADERS = {
        'value': getXmlNodeText,
        'count': getXmlNodeInt,
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchAggregationBucket.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchAggregationBucket")
        kparams.addStringIfDefined("value", self.value)
        kparams.addIntIfDefined("count", self.count)
        return kparams
    def getValue(self):
        return self.value
    def setValue(self, newValue):
        self.value = newValue
    def getCount(self):
        return self.count
    def setCount(self, newCount):
        self.count = newCount
# @package Kaltura
# @subpackage Client
class KalturaESearchAggregationResponseItem(KalturaObjectBase):
    """Aggregation result for one field: aggregation name, field name and its buckets."""
    def __init__(self,
            name=NotImplemented,
            fieldName=NotImplemented,
            buckets=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var string
        self.name = name
        # @var string
        self.fieldName = fieldName
        # @var array of KalturaESearchAggregationBucket
        self.buckets = buckets
    PROPERTY_LOADERS = {
        'name': getXmlNodeText,
        'fieldName': getXmlNodeText,
        'buckets': (KalturaObjectFactory.createArray, 'KalturaESearchAggregationBucket'),
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchAggregationResponseItem.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchAggregationResponseItem")
        kparams.addStringIfDefined("name", self.name)
        kparams.addStringIfDefined("fieldName", self.fieldName)
        kparams.addArrayIfDefined("buckets", self.buckets)
        return kparams
    def getName(self):
        return self.name
    def setName(self, newName):
        self.name = newName
    def getFieldName(self):
        return self.fieldName
    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
    def getBuckets(self):
        return self.buckets
    def setBuckets(self, newBuckets):
        self.buckets = newBuckets
# @package Kaltura
# @subpackage Client
class KalturaESearchBaseFilter(KalturaObjectBase):
    """Base class for eSearch filters; carries no fields of its own."""
    def __init__(self):
        KalturaObjectBase.__init__(self)
    PROPERTY_LOADERS = {
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchBaseFilter.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchBaseFilter")
        return kparams
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryBaseItem(KalturaESearchBaseItem):
    """Base class for category search items; adds no fields."""
    def __init__(self):
        KalturaESearchBaseItem.__init__(self)
    PROPERTY_LOADERS = {
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaESearchBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryBaseItem.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaESearchBaseItem.toParams(self)
        kparams.put("objectType", "KalturaESearchCategoryBaseItem")
        return kparams
# @package Kaltura
# @subpackage Client
class KalturaESearchHighlight(KalturaObjectBase):
    """Highlighted search hits for one field: the field name and matched fragments."""
    def __init__(self,
            fieldName=NotImplemented,
            hits=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var string
        self.fieldName = fieldName
        # @var array of KalturaString
        self.hits = hits
    PROPERTY_LOADERS = {
        'fieldName': getXmlNodeText,
        'hits': (KalturaObjectFactory.createArray, 'KalturaString'),
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchHighlight.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchHighlight")
        kparams.addStringIfDefined("fieldName", self.fieldName)
        kparams.addArrayIfDefined("hits", self.hits)
        return kparams
    def getFieldName(self):
        return self.fieldName
    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
    def getHits(self):
        return self.hits
    def setHits(self, newHits):
        self.hits = newHits
# @package Kaltura
# @subpackage Client
class KalturaESearchItemData(KalturaObjectBase):
    """Per-item search data: the highlights produced for one matched item."""
    def __init__(self,
            highlight=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var array of KalturaESearchHighlight
        self.highlight = highlight
    PROPERTY_LOADERS = {
        'highlight': (KalturaObjectFactory.createArray, 'KalturaESearchHighlight'),
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchItemData.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchItemData")
        kparams.addArrayIfDefined("highlight", self.highlight)
        return kparams
    def getHighlight(self):
        return self.highlight
    def setHighlight(self, newHighlight):
        self.highlight = newHighlight
# @package Kaltura
# @subpackage Client
class KalturaESearchItemDataResult(KalturaObjectBase):
    """Paged list of per-item search data: total count, the items, and their type name."""
    def __init__(self,
            totalCount=NotImplemented,
            items=NotImplemented,
            itemsType=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var int
        self.totalCount = totalCount
        # @var array of KalturaESearchItemData
        self.items = items
        # @var string
        self.itemsType = itemsType
    PROPERTY_LOADERS = {
        'totalCount': getXmlNodeInt,
        'items': (KalturaObjectFactory.createArray, 'KalturaESearchItemData'),
        'itemsType': getXmlNodeText,
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchItemDataResult.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchItemDataResult")
        kparams.addIntIfDefined("totalCount", self.totalCount)
        kparams.addArrayIfDefined("items", self.items)
        kparams.addStringIfDefined("itemsType", self.itemsType)
        return kparams
    def getTotalCount(self):
        return self.totalCount
    def setTotalCount(self, newTotalCount):
        self.totalCount = newTotalCount
    def getItems(self):
        return self.items
    def setItems(self, newItems):
        self.items = newItems
    def getItemsType(self):
        return self.itemsType
    def setItemsType(self, newItemsType):
        self.itemsType = newItemsType
# @package Kaltura
# @subpackage Client
class KalturaESearchResult(KalturaObjectBase):
    """Base search result: top-level highlights plus inner items' data."""
    def __init__(self,
            highlight=NotImplemented,
            itemsData=NotImplemented):
        KalturaObjectBase.__init__(self)
        # @var array of KalturaESearchHighlight
        self.highlight = highlight
        # @var array of KalturaESearchItemDataResult
        self.itemsData = itemsData
    PROPERTY_LOADERS = {
        'highlight': (KalturaObjectFactory.createArray, 'KalturaESearchHighlight'),
        'itemsData': (KalturaObjectFactory.createArray, 'KalturaESearchItemDataResult'),
    }
    def fromXml(self, node):
        """Populate this object from an XML response node."""
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchResult.PROPERTY_LOADERS)
    def toParams(self):
        """Serialize this object into API request parameters."""
        kparams = KalturaObjectBase.toParams(self)
        kparams.put("objectType", "KalturaESearchResult")
        kparams.addArrayIfDefined("highlight", self.highlight)
        kparams.addArrayIfDefined("itemsData", self.itemsData)
        return kparams
    def getHighlight(self):
        return self.highlight
    def setHighlight(self, newHighlight):
        self.highlight = newHighlight
    def getItemsData(self):
        return self.itemsData
    def setItemsData(self, newItemsData):
        self.itemsData = newItemsData
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryResult(KalturaESearchResult):
    """ESearch result wrapping a matched KalturaCategory object."""

    def __init__(self, highlight=NotImplemented, itemsData=NotImplemented, object=NotImplemented):
        KalturaESearchResult.__init__(self, highlight, itemsData)

        # @var KalturaCategory
        self.object = object

    PROPERTY_LOADERS = {
        'object': (KalturaObjectFactory.create, 'KalturaCategory'),
    }

    def fromXml(self, node):
        KalturaESearchResult.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryResult.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchResult.toParams(self)
        params.put("objectType", "KalturaESearchCategoryResult")
        params.addObjectIfDefined("object", self.object)
        return params

    def getObject(self):
        return self.object

    def setObject(self, newObject):
        self.object = newObject
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryBaseItem(KalturaESearchBaseItem):
    """Marker base class for entry-scoped ESearch items (no own fields)."""

    def __init__(self):
        KalturaESearchBaseItem.__init__(self)

    PROPERTY_LOADERS = {
    }

    def fromXml(self, node):
        KalturaESearchBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryBaseItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryBaseItem")
        return params
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryBaseNestedObject(KalturaESearchEntryBaseItem):
    """Marker base class for nested objects inside entry ESearch items (no own fields)."""

    def __init__(self):
        KalturaESearchEntryBaseItem.__init__(self)

    PROPERTY_LOADERS = {
    }

    def fromXml(self, node):
        KalturaESearchEntryBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryBaseNestedObject.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchEntryBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryBaseNestedObject")
        return params
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryNestedBaseItem(KalturaESearchEntryBaseNestedObject):
    """Marker base class for nested entry ESearch search items (no own fields)."""

    def __init__(self):
        KalturaESearchEntryBaseNestedObject.__init__(self)

    PROPERTY_LOADERS = {
    }

    def fromXml(self, node):
        KalturaESearchEntryBaseNestedObject.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryNestedBaseItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchEntryBaseNestedObject.toParams(self)
        params.put("objectType", "KalturaESearchEntryNestedBaseItem")
        return params
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryResult(KalturaESearchResult):
    """ESearch result wrapping a matched KalturaBaseEntry object."""

    def __init__(self, highlight=NotImplemented, itemsData=NotImplemented, object=NotImplemented):
        KalturaESearchResult.__init__(self, highlight, itemsData)

        # @var KalturaBaseEntry
        self.object = object

    PROPERTY_LOADERS = {
        'object': (KalturaObjectFactory.create, 'KalturaBaseEntry'),
    }

    def fromXml(self, node):
        KalturaESearchResult.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryResult.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchResult.toParams(self)
        params.put("objectType", "KalturaESearchEntryResult")
        params.addObjectIfDefined("object", self.object)
        return params

    def getObject(self):
        return self.object

    def setObject(self, newObject):
        self.object = newObject
# @package Kaltura
# @subpackage Client
class KalturaESearchGroupResult(KalturaESearchResult):
    """ESearch result wrapping a matched KalturaGroup object."""

    def __init__(self, highlight=NotImplemented, itemsData=NotImplemented, object=NotImplemented):
        KalturaESearchResult.__init__(self, highlight, itemsData)

        # @var KalturaGroup
        self.object = object

    PROPERTY_LOADERS = {
        'object': (KalturaObjectFactory.create, 'KalturaGroup'),
    }

    def fromXml(self, node):
        KalturaESearchResult.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchGroupResult.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchResult.toParams(self)
        params.put("objectType", "KalturaESearchGroupResult")
        params.addObjectIfDefined("object", self.object)
        return params

    def getObject(self):
        return self.object

    def setObject(self, newObject):
        self.object = newObject
# @package Kaltura
# @subpackage Client
class KalturaESearchOrderBy(KalturaObjectBase):
    """Ordered list of sort criteria for an ESearch request."""

    def __init__(self, orderItems=NotImplemented):
        KalturaObjectBase.__init__(self)

        # @var array of KalturaESearchOrderByItem
        self.orderItems = orderItems

    PROPERTY_LOADERS = {
        'orderItems': (KalturaObjectFactory.createArray, 'KalturaESearchOrderByItem'),
    }

    def fromXml(self, node):
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchOrderBy.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaObjectBase.toParams(self)
        params.put("objectType", "KalturaESearchOrderBy")
        params.addArrayIfDefined("orderItems", self.orderItems)
        return params

    def getOrderItems(self):
        return self.orderItems

    def setOrderItems(self, newOrderItems):
        self.orderItems = newOrderItems
# @package Kaltura
# @subpackage Client
class KalturaESearchParams(KalturaObjectBase):
    """Common ESearch request parameters: status filter, object id and ordering."""

    def __init__(self, objectStatuses=NotImplemented, objectId=NotImplemented, orderBy=NotImplemented):
        KalturaObjectBase.__init__(self)

        # @var string
        self.objectStatuses = objectStatuses

        # @var string
        self.objectId = objectId

        # @var KalturaESearchOrderBy
        self.orderBy = orderBy

    PROPERTY_LOADERS = {
        'objectStatuses': getXmlNodeText,
        'objectId': getXmlNodeText,
        'orderBy': (KalturaObjectFactory.create, 'KalturaESearchOrderBy'),
    }

    def fromXml(self, node):
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchParams.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaObjectBase.toParams(self)
        params.put("objectType", "KalturaESearchParams")
        params.addStringIfDefined("objectStatuses", self.objectStatuses)
        params.addStringIfDefined("objectId", self.objectId)
        params.addObjectIfDefined("orderBy", self.orderBy)
        return params

    def getObjectStatuses(self):
        return self.objectStatuses

    def setObjectStatuses(self, newObjectStatuses):
        self.objectStatuses = newObjectStatuses

    def getObjectId(self):
        return self.objectId

    def setObjectId(self, newObjectId):
        self.objectId = newObjectId

    def getOrderBy(self):
        return self.orderBy

    def setOrderBy(self, newOrderBy):
        self.orderBy = newOrderBy
# @package Kaltura
# @subpackage Client
class KalturaESearchRange(KalturaObjectBase):
    """Integer range filter with inclusive and exclusive bounds for ESearch queries."""

    def __init__(self, greaterThanOrEqual=NotImplemented, lessThanOrEqual=NotImplemented,
                 greaterThan=NotImplemented, lessThan=NotImplemented):
        KalturaObjectBase.__init__(self)

        # @var int
        self.greaterThanOrEqual = greaterThanOrEqual

        # @var int
        self.lessThanOrEqual = lessThanOrEqual

        # @var int
        self.greaterThan = greaterThan

        # @var int
        self.lessThan = lessThan

    PROPERTY_LOADERS = {
        'greaterThanOrEqual': getXmlNodeInt,
        'lessThanOrEqual': getXmlNodeInt,
        'greaterThan': getXmlNodeInt,
        'lessThan': getXmlNodeInt,
    }

    def fromXml(self, node):
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchRange.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaObjectBase.toParams(self)
        params.put("objectType", "KalturaESearchRange")
        params.addIntIfDefined("greaterThanOrEqual", self.greaterThanOrEqual)
        params.addIntIfDefined("lessThanOrEqual", self.lessThanOrEqual)
        params.addIntIfDefined("greaterThan", self.greaterThan)
        params.addIntIfDefined("lessThan", self.lessThan)
        return params

    def getGreaterThanOrEqual(self):
        return self.greaterThanOrEqual

    def setGreaterThanOrEqual(self, newGreaterThanOrEqual):
        self.greaterThanOrEqual = newGreaterThanOrEqual

    def getLessThanOrEqual(self):
        return self.lessThanOrEqual

    def setLessThanOrEqual(self, newLessThanOrEqual):
        self.lessThanOrEqual = newLessThanOrEqual

    def getGreaterThan(self):
        return self.greaterThan

    def setGreaterThan(self, newGreaterThan):
        self.greaterThan = newGreaterThan

    def getLessThan(self):
        return self.lessThan

    def setLessThan(self, newLessThan):
        self.lessThan = newLessThan
# @package Kaltura
# @subpackage Client
class KalturaESearchResponse(KalturaObjectBase):
    """Base ESearch response carrying only the server-reported total match count."""

    def __init__(self, totalCount=NotImplemented):
        KalturaObjectBase.__init__(self)

        # @var int
        # @readonly
        self.totalCount = totalCount

    PROPERTY_LOADERS = {
        'totalCount': getXmlNodeInt,
    }

    def fromXml(self, node):
        KalturaObjectBase.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchResponse.PROPERTY_LOADERS)

    def toParams(self):
        # totalCount is read-only, so it is never serialized back to the server.
        params = KalturaObjectBase.toParams(self)
        params.put("objectType", "KalturaESearchResponse")
        return params

    def getTotalCount(self):
        return self.totalCount
# @package Kaltura
# @subpackage Client
class KalturaESearchUserBaseItem(KalturaESearchBaseItem):
    """Marker base class for user-scoped ESearch items (no own fields)."""

    def __init__(self):
        KalturaESearchBaseItem.__init__(self)

    PROPERTY_LOADERS = {
    }

    def fromXml(self, node):
        KalturaESearchBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserBaseItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchUserBaseItem")
        return params
# @package Kaltura
# @subpackage Client
class KalturaESearchUserResult(KalturaESearchResult):
    """ESearch result wrapping a matched KalturaUser object."""

    def __init__(self, highlight=NotImplemented, itemsData=NotImplemented, object=NotImplemented):
        KalturaESearchResult.__init__(self, highlight, itemsData)

        # @var KalturaUser
        self.object = object

    PROPERTY_LOADERS = {
        'object': (KalturaObjectFactory.create, 'KalturaUser'),
    }

    def fromXml(self, node):
        KalturaESearchResult.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserResult.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchResult.toParams(self)
        params.put("objectType", "KalturaESearchUserResult")
        params.addObjectIfDefined("object", self.object)
        return params

    def getObject(self):
        return self.object

    def setObject(self, newObject):
        self.object = newObject
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryOperator(KalturaESearchEntryBaseItem):
    """Boolean operator (AND/OR/NOT) combining nested entry search items."""

    def __init__(self, operator=NotImplemented, searchItems=NotImplemented):
        KalturaESearchEntryBaseItem.__init__(self)

        # @var KalturaESearchOperatorType
        self.operator = operator

        # @var array of KalturaESearchEntryBaseItem
        self.searchItems = searchItems

    PROPERTY_LOADERS = {
        'operator': (KalturaEnumsFactory.createInt, "KalturaESearchOperatorType"),
        'searchItems': (KalturaObjectFactory.createArray, 'KalturaESearchEntryBaseItem'),
    }

    def fromXml(self, node):
        KalturaESearchEntryBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryOperator.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchEntryBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryOperator")
        params.addIntEnumIfDefined("operator", self.operator)
        params.addArrayIfDefined("searchItems", self.searchItems)
        return params

    def getOperator(self):
        return self.operator

    def setOperator(self, newOperator):
        self.operator = newOperator

    def getSearchItems(self):
        return self.searchItems

    def setSearchItems(self, newSearchItems):
        self.searchItems = newSearchItems
# @package Kaltura
# @subpackage Client
class KalturaESearchCaptionItemData(KalturaESearchItemData):
    """Per-match caption data: the matched line, its timing and caption-asset info."""

    def __init__(self, highlight=NotImplemented, line=NotImplemented, startsAt=NotImplemented,
                 endsAt=NotImplemented, language=NotImplemented, captionAssetId=NotImplemented,
                 label=NotImplemented):
        KalturaESearchItemData.__init__(self, highlight)

        # @var string
        self.line = line

        # @var int
        self.startsAt = startsAt

        # @var int
        self.endsAt = endsAt

        # @var string
        self.language = language

        # @var string
        self.captionAssetId = captionAssetId

        # @var string
        self.label = label

    PROPERTY_LOADERS = {
        'line': getXmlNodeText,
        'startsAt': getXmlNodeInt,
        'endsAt': getXmlNodeInt,
        'language': getXmlNodeText,
        'captionAssetId': getXmlNodeText,
        'label': getXmlNodeText,
    }

    def fromXml(self, node):
        KalturaESearchItemData.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCaptionItemData.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchItemData.toParams(self)
        params.put("objectType", "KalturaESearchCaptionItemData")
        params.addStringIfDefined("line", self.line)
        params.addIntIfDefined("startsAt", self.startsAt)
        params.addIntIfDefined("endsAt", self.endsAt)
        params.addStringIfDefined("language", self.language)
        params.addStringIfDefined("captionAssetId", self.captionAssetId)
        params.addStringIfDefined("label", self.label)
        return params

    def getLine(self):
        return self.line

    def setLine(self, newLine):
        self.line = newLine

    def getStartsAt(self):
        return self.startsAt

    def setStartsAt(self, newStartsAt):
        self.startsAt = newStartsAt

    def getEndsAt(self):
        return self.endsAt

    def setEndsAt(self, newEndsAt):
        self.endsAt = newEndsAt

    def getLanguage(self):
        return self.language

    def setLanguage(self, newLanguage):
        self.language = newLanguage

    def getCaptionAssetId(self):
        return self.captionAssetId

    def setCaptionAssetId(self, newCaptionAssetId):
        self.captionAssetId = newCaptionAssetId

    def getLabel(self):
        return self.label

    def setLabel(self, newLabel):
        self.label = newLabel
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryAggregationItem(KalturaESearchAggregationItem):
    """Aggregation bucket definition over a category field."""

    def __init__(self, size=NotImplemented, fieldName=NotImplemented):
        KalturaESearchAggregationItem.__init__(self, size)

        # @var KalturaESearchCategoryAggregateByFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchCategoryAggregateByFieldName"),
    }

    def fromXml(self, node):
        KalturaESearchAggregationItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryAggregationItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchAggregationItem.toParams(self)
        params.put("objectType", "KalturaESearchCategoryAggregationItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryOrderByItem(KalturaESearchOrderByItem):
    """Sort criterion over a category field for ESearch ordering."""

    def __init__(self, sortOrder=NotImplemented, sortField=NotImplemented):
        KalturaESearchOrderByItem.__init__(self, sortOrder)

        # @var KalturaESearchCategoryOrderByFieldName
        self.sortField = sortField

    PROPERTY_LOADERS = {
        'sortField': (KalturaEnumsFactory.createString, "KalturaESearchCategoryOrderByFieldName"),
    }

    def fromXml(self, node):
        KalturaESearchOrderByItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryOrderByItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchOrderByItem.toParams(self)
        params.put("objectType", "KalturaESearchCategoryOrderByItem")
        params.addStringEnumIfDefined("sortField", self.sortField)
        return params

    def getSortField(self):
        return self.sortField

    def setSortField(self, newSortField):
        self.sortField = newSortField
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryOperator(KalturaESearchCategoryBaseItem):
    """Boolean operator (AND/OR/NOT) combining nested category search items."""

    def __init__(self, operator=NotImplemented, searchItems=NotImplemented):
        KalturaESearchCategoryBaseItem.__init__(self)

        # @var KalturaESearchOperatorType
        self.operator = operator

        # @var array of KalturaESearchCategoryBaseItem
        self.searchItems = searchItems

    PROPERTY_LOADERS = {
        'operator': (KalturaEnumsFactory.createInt, "KalturaESearchOperatorType"),
        'searchItems': (KalturaObjectFactory.createArray, 'KalturaESearchCategoryBaseItem'),
    }

    def fromXml(self, node):
        KalturaESearchCategoryBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryOperator.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchCategoryBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchCategoryOperator")
        params.addIntEnumIfDefined("operator", self.operator)
        params.addArrayIfDefined("searchItems", self.searchItems)
        return params

    def getOperator(self):
        return self.operator

    def setOperator(self, newOperator):
        self.operator = newOperator

    def getSearchItems(self):
        return self.searchItems

    def setSearchItems(self, newSearchItems):
        self.searchItems = newSearchItems
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryParams(KalturaESearchParams):
    """Category-search request: common params plus the category operator tree."""

    def __init__(self, objectStatuses=NotImplemented, objectId=NotImplemented,
                 orderBy=NotImplemented, searchOperator=NotImplemented):
        KalturaESearchParams.__init__(self, objectStatuses, objectId, orderBy)

        # @var KalturaESearchCategoryOperator
        self.searchOperator = searchOperator

    PROPERTY_LOADERS = {
        'searchOperator': (KalturaObjectFactory.create, 'KalturaESearchCategoryOperator'),
    }

    def fromXml(self, node):
        KalturaESearchParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryParams.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchParams.toParams(self)
        params.put("objectType", "KalturaESearchCategoryParams")
        params.addObjectIfDefined("searchOperator", self.searchOperator)
        return params

    def getSearchOperator(self):
        return self.searchOperator

    def setSearchOperator(self, newSearchOperator):
        self.searchOperator = newSearchOperator
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryResponse(KalturaESearchResponse):
    """Category-search response: total count plus the matched category results."""

    def __init__(self, totalCount=NotImplemented, objects=NotImplemented):
        KalturaESearchResponse.__init__(self, totalCount)

        # @var array of KalturaESearchCategoryResult
        # @readonly
        self.objects = objects

    PROPERTY_LOADERS = {
        'objects': (KalturaObjectFactory.createArray, 'KalturaESearchCategoryResult'),
    }

    def fromXml(self, node):
        KalturaESearchResponse.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryResponse.PROPERTY_LOADERS)

    def toParams(self):
        # objects is read-only, so it is never serialized back to the server.
        params = KalturaESearchResponse.toParams(self)
        params.put("objectType", "KalturaESearchCategoryResponse")
        return params

    def getObjects(self):
        return self.objects
# @package Kaltura
# @subpackage Client
class KalturaESearchCuePointItemData(KalturaESearchItemData):
    """Per-match cue-point data: identity, text/quiz content, timing and asset link."""

    def __init__(self, highlight=NotImplemented, cuePointType=NotImplemented, id=NotImplemented,
                 name=NotImplemented, text=NotImplemented, tags=NotImplemented,
                 startTime=NotImplemented, endTime=NotImplemented, subType=NotImplemented,
                 question=NotImplemented, answers=NotImplemented, hint=NotImplemented,
                 explanation=NotImplemented, assetId=NotImplemented):
        KalturaESearchItemData.__init__(self, highlight)

        # @var string
        self.cuePointType = cuePointType

        # @var string
        self.id = id

        # @var string
        self.name = name

        # @var string
        self.text = text

        # @var array of KalturaString
        self.tags = tags

        # @var string
        self.startTime = startTime

        # @var string
        self.endTime = endTime

        # @var string
        self.subType = subType

        # @var string
        self.question = question

        # @var array of KalturaString
        self.answers = answers

        # @var string
        self.hint = hint

        # @var string
        self.explanation = explanation

        # @var string
        self.assetId = assetId

    PROPERTY_LOADERS = {
        'cuePointType': getXmlNodeText,
        'id': getXmlNodeText,
        'name': getXmlNodeText,
        'text': getXmlNodeText,
        'tags': (KalturaObjectFactory.createArray, 'KalturaString'),
        'startTime': getXmlNodeText,
        'endTime': getXmlNodeText,
        'subType': getXmlNodeText,
        'question': getXmlNodeText,
        'answers': (KalturaObjectFactory.createArray, 'KalturaString'),
        'hint': getXmlNodeText,
        'explanation': getXmlNodeText,
        'assetId': getXmlNodeText,
    }

    def fromXml(self, node):
        KalturaESearchItemData.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCuePointItemData.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchItemData.toParams(self)
        params.put("objectType", "KalturaESearchCuePointItemData")
        params.addStringIfDefined("cuePointType", self.cuePointType)
        params.addStringIfDefined("id", self.id)
        params.addStringIfDefined("name", self.name)
        params.addStringIfDefined("text", self.text)
        params.addArrayIfDefined("tags", self.tags)
        params.addStringIfDefined("startTime", self.startTime)
        params.addStringIfDefined("endTime", self.endTime)
        params.addStringIfDefined("subType", self.subType)
        params.addStringIfDefined("question", self.question)
        params.addArrayIfDefined("answers", self.answers)
        params.addStringIfDefined("hint", self.hint)
        params.addStringIfDefined("explanation", self.explanation)
        params.addStringIfDefined("assetId", self.assetId)
        return params

    def getCuePointType(self):
        return self.cuePointType

    def setCuePointType(self, newCuePointType):
        self.cuePointType = newCuePointType

    def getId(self):
        return self.id

    def setId(self, newId):
        self.id = newId

    def getName(self):
        return self.name

    def setName(self, newName):
        self.name = newName

    def getText(self):
        return self.text

    def setText(self, newText):
        self.text = newText

    def getTags(self):
        return self.tags

    def setTags(self, newTags):
        self.tags = newTags

    def getStartTime(self):
        return self.startTime

    def setStartTime(self, newStartTime):
        self.startTime = newStartTime

    def getEndTime(self):
        return self.endTime

    def setEndTime(self, newEndTime):
        self.endTime = newEndTime

    def getSubType(self):
        return self.subType

    def setSubType(self, newSubType):
        self.subType = newSubType

    def getQuestion(self):
        return self.question

    def setQuestion(self, newQuestion):
        self.question = newQuestion

    def getAnswers(self):
        return self.answers

    def setAnswers(self, newAnswers):
        self.answers = newAnswers

    def getHint(self):
        return self.hint

    def setHint(self, newHint):
        self.hint = newHint

    def getExplanation(self):
        return self.explanation

    def setExplanation(self, newExplanation):
        self.explanation = newExplanation

    def getAssetId(self):
        return self.assetId

    def setAssetId(self, newAssetId):
        self.assetId = newAssetId
# @package Kaltura
# @subpackage Client
class KalturaESearchCuepointsAggregationItem(KalturaESearchAggregationItem):
    """Aggregation bucket definition over a cue-point field."""

    def __init__(self, size=NotImplemented, fieldName=NotImplemented):
        KalturaESearchAggregationItem.__init__(self, size)

        # @var KalturaESearchCuePointAggregateByFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchCuePointAggregateByFieldName"),
    }

    def fromXml(self, node):
        KalturaESearchAggregationItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCuepointsAggregationItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchAggregationItem.toParams(self)
        params.put("objectType", "KalturaESearchCuepointsAggregationItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryAggregationItem(KalturaESearchAggregationItem):
    """Aggregation bucket definition over an entry field."""

    def __init__(self, size=NotImplemented, fieldName=NotImplemented):
        KalturaESearchAggregationItem.__init__(self, size)

        # @var KalturaESearchEntryAggregateByFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchEntryAggregateByFieldName"),
    }

    def fromXml(self, node):
        KalturaESearchAggregationItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryAggregationItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchAggregationItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryAggregationItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryOrderByItem(KalturaESearchOrderByItem):
    """Sort criterion over an entry field for ESearch ordering."""

    def __init__(self, sortOrder=NotImplemented, sortField=NotImplemented):
        KalturaESearchOrderByItem.__init__(self, sortOrder)

        # @var KalturaESearchEntryOrderByFieldName
        self.sortField = sortField

    PROPERTY_LOADERS = {
        'sortField': (KalturaEnumsFactory.createString, "KalturaESearchEntryOrderByFieldName"),
    }

    def fromXml(self, node):
        KalturaESearchOrderByItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryOrderByItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchOrderByItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryOrderByItem")
        params.addStringEnumIfDefined("sortField", self.sortField)
        return params

    def getSortField(self):
        return self.sortField

    def setSortField(self, newSortField):
        self.sortField = newSortField
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryParams(KalturaESearchParams):
    """Entry-search request: common params plus operator tree and aggregations."""

    def __init__(self, objectStatuses=NotImplemented, objectId=NotImplemented,
                 orderBy=NotImplemented, searchOperator=NotImplemented,
                 aggregations=NotImplemented):
        KalturaESearchParams.__init__(self, objectStatuses, objectId, orderBy)

        # @var KalturaESearchEntryOperator
        self.searchOperator = searchOperator

        # @var KalturaESearchAggregation
        self.aggregations = aggregations

    PROPERTY_LOADERS = {
        'searchOperator': (KalturaObjectFactory.create, 'KalturaESearchEntryOperator'),
        'aggregations': (KalturaObjectFactory.create, 'KalturaESearchAggregation'),
    }

    def fromXml(self, node):
        KalturaESearchParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryParams.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchParams.toParams(self)
        params.put("objectType", "KalturaESearchEntryParams")
        params.addObjectIfDefined("searchOperator", self.searchOperator)
        params.addObjectIfDefined("aggregations", self.aggregations)
        return params

    def getSearchOperator(self):
        return self.searchOperator

    def setSearchOperator(self, newSearchOperator):
        self.searchOperator = newSearchOperator

    def getAggregations(self):
        return self.aggregations

    def setAggregations(self, newAggregations):
        self.aggregations = newAggregations
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryResponse(KalturaESearchResponse):
    """Entry-search response: total count, matched entries and aggregation buckets."""

    def __init__(self, totalCount=NotImplemented, objects=NotImplemented, aggregations=NotImplemented):
        KalturaESearchResponse.__init__(self, totalCount)

        # @var array of KalturaESearchEntryResult
        # @readonly
        self.objects = objects

        # @var array of KalturaESearchAggregationResponseItem
        # @readonly
        self.aggregations = aggregations

    PROPERTY_LOADERS = {
        'objects': (KalturaObjectFactory.createArray, 'KalturaESearchEntryResult'),
        'aggregations': (KalturaObjectFactory.createArray, 'KalturaESearchAggregationResponseItem'),
    }

    def fromXml(self, node):
        KalturaESearchResponse.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryResponse.PROPERTY_LOADERS)

    def toParams(self):
        # Both own fields are read-only, so only the object type is serialized.
        params = KalturaESearchResponse.toParams(self)
        params.put("objectType", "KalturaESearchEntryResponse")
        return params

    def getObjects(self):
        return self.objects

    def getAggregations(self):
        return self.aggregations
# @package Kaltura
# @subpackage Client
class KalturaESearchGroupOrderByItem(KalturaESearchOrderByItem):
    """Sort criterion over a group field for ESearch ordering."""

    def __init__(self, sortOrder=NotImplemented, sortField=NotImplemented):
        KalturaESearchOrderByItem.__init__(self, sortOrder)

        # @var KalturaESearchGroupOrderByFieldName
        self.sortField = sortField

    PROPERTY_LOADERS = {
        'sortField': (KalturaEnumsFactory.createString, "KalturaESearchGroupOrderByFieldName"),
    }

    def fromXml(self, node):
        KalturaESearchOrderByItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchGroupOrderByItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchOrderByItem.toParams(self)
        params.put("objectType", "KalturaESearchGroupOrderByItem")
        params.addStringEnumIfDefined("sortField", self.sortField)
        return params

    def getSortField(self):
        return self.sortField

    def setSortField(self, newSortField):
        self.sortField = newSortField
# @package Kaltura
# @subpackage Client
class KalturaESearchGroupParams(KalturaESearchParams):
    """Group-search request: common params plus the group operator tree."""

    def __init__(self, objectStatuses=NotImplemented, objectId=NotImplemented,
                 orderBy=NotImplemented, searchOperator=NotImplemented):
        KalturaESearchParams.__init__(self, objectStatuses, objectId, orderBy)

        # @var KalturaESearchGroupOperator
        self.searchOperator = searchOperator

    PROPERTY_LOADERS = {
        'searchOperator': (KalturaObjectFactory.create, 'KalturaESearchGroupOperator'),
    }

    def fromXml(self, node):
        KalturaESearchParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchGroupParams.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchParams.toParams(self)
        params.put("objectType", "KalturaESearchGroupParams")
        params.addObjectIfDefined("searchOperator", self.searchOperator)
        return params

    def getSearchOperator(self):
        return self.searchOperator

    def setSearchOperator(self, newSearchOperator):
        self.searchOperator = newSearchOperator
# @package Kaltura
# @subpackage Client
class KalturaESearchGroupResponse(KalturaESearchResponse):
    """Group-search response: total count plus the matched group results."""

    def __init__(self, totalCount=NotImplemented, objects=NotImplemented):
        KalturaESearchResponse.__init__(self, totalCount)

        # @var array of KalturaESearchGroupResult
        # @readonly
        self.objects = objects

    PROPERTY_LOADERS = {
        'objects': (KalturaObjectFactory.createArray, 'KalturaESearchGroupResult'),
    }

    def fromXml(self, node):
        KalturaESearchResponse.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchGroupResponse.PROPERTY_LOADERS)

    def toParams(self):
        # objects is read-only, so it is never serialized back to the server.
        params = KalturaESearchResponse.toParams(self)
        params.put("objectType", "KalturaESearchGroupResponse")
        return params

    def getObjects(self):
        return self.objects
# @package Kaltura
# @subpackage Client
class KalturaESearchMetadataAggregationItem(KalturaESearchAggregationItem):
    """Aggregation bucket definition over a metadata field."""

    def __init__(self, size=NotImplemented, fieldName=NotImplemented):
        KalturaESearchAggregationItem.__init__(self, size)

        # @var KalturaESearchMetadataAggregateByFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchMetadataAggregateByFieldName"),
    }

    def fromXml(self, node):
        KalturaESearchAggregationItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchMetadataAggregationItem.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchAggregationItem.toParams(self)
        params.put("objectType", "KalturaESearchMetadataAggregationItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchMetadataItemData(KalturaESearchItemData):
    """Per-match metadata data: the matched xpath/field location and its value."""

    def __init__(self, highlight=NotImplemented, xpath=NotImplemented,
                 metadataProfileId=NotImplemented, metadataFieldId=NotImplemented,
                 valueText=NotImplemented, valueInt=NotImplemented):
        KalturaESearchItemData.__init__(self, highlight)

        # @var string
        self.xpath = xpath

        # @var int
        self.metadataProfileId = metadataProfileId

        # @var int
        self.metadataFieldId = metadataFieldId

        # @var string
        self.valueText = valueText

        # @var int
        self.valueInt = valueInt

    PROPERTY_LOADERS = {
        'xpath': getXmlNodeText,
        'metadataProfileId': getXmlNodeInt,
        'metadataFieldId': getXmlNodeInt,
        'valueText': getXmlNodeText,
        'valueInt': getXmlNodeInt,
    }

    def fromXml(self, node):
        KalturaESearchItemData.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchMetadataItemData.PROPERTY_LOADERS)

    def toParams(self):
        params = KalturaESearchItemData.toParams(self)
        params.put("objectType", "KalturaESearchMetadataItemData")
        params.addStringIfDefined("xpath", self.xpath)
        params.addIntIfDefined("metadataProfileId", self.metadataProfileId)
        params.addIntIfDefined("metadataFieldId", self.metadataFieldId)
        params.addStringIfDefined("valueText", self.valueText)
        params.addIntIfDefined("valueInt", self.valueInt)
        return params

    def getXpath(self):
        return self.xpath

    def setXpath(self, newXpath):
        self.xpath = newXpath

    def getMetadataProfileId(self):
        return self.metadataProfileId

    def setMetadataProfileId(self, newMetadataProfileId):
        self.metadataProfileId = newMetadataProfileId

    def getMetadataFieldId(self):
        return self.metadataFieldId

    def setMetadataFieldId(self, newMetadataFieldId):
        self.metadataFieldId = newMetadataFieldId

    def getValueText(self):
        return self.valueText

    def setValueText(self, newValueText):
        self.valueText = newValueText

    def getValueInt(self):
        return self.valueInt

    def setValueInt(self, newValueInt):
        self.valueInt = newValueInt
# @package Kaltura
# @subpackage Client
class KalturaESearchMetadataOrderByItem(KalturaESearchOrderByItem):
    """Order-by clause that sorts results on a custom-metadata xpath."""

    def __init__(self,
            sortOrder=NotImplemented,
            xpath=NotImplemented,
            metadataProfileId=NotImplemented):
        KalturaESearchOrderByItem.__init__(self, sortOrder)

        # @var string
        self.xpath = xpath

        # @var int
        self.metadataProfileId = metadataProfileId

    PROPERTY_LOADERS = {
        'xpath': getXmlNodeText,
        'metadataProfileId': getXmlNodeInt,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchOrderByItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchMetadataOrderByItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchOrderByItem.toParams(self)
        params.put("objectType", "KalturaESearchMetadataOrderByItem")
        params.addStringIfDefined("xpath", self.xpath)
        params.addIntIfDefined("metadataProfileId", self.metadataProfileId)
        return params

    def getXpath(self):
        return self.xpath

    def setXpath(self, newXpath):
        self.xpath = newXpath

    def getMetadataProfileId(self):
        return self.metadataProfileId

    def setMetadataProfileId(self, newMetadataProfileId):
        self.metadataProfileId = newMetadataProfileId
# @package Kaltura
# @subpackage Client
class KalturaESearchUserOrderByItem(KalturaESearchOrderByItem):
    """Order-by clause that sorts user search results on a named field."""

    def __init__(self,
            sortOrder=NotImplemented,
            sortField=NotImplemented):
        KalturaESearchOrderByItem.__init__(self, sortOrder)

        # @var KalturaESearchUserOrderByFieldName
        self.sortField = sortField

    PROPERTY_LOADERS = {
        'sortField': (KalturaEnumsFactory.createString, "KalturaESearchUserOrderByFieldName"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchOrderByItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserOrderByItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchOrderByItem.toParams(self)
        params.put("objectType", "KalturaESearchUserOrderByItem")
        params.addStringEnumIfDefined("sortField", self.sortField)
        return params

    def getSortField(self):
        return self.sortField

    def setSortField(self, newSortField):
        self.sortField = newSortField
# @package Kaltura
# @subpackage Client
class KalturaESearchUserOperator(KalturaESearchUserBaseItem):
    """Boolean operator (AND/OR/NOT) combining nested user search items."""

    def __init__(self,
            operator=NotImplemented,
            searchItems=NotImplemented):
        KalturaESearchUserBaseItem.__init__(self)

        # @var KalturaESearchOperatorType
        self.operator = operator

        # @var array of KalturaESearchUserBaseItem
        self.searchItems = searchItems

    PROPERTY_LOADERS = {
        'operator': (KalturaEnumsFactory.createInt, "KalturaESearchOperatorType"),
        'searchItems': (KalturaObjectFactory.createArray, 'KalturaESearchUserBaseItem'),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchUserBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserOperator.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchUserBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchUserOperator")
        params.addIntEnumIfDefined("operator", self.operator)
        params.addArrayIfDefined("searchItems", self.searchItems)
        return params

    def getOperator(self):
        return self.operator

    def setOperator(self, newOperator):
        self.operator = newOperator

    def getSearchItems(self):
        return self.searchItems

    def setSearchItems(self, newSearchItems):
        self.searchItems = newSearchItems
# @package Kaltura
# @subpackage Client
class KalturaESearchUserParams(KalturaESearchParams):
    """Top-level parameters object for a user eSearch request."""

    def __init__(self,
            objectStatuses=NotImplemented,
            objectId=NotImplemented,
            orderBy=NotImplemented,
            searchOperator=NotImplemented):
        KalturaESearchParams.__init__(self, objectStatuses, objectId, orderBy)

        # @var KalturaESearchUserOperator
        self.searchOperator = searchOperator

    PROPERTY_LOADERS = {
        'searchOperator': (KalturaObjectFactory.create, 'KalturaESearchUserOperator'),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchParams.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserParams.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchParams.toParams(self)
        params.put("objectType", "KalturaESearchUserParams")
        params.addObjectIfDefined("searchOperator", self.searchOperator)
        return params

    def getSearchOperator(self):
        return self.searchOperator

    def setSearchOperator(self, newSearchOperator):
        self.searchOperator = newSearchOperator
# @package Kaltura
# @subpackage Client
class KalturaESearchUserResponse(KalturaESearchResponse):
    """Paged user search response: total count plus the result objects."""

    def __init__(self,
            totalCount=NotImplemented,
            objects=NotImplemented):
        KalturaESearchResponse.__init__(self, totalCount)

        # @var array of KalturaESearchUserResult
        # @readonly
        self.objects = objects

    PROPERTY_LOADERS = {
        'objects': (KalturaObjectFactory.createArray, 'KalturaESearchUserResult'),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchResponse.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserResponse.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize into a KalturaParams map; 'objects' is read-only and not sent."""
        params = KalturaESearchResponse.toParams(self)
        params.put("objectType", "KalturaESearchUserResponse")
        return params

    def getObjects(self):
        return self.objects
# @package Kaltura
# @subpackage Client
class KalturaEntryCaptionAdvancedFilter(KalturaSearchItem):
    """Advanced filter matching entries by whether they have captions."""

    def __init__(self,
            hasCaption=NotImplemented):
        KalturaSearchItem.__init__(self)

        # @var KalturaNullableBoolean
        self.hasCaption = hasCaption

    PROPERTY_LOADERS = {
        'hasCaption': (KalturaEnumsFactory.createInt, "KalturaNullableBoolean"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaSearchItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaEntryCaptionAdvancedFilter.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaSearchItem.toParams(self)
        params.put("objectType", "KalturaEntryCaptionAdvancedFilter")
        params.addIntEnumIfDefined("hasCaption", self.hasCaption)
        return params

    def getHasCaption(self):
        return self.hasCaption

    def setHasCaption(self, newHasCaption):
        self.hasCaption = newHasCaption
# @package Kaltura
# @subpackage Client
class KalturaBeaconAbstractScheduledResourceItem(KalturaBeaconScheduledResourceBaseItem):
    """Base search item for beacon scheduled-resource queries: term, match type and range."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented):  # 'range' shadows the builtin; name fixed by the API schema
        KalturaBeaconScheduledResourceBaseItem.__init__(self)

        # @var string
        self.searchTerm = searchTerm

        # @var KalturaESearchItemType
        self.itemType = itemType

        # @var KalturaESearchRange
        self.range = range

    PROPERTY_LOADERS = {
        'searchTerm': getXmlNodeText,
        'itemType': (KalturaEnumsFactory.createInt, "KalturaESearchItemType"),
        'range': (KalturaObjectFactory.create, 'KalturaESearchRange'),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaBeaconScheduledResourceBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaBeaconAbstractScheduledResourceItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaBeaconScheduledResourceBaseItem.toParams(self)
        params.put("objectType", "KalturaBeaconAbstractScheduledResourceItem")
        params.addStringIfDefined("searchTerm", self.searchTerm)
        params.addIntEnumIfDefined("itemType", self.itemType)
        params.addObjectIfDefined("range", self.range)
        return params

    def getSearchTerm(self):
        return self.searchTerm

    def setSearchTerm(self, newSearchTerm):
        self.searchTerm = newSearchTerm

    def getItemType(self):
        return self.itemType

    def setItemType(self, newItemType):
        self.itemType = newItemType

    def getRange(self):
        return self.range

    def setRange(self, newRange):
        self.range = newRange
# @package Kaltura
# @subpackage Client
class KalturaESearchAbstractCategoryItem(KalturaESearchCategoryBaseItem):
    """Base category search item: term, match type, range and highlight flag."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,  # 'range' shadows the builtin; name fixed by the API schema
            addHighlight=NotImplemented):
        KalturaESearchCategoryBaseItem.__init__(self)

        # @var string
        self.searchTerm = searchTerm

        # @var KalturaESearchItemType
        self.itemType = itemType

        # @var KalturaESearchRange
        self.range = range

        # @var bool
        self.addHighlight = addHighlight

    PROPERTY_LOADERS = {
        'searchTerm': getXmlNodeText,
        'itemType': (KalturaEnumsFactory.createInt, "KalturaESearchItemType"),
        'range': (KalturaObjectFactory.create, 'KalturaESearchRange'),
        'addHighlight': getXmlNodeBool,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchCategoryBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchAbstractCategoryItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchCategoryBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchAbstractCategoryItem")
        params.addStringIfDefined("searchTerm", self.searchTerm)
        params.addIntEnumIfDefined("itemType", self.itemType)
        params.addObjectIfDefined("range", self.range)
        params.addBoolIfDefined("addHighlight", self.addHighlight)
        return params

    def getSearchTerm(self):
        return self.searchTerm

    def setSearchTerm(self, newSearchTerm):
        self.searchTerm = newSearchTerm

    def getItemType(self):
        return self.itemType

    def setItemType(self, newItemType):
        self.itemType = newItemType

    def getRange(self):
        return self.range

    def setRange(self, newRange):
        self.range = newRange

    def getAddHighlight(self):
        return self.addHighlight

    def setAddHighlight(self, newAddHighlight):
        self.addHighlight = newAddHighlight
# @package Kaltura
# @subpackage Client
class KalturaESearchAbstractEntryItem(KalturaESearchEntryBaseItem):
    """Base entry search item: term, match type, range and highlight flag."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,  # 'range' shadows the builtin; name fixed by the API schema
            addHighlight=NotImplemented):
        KalturaESearchEntryBaseItem.__init__(self)

        # @var string
        self.searchTerm = searchTerm

        # @var KalturaESearchItemType
        self.itemType = itemType

        # @var KalturaESearchRange
        self.range = range

        # @var bool
        self.addHighlight = addHighlight

    PROPERTY_LOADERS = {
        'searchTerm': getXmlNodeText,
        'itemType': (KalturaEnumsFactory.createInt, "KalturaESearchItemType"),
        'range': (KalturaObjectFactory.create, 'KalturaESearchRange'),
        'addHighlight': getXmlNodeBool,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchEntryBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchAbstractEntryItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchEntryBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchAbstractEntryItem")
        params.addStringIfDefined("searchTerm", self.searchTerm)
        params.addIntEnumIfDefined("itemType", self.itemType)
        params.addObjectIfDefined("range", self.range)
        params.addBoolIfDefined("addHighlight", self.addHighlight)
        return params

    def getSearchTerm(self):
        return self.searchTerm

    def setSearchTerm(self, newSearchTerm):
        self.searchTerm = newSearchTerm

    def getItemType(self):
        return self.itemType

    def setItemType(self, newItemType):
        self.itemType = newItemType

    def getRange(self):
        return self.range

    def setRange(self, newRange):
        self.range = newRange

    def getAddHighlight(self):
        return self.addHighlight

    def setAddHighlight(self, newAddHighlight):
        self.addHighlight = newAddHighlight
# @package Kaltura
# @subpackage Client
class KalturaESearchAbstractUserItem(KalturaESearchUserBaseItem):
    """Base user search item: term, match type, range and highlight flag."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,  # 'range' shadows the builtin; name fixed by the API schema
            addHighlight=NotImplemented):
        KalturaESearchUserBaseItem.__init__(self)

        # @var string
        self.searchTerm = searchTerm

        # @var KalturaESearchItemType
        self.itemType = itemType

        # @var KalturaESearchRange
        self.range = range

        # @var bool
        self.addHighlight = addHighlight

    PROPERTY_LOADERS = {
        'searchTerm': getXmlNodeText,
        'itemType': (KalturaEnumsFactory.createInt, "KalturaESearchItemType"),
        'range': (KalturaObjectFactory.create, 'KalturaESearchRange'),
        'addHighlight': getXmlNodeBool,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchUserBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchAbstractUserItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchUserBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchAbstractUserItem")
        params.addStringIfDefined("searchTerm", self.searchTerm)
        params.addIntEnumIfDefined("itemType", self.itemType)
        params.addObjectIfDefined("range", self.range)
        params.addBoolIfDefined("addHighlight", self.addHighlight)
        return params

    def getSearchTerm(self):
        return self.searchTerm

    def setSearchTerm(self, newSearchTerm):
        self.searchTerm = newSearchTerm

    def getItemType(self):
        return self.itemType

    def setItemType(self, newItemType):
        self.itemType = newItemType

    def getRange(self):
        return self.range

    def setRange(self, newRange):
        self.range = newRange

    def getAddHighlight(self):
        return self.addHighlight

    def setAddHighlight(self, newAddHighlight):
        self.addHighlight = newAddHighlight
# @package Kaltura
# @subpackage Client
class KalturaMediaEsearchExportToCsvJobData(KalturaExportCsvJobData):
    """Job data for exporting eSearch entry results to a CSV file."""

    def __init__(self,
            userName=NotImplemented,
            userMail=NotImplemented,
            outputPath=NotImplemented,
            sharedOutputPath=NotImplemented,
            searchParams=NotImplemented,
            options=NotImplemented):
        KalturaExportCsvJobData.__init__(self,
            userName,
            userMail,
            outputPath,
            sharedOutputPath)

        # Esearch parameters for the entry search
        # @var KalturaESearchEntryParams
        self.searchParams = searchParams

        # options
        # @var array of KalturaExportToCsvOptions
        self.options = options

    PROPERTY_LOADERS = {
        'searchParams': (KalturaObjectFactory.create, 'KalturaESearchEntryParams'),
        'options': (KalturaObjectFactory.createArray, 'KalturaExportToCsvOptions'),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaExportCsvJobData.fromXml(self, node)
        self.fromXmlImpl(node, KalturaMediaEsearchExportToCsvJobData.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaExportCsvJobData.toParams(self)
        params.put("objectType", "KalturaMediaEsearchExportToCsvJobData")
        params.addObjectIfDefined("searchParams", self.searchParams)
        params.addArrayIfDefined("options", self.options)
        return params

    def getSearchParams(self):
        return self.searchParams

    def setSearchParams(self, newSearchParams):
        self.searchParams = newSearchParams

    def getOptions(self):
        return self.options

    def setOptions(self, newOptions):
        self.options = newOptions
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryEntryItem(KalturaESearchAbstractEntryItem):
    """Entry search item targeting a category-entry field, optionally filtered by status."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented,
            categoryEntryStatus=NotImplemented):
        KalturaESearchAbstractEntryItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaESearchCategoryEntryFieldName
        self.fieldName = fieldName

        # @var KalturaCategoryEntryStatus
        self.categoryEntryStatus = categoryEntryStatus

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchCategoryEntryFieldName"),
        'categoryEntryStatus': (KalturaEnumsFactory.createInt, "KalturaCategoryEntryStatus"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractEntryItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryEntryItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractEntryItem.toParams(self)
        params.put("objectType", "KalturaESearchCategoryEntryItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        params.addIntEnumIfDefined("categoryEntryStatus", self.categoryEntryStatus)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName

    def getCategoryEntryStatus(self):
        return self.categoryEntryStatus

    def setCategoryEntryStatus(self, newCategoryEntryStatus):
        self.categoryEntryStatus = newCategoryEntryStatus
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryItem(KalturaESearchAbstractCategoryItem):
    """Category search item targeting a named category field."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented):
        KalturaESearchAbstractCategoryItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaESearchCategoryFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchCategoryFieldName"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractCategoryItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractCategoryItem.toParams(self)
        params.put("objectType", "KalturaESearchCategoryItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryMetadataItem(KalturaESearchAbstractCategoryItem):
    """Category search item targeting a custom-metadata xpath."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            xpath=NotImplemented,
            metadataProfileId=NotImplemented,
            metadataFieldId=NotImplemented):
        KalturaESearchAbstractCategoryItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var string
        self.xpath = xpath

        # @var int
        self.metadataProfileId = metadataProfileId

        # @var int
        self.metadataFieldId = metadataFieldId

    PROPERTY_LOADERS = {
        'xpath': getXmlNodeText,
        'metadataProfileId': getXmlNodeInt,
        'metadataFieldId': getXmlNodeInt,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractCategoryItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryMetadataItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractCategoryItem.toParams(self)
        params.put("objectType", "KalturaESearchCategoryMetadataItem")
        params.addStringIfDefined("xpath", self.xpath)
        params.addIntIfDefined("metadataProfileId", self.metadataProfileId)
        params.addIntIfDefined("metadataFieldId", self.metadataFieldId)
        return params

    def getXpath(self):
        return self.xpath

    def setXpath(self, newXpath):
        self.xpath = newXpath

    def getMetadataProfileId(self):
        return self.metadataProfileId

    def setMetadataProfileId(self, newMetadataProfileId):
        self.metadataProfileId = newMetadataProfileId

    def getMetadataFieldId(self):
        return self.metadataFieldId

    def setMetadataFieldId(self, newMetadataFieldId):
        self.metadataFieldId = newMetadataFieldId
# @package Kaltura
# @subpackage Client
class KalturaESearchCategoryUserItem(KalturaESearchAbstractCategoryItem):
    """Category search item matching member users by field, permission level or permission name."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented,
            permissionLevel=NotImplemented,
            permissionName=NotImplemented):
        KalturaESearchAbstractCategoryItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaESearchCategoryUserFieldName
        self.fieldName = fieldName

        # @var KalturaCategoryUserPermissionLevel
        self.permissionLevel = permissionLevel

        # @var string
        self.permissionName = permissionName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchCategoryUserFieldName"),
        'permissionLevel': (KalturaEnumsFactory.createInt, "KalturaCategoryUserPermissionLevel"),
        'permissionName': getXmlNodeText,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractCategoryItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCategoryUserItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractCategoryItem.toParams(self)
        params.put("objectType", "KalturaESearchCategoryUserItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        params.addIntEnumIfDefined("permissionLevel", self.permissionLevel)
        params.addStringIfDefined("permissionName", self.permissionName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName

    def getPermissionLevel(self):
        return self.permissionLevel

    def setPermissionLevel(self, newPermissionLevel):
        self.permissionLevel = newPermissionLevel

    def getPermissionName(self):
        return self.permissionName

    def setPermissionName(self, newPermissionName):
        self.permissionName = newPermissionName
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryItem(KalturaESearchAbstractEntryItem):
    """Entry search item targeting a named entry field."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented):
        KalturaESearchAbstractEntryItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaESearchEntryFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchEntryFieldName"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractEntryItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractEntryItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchGroupUserItem(KalturaESearchAbstractUserItem):
    """User search item matching group-user relations by field and creation mode."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented,
            creationMode=NotImplemented):
        KalturaESearchAbstractUserItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaEsearchGroupUserFieldName
        self.fieldName = fieldName

        # @var KalturaGroupUserCreationMode
        self.creationMode = creationMode

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaEsearchGroupUserFieldName"),
        'creationMode': (KalturaEnumsFactory.createInt, "KalturaGroupUserCreationMode"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractUserItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchGroupUserItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractUserItem.toParams(self)
        params.put("objectType", "KalturaESearchGroupUserItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        params.addIntEnumIfDefined("creationMode", self.creationMode)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName

    def getCreationMode(self):
        return self.creationMode

    def setCreationMode(self, newCreationMode):
        self.creationMode = newCreationMode
# @package Kaltura
# @subpackage Client
class KalturaESearchUnifiedItem(KalturaESearchAbstractEntryItem):
    """Entry search item that searches across all fields (no field selector)."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented):
        KalturaESearchAbstractEntryItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

    # No additional properties beyond the base item.
    PROPERTY_LOADERS = {
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractEntryItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUnifiedItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractEntryItem.toParams(self)
        params.put("objectType", "KalturaESearchUnifiedItem")
        return params
# @package Kaltura
# @subpackage Client
class KalturaESearchUserItem(KalturaESearchAbstractUserItem):
    """User search item targeting a named user field."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented):
        KalturaESearchAbstractUserItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaESearchUserFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchUserFieldName"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractUserItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractUserItem.toParams(self)
        params.put("objectType", "KalturaESearchUserItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchUserMetadataItem(KalturaESearchAbstractUserItem):
    """User search item targeting a custom-metadata xpath."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            xpath=NotImplemented,
            metadataProfileId=NotImplemented,
            metadataFieldId=NotImplemented):
        KalturaESearchAbstractUserItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var string
        self.xpath = xpath

        # @var int
        self.metadataProfileId = metadataProfileId

        # @var int
        self.metadataFieldId = metadataFieldId

    PROPERTY_LOADERS = {
        'xpath': getXmlNodeText,
        'metadataProfileId': getXmlNodeInt,
        'metadataFieldId': getXmlNodeInt,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchAbstractUserItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchUserMetadataItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchAbstractUserItem.toParams(self)
        params.put("objectType", "KalturaESearchUserMetadataItem")
        params.addStringIfDefined("xpath", self.xpath)
        params.addIntIfDefined("metadataProfileId", self.metadataProfileId)
        params.addIntIfDefined("metadataFieldId", self.metadataFieldId)
        return params

    def getXpath(self):
        return self.xpath

    def setXpath(self, newXpath):
        self.xpath = newXpath

    def getMetadataProfileId(self):
        return self.metadataProfileId

    def setMetadataProfileId(self, newMetadataProfileId):
        self.metadataProfileId = newMetadataProfileId

    def getMetadataFieldId(self):
        return self.metadataFieldId

    def setMetadataFieldId(self, newMetadataFieldId):
        self.metadataFieldId = newMetadataFieldId
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryAbstractNestedItem(KalturaESearchEntryNestedBaseItem):
    """Base nested entry search item: term, match type, range and highlight flag."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,  # 'range' shadows the builtin; name fixed by the API schema
            addHighlight=NotImplemented):
        KalturaESearchEntryNestedBaseItem.__init__(self)

        # @var string
        self.searchTerm = searchTerm

        # @var KalturaESearchItemType
        self.itemType = itemType

        # @var KalturaESearchRange
        self.range = range

        # @var bool
        self.addHighlight = addHighlight

    PROPERTY_LOADERS = {
        'searchTerm': getXmlNodeText,
        'itemType': (KalturaEnumsFactory.createInt, "KalturaESearchItemType"),
        'range': (KalturaObjectFactory.create, 'KalturaESearchRange'),
        'addHighlight': getXmlNodeBool,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchEntryNestedBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryAbstractNestedItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchEntryNestedBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryAbstractNestedItem")
        params.addStringIfDefined("searchTerm", self.searchTerm)
        params.addIntEnumIfDefined("itemType", self.itemType)
        params.addObjectIfDefined("range", self.range)
        params.addBoolIfDefined("addHighlight", self.addHighlight)
        return params

    def getSearchTerm(self):
        return self.searchTerm

    def setSearchTerm(self, newSearchTerm):
        self.searchTerm = newSearchTerm

    def getItemType(self):
        return self.itemType

    def setItemType(self, newItemType):
        self.itemType = newItemType

    def getRange(self):
        return self.range

    def setRange(self, newRange):
        self.range = newRange

    def getAddHighlight(self):
        return self.addHighlight

    def setAddHighlight(self, newAddHighlight):
        self.addHighlight = newAddHighlight
# @package Kaltura
# @subpackage Client
class KalturaESearchNestedOperator(KalturaESearchEntryNestedBaseItem):
    """Boolean operator (AND/OR/NOT) combining nested entry search items."""

    def __init__(self,
            operator=NotImplemented,
            searchItems=NotImplemented):
        KalturaESearchEntryNestedBaseItem.__init__(self)

        # @var KalturaESearchOperatorType
        self.operator = operator

        # @var array of KalturaESearchEntryNestedBaseItem
        self.searchItems = searchItems

    PROPERTY_LOADERS = {
        'operator': (KalturaEnumsFactory.createInt, "KalturaESearchOperatorType"),
        'searchItems': (KalturaObjectFactory.createArray, 'KalturaESearchEntryNestedBaseItem'),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchEntryNestedBaseItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchNestedOperator.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchEntryNestedBaseItem.toParams(self)
        params.put("objectType", "KalturaESearchNestedOperator")
        params.addIntEnumIfDefined("operator", self.operator)
        params.addArrayIfDefined("searchItems", self.searchItems)
        return params

    def getOperator(self):
        return self.operator

    def setOperator(self, newOperator):
        self.operator = newOperator

    def getSearchItems(self):
        return self.searchItems

    def setSearchItems(self, newSearchItems):
        self.searchItems = newSearchItems
# @package Kaltura
# @subpackage Client
class KalturaESearchCaptionItem(KalturaESearchEntryAbstractNestedItem):
    """Nested entry search item targeting a caption field."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented):
        KalturaESearchEntryAbstractNestedItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaESearchCaptionFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchCaptionFieldName"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchEntryAbstractNestedItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCaptionItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchEntryAbstractNestedItem.toParams(self)
        params.put("objectType", "KalturaESearchCaptionItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchCuePointItem(KalturaESearchEntryAbstractNestedItem):
    """Nested entry search item targeting a cue-point field."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            fieldName=NotImplemented):
        KalturaESearchEntryAbstractNestedItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var KalturaESearchCuePointFieldName
        self.fieldName = fieldName

    PROPERTY_LOADERS = {
        'fieldName': (KalturaEnumsFactory.createString, "KalturaESearchCuePointFieldName"),
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchEntryAbstractNestedItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchCuePointItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchEntryAbstractNestedItem.toParams(self)
        params.put("objectType", "KalturaESearchCuePointItem")
        params.addStringEnumIfDefined("fieldName", self.fieldName)
        return params

    def getFieldName(self):
        return self.fieldName

    def setFieldName(self, newFieldName):
        self.fieldName = newFieldName
# @package Kaltura
# @subpackage Client
class KalturaESearchEntryMetadataItem(KalturaESearchEntryAbstractNestedItem):
    """Nested entry search item targeting a custom-metadata xpath."""

    def __init__(self,
            searchTerm=NotImplemented,
            itemType=NotImplemented,
            range=NotImplemented,
            addHighlight=NotImplemented,
            xpath=NotImplemented,
            metadataProfileId=NotImplemented,
            metadataFieldId=NotImplemented):
        KalturaESearchEntryAbstractNestedItem.__init__(self,
            searchTerm,
            itemType,
            range,
            addHighlight)

        # @var string
        self.xpath = xpath

        # @var int
        self.metadataProfileId = metadataProfileId

        # @var int
        self.metadataFieldId = metadataFieldId

    PROPERTY_LOADERS = {
        'xpath': getXmlNodeText,
        'metadataProfileId': getXmlNodeInt,
        'metadataFieldId': getXmlNodeInt,
    }

    def fromXml(self, node):
        """Populate this object from a server XML response node."""
        KalturaESearchEntryAbstractNestedItem.fromXml(self, node)
        self.fromXmlImpl(node, KalturaESearchEntryMetadataItem.PROPERTY_LOADERS)

    def toParams(self):
        """Serialize the defined fields into a KalturaParams request map."""
        params = KalturaESearchEntryAbstractNestedItem.toParams(self)
        params.put("objectType", "KalturaESearchEntryMetadataItem")
        params.addStringIfDefined("xpath", self.xpath)
        params.addIntIfDefined("metadataProfileId", self.metadataProfileId)
        params.addIntIfDefined("metadataFieldId", self.metadataFieldId)
        return params

    def getXpath(self):
        return self.xpath

    def setXpath(self, newXpath):
        self.xpath = newXpath

    def getMetadataProfileId(self):
        return self.metadataProfileId

    def setMetadataProfileId(self, newMetadataProfileId):
        self.metadataProfileId = newMetadataProfileId

    def getMetadataFieldId(self):
        return self.metadataFieldId

    def setMetadataFieldId(self, newMetadataFieldId):
        self.metadataFieldId = newMetadataFieldId
########## services ##########

# @package Kaltura
# @subpackage Client
class KalturaESearchService(KalturaServiceBase):
    """Client proxy for the elasticsearch_esearch service actions."""

    def __init__(self, client = None):
        KalturaServiceBase.__init__(self, client)

    def searchCategory(self, searchParams, pager = NotImplemented):
        """Queue/execute a category search; returns a KalturaESearchCategoryResponse."""
        params = KalturaParams()
        params.addObjectIfDefined("searchParams", searchParams)
        params.addObjectIfDefined("pager", pager)
        self.client.queueServiceActionCall("elasticsearch_esearch", "searchCategory", "KalturaESearchCategoryResponse", params)
        if self.client.isMultiRequest():
            # Inside a multi-request only a placeholder is returned now.
            return self.client.getMultiRequestResult()
        result_node = self.client.doQueue()
        return KalturaObjectFactory.create(result_node, 'KalturaESearchCategoryResponse')

    def searchEntry(self, searchParams, pager = NotImplemented):
        """Queue/execute an entry search; returns a KalturaESearchEntryResponse."""
        params = KalturaParams()
        params.addObjectIfDefined("searchParams", searchParams)
        params.addObjectIfDefined("pager", pager)
        self.client.queueServiceActionCall("elasticsearch_esearch", "searchEntry", "KalturaESearchEntryResponse", params)
        if self.client.isMultiRequest():
            # Inside a multi-request only a placeholder is returned now.
            return self.client.getMultiRequestResult()
        result_node = self.client.doQueue()
        return KalturaObjectFactory.create(result_node, 'KalturaESearchEntryResponse')

    def searchGroup(self, searchParams, pager = NotImplemented):
        """Queue/execute a group search; returns a KalturaESearchGroupResponse."""
        params = KalturaParams()
        params.addObjectIfDefined("searchParams", searchParams)
        params.addObjectIfDefined("pager", pager)
        self.client.queueServiceActionCall("elasticsearch_esearch", "searchGroup", "KalturaESearchGroupResponse", params)
        if self.client.isMultiRequest():
            # Inside a multi-request only a placeholder is returned now.
            return self.client.getMultiRequestResult()
        result_node = self.client.doQueue()
        return KalturaObjectFactory.create(result_node, 'KalturaESearchGroupResponse')

    def searchUser(self, searchParams, pager = NotImplemented):
        """Queue/execute a user search; returns a KalturaESearchUserResponse."""
        params = KalturaParams()
        params.addObjectIfDefined("searchParams", searchParams)
        params.addObjectIfDefined("pager", pager)
        self.client.queueServiceActionCall("elasticsearch_esearch", "searchUser", "KalturaESearchUserResponse", params)
        if self.client.isMultiRequest():
            # Inside a multi-request only a placeholder is returned now.
            return self.client.getMultiRequestResult()
        result_node = self.client.doQueue()
        return KalturaObjectFactory.create(result_node, 'KalturaESearchUserResponse')
########## main ##########
class KalturaElasticSearchClientPlugin(KalturaClientPlugin):
    """Plugin registration object exposing the ElasticSearch service, enums and types."""

    # Lazily-created singleton instance (see get()).
    instance = None

    # @return KalturaElasticSearchClientPlugin
    @staticmethod
    def get():
        # PEP 8: compare to None with identity ('is'), not equality ('==').
        if KalturaElasticSearchClientPlugin.instance is None:
            KalturaElasticSearchClientPlugin.instance = KalturaElasticSearchClientPlugin()
        return KalturaElasticSearchClientPlugin.instance

    # @return array<KalturaServiceBase>
    def getServices(self):
        return {
            'eSearch': KalturaESearchService,
        }

    def getEnums(self):
        return {
            'KalturaESearchItemType': KalturaESearchItemType,
            'KalturaESearchOperatorType': KalturaESearchOperatorType,
            'KalturaESearchCaptionFieldName': KalturaESearchCaptionFieldName,
            'KalturaESearchCategoryAggregateByFieldName': KalturaESearchCategoryAggregateByFieldName,
            'KalturaESearchCategoryEntryFieldName': KalturaESearchCategoryEntryFieldName,
            'KalturaESearchCategoryFieldName': KalturaESearchCategoryFieldName,
            'KalturaESearchCategoryOrderByFieldName': KalturaESearchCategoryOrderByFieldName,
            'KalturaESearchCategoryUserFieldName': KalturaESearchCategoryUserFieldName,
            'KalturaESearchCuePointAggregateByFieldName': KalturaESearchCuePointAggregateByFieldName,
            'KalturaESearchCuePointFieldName': KalturaESearchCuePointFieldName,
            'KalturaESearchEntryAggregateByFieldName': KalturaESearchEntryAggregateByFieldName,
            'KalturaESearchEntryFieldName': KalturaESearchEntryFieldName,
            'KalturaESearchEntryOrderByFieldName': KalturaESearchEntryOrderByFieldName,
            'KalturaESearchMetadataAggregateByFieldName': KalturaESearchMetadataAggregateByFieldName,
            'KalturaESearchSortOrder': KalturaESearchSortOrder,
            'KalturaESearchUserFieldName': KalturaESearchUserFieldName,
            'KalturaESearchUserOrderByFieldName': KalturaESearchUserOrderByFieldName,
            'KalturaEsearchGroupUserFieldName': KalturaEsearchGroupUserFieldName,
        }

    def getTypes(self):
        return {
            'KalturaESearchBaseItem': KalturaESearchBaseItem,
            'KalturaBeaconScheduledResourceBaseItem': KalturaBeaconScheduledResourceBaseItem,
            'KalturaESearchOrderByItem': KalturaESearchOrderByItem,
            'KalturaESearchAggregationItem': KalturaESearchAggregationItem,
            'KalturaESearchAggregation': KalturaESearchAggregation,
            'KalturaESearchAggregationBucket': KalturaESearchAggregationBucket,
            'KalturaESearchAggregationResponseItem': KalturaESearchAggregationResponseItem,
            'KalturaESearchBaseFilter': KalturaESearchBaseFilter,
            'KalturaESearchCategoryBaseItem': KalturaESearchCategoryBaseItem,
            'KalturaESearchHighlight': KalturaESearchHighlight,
            'KalturaESearchItemData': KalturaESearchItemData,
            'KalturaESearchItemDataResult': KalturaESearchItemDataResult,
            'KalturaESearchResult': KalturaESearchResult,
            'KalturaESearchCategoryResult': KalturaESearchCategoryResult,
            'KalturaESearchEntryBaseItem': KalturaESearchEntryBaseItem,
            'KalturaESearchEntryBaseNestedObject': KalturaESearchEntryBaseNestedObject,
            'KalturaESearchEntryNestedBaseItem': KalturaESearchEntryNestedBaseItem,
            'KalturaESearchEntryResult': KalturaESearchEntryResult,
            'KalturaESearchGroupResult': KalturaESearchGroupResult,
            'KalturaESearchOrderBy': KalturaESearchOrderBy,
            'KalturaESearchParams': KalturaESearchParams,
            'KalturaESearchRange': KalturaESearchRange,
            'KalturaESearchResponse': KalturaESearchResponse,
            'KalturaESearchUserBaseItem': KalturaESearchUserBaseItem,
            'KalturaESearchUserResult': KalturaESearchUserResult,
            'KalturaESearchEntryOperator': KalturaESearchEntryOperator,
            'KalturaESearchCaptionItemData': KalturaESearchCaptionItemData,
            'KalturaESearchCategoryAggregationItem': KalturaESearchCategoryAggregationItem,
            'KalturaESearchCategoryOrderByItem': KalturaESearchCategoryOrderByItem,
            'KalturaESearchCategoryOperator': KalturaESearchCategoryOperator,
            'KalturaESearchCategoryParams': KalturaESearchCategoryParams,
            'KalturaESearchCategoryResponse': KalturaESearchCategoryResponse,
            'KalturaESearchCuePointItemData': KalturaESearchCuePointItemData,
            'KalturaESearchCuepointsAggregationItem': KalturaESearchCuepointsAggregationItem,
            'KalturaESearchEntryAggregationItem': KalturaESearchEntryAggregationItem,
            'KalturaESearchEntryOrderByItem': KalturaESearchEntryOrderByItem,
            'KalturaESearchEntryParams': KalturaESearchEntryParams,
            'KalturaESearchEntryResponse': KalturaESearchEntryResponse,
            'KalturaESearchGroupOrderByItem': KalturaESearchGroupOrderByItem,
            'KalturaESearchGroupParams': KalturaESearchGroupParams,
            'KalturaESearchGroupResponse': KalturaESearchGroupResponse,
            'KalturaESearchMetadataAggregationItem': KalturaESearchMetadataAggregationItem,
            'KalturaESearchMetadataItemData': KalturaESearchMetadataItemData,
            'KalturaESearchMetadataOrderByItem': KalturaESearchMetadataOrderByItem,
            'KalturaESearchUserOrderByItem': KalturaESearchUserOrderByItem,
            'KalturaESearchUserOperator': KalturaESearchUserOperator,
            'KalturaESearchUserParams': KalturaESearchUserParams,
            'KalturaESearchUserResponse': KalturaESearchUserResponse,
            'KalturaEntryCaptionAdvancedFilter': KalturaEntryCaptionAdvancedFilter,
            'KalturaBeaconAbstractScheduledResourceItem': KalturaBeaconAbstractScheduledResourceItem,
            'KalturaESearchAbstractCategoryItem': KalturaESearchAbstractCategoryItem,
            'KalturaESearchAbstractEntryItem': KalturaESearchAbstractEntryItem,
            'KalturaESearchAbstractUserItem': KalturaESearchAbstractUserItem,
            'KalturaMediaEsearchExportToCsvJobData': KalturaMediaEsearchExportToCsvJobData,
            'KalturaESearchCategoryEntryItem': KalturaESearchCategoryEntryItem,
            'KalturaESearchCategoryItem': KalturaESearchCategoryItem,
            'KalturaESearchCategoryMetadataItem': KalturaESearchCategoryMetadataItem,
            'KalturaESearchCategoryUserItem': KalturaESearchCategoryUserItem,
            'KalturaESearchEntryItem': KalturaESearchEntryItem,
            'KalturaESearchGroupUserItem': KalturaESearchGroupUserItem,
            'KalturaESearchUnifiedItem': KalturaESearchUnifiedItem,
            'KalturaESearchUserItem': KalturaESearchUserItem,
            'KalturaESearchUserMetadataItem': KalturaESearchUserMetadataItem,
            'KalturaESearchEntryAbstractNestedItem': KalturaESearchEntryAbstractNestedItem,
            'KalturaESearchNestedOperator': KalturaESearchNestedOperator,
            'KalturaESearchCaptionItem': KalturaESearchCaptionItem,
            'KalturaESearchCuePointItem': KalturaESearchCuePointItem,
            'KalturaESearchEntryMetadataItem': KalturaESearchEntryMetadataItem,
        }

    # @return string
    def getName(self):
        return 'elasticSearch'
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_ps.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
  // Count the digits after the decimal point in the string form of n.
  var text = n + '';
  var dot = text.indexOf('.');
  if (dot === -1) {
    return 0;
  }
  return text.length - dot - 1;
}
function getVF(n, opt_precision) {
  // Resolve the visible fraction-digit count: caller-supplied, otherwise
  // the number's own decimals capped at 3.
  var digits = (opt_precision === undefined) ? Math.min(getDecimals(n), 3) : opt_precision;
  var scale = Math.pow(10, digits);
  // Bitwise-or with 0 truncates toward zero, yielding the fractional
  // digits of n as a whole number.
  var fraction = ((n * scale) | 0) % scale;
  return {v: digits, f: fraction};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"\u063a.\u0645.",
"\u063a.\u0648."
],
"DAY": [
"\u06cc\u06a9\u0634\u0646\u0628\u0647",
"\u062f\u0648\u0634\u0646\u0628\u0647",
"\u0633\u0647\u200c\u0634\u0646\u0628\u0647",
"\u0686\u0647\u0627\u0631\u0634\u0646\u0628\u0647",
"\u067e\u0646\u062c\u0634\u0646\u0628\u0647",
"\u062c\u0645\u0639\u0647",
"\u0634\u0646\u0628\u0647"
],
"MONTH": [
"\u062c\u0646\u0648\u0631\u064a",
"\u0641\u0628\u0631\u0648\u0631\u064a",
"\u0645\u0627\u0631\u0686",
"\u0627\u067e\u0631\u06cc\u0644",
"\u0645\u06cc",
"\u062c\u0648\u0646",
"\u062c\u0648\u0644\u0627\u06cc",
"\u0627\u06ab\u0633\u062a",
"\u0633\u067e\u062a\u0645\u0628\u0631",
"\u0627\u06a9\u062a\u0648\u0628\u0631",
"\u0646\u0648\u0645\u0628\u0631",
"\u062f\u0633\u0645\u0628\u0631"
],
"SHORTDAY": [
"\u06cc\u06a9\u0634\u0646\u0628\u0647",
"\u062f\u0648\u0634\u0646\u0628\u0647",
"\u0633\u0647\u200c\u0634\u0646\u0628\u0647",
"\u0686\u0647\u0627\u0631\u0634\u0646\u0628\u0647",
"\u067e\u0646\u062c\u0634\u0646\u0628\u0647",
"\u062c\u0645\u0639\u0647",
"\u0634\u0646\u0628\u0647"
],
"SHORTMONTH": [
"\u062c\u0646\u0648\u0631\u064a",
"\u0641\u0628\u0631\u0648\u0631\u064a",
"\u0645\u0627\u0631\u0686",
"\u0627\u067e\u0631\u06cc\u0644",
"\u0645\u06cc",
"\u062c\u0648\u0646",
"\u062c\u0648\u0644\u0627\u06cc",
"\u0627\u06ab\u0633\u062a",
"\u0633\u067e\u062a\u0645\u0628\u0631",
"\u0627\u06a9\u062a\u0648\u0628\u0631",
"\u0646\u0648\u0645\u0628\u0631",
"\u062f\u0633\u0645\u0628\u0631"
],
"fullDate": "EEEE \u062f y \u062f MMMM d",
"longDate": "\u062f y \u062f MMMM d",
"medium": "d MMM y H:mm:ss",
"mediumDate": "d MMM y",
"mediumTime": "H:mm:ss",
"short": "y/M/d H:mm",
"shortDate": "y/M/d",
"shortTime": "H:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "Af.",
"DECIMAL_SEP": "\u066b",
"GROUP_SEP": "\u066c",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a0\u00a4",
"posPre": "",
"posSuf": "\u00a0\u00a4"
}
]
},
"id": "ps",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/ModelTool-0.8.5.tar.gz/ModelTool-0.8.5/modeltool/command.py | import modeltool
from modeltool.lambda_creator import LambdaCreator
from modeltool.lambda_deployer import LambdaDeployer
import click
import boto3
import logging
import sys
import os
import json
# Stage name used when the --stage option is not supplied to `deploy`.
default_stage = 'dev'
# Message printed after a new lambda skeleton is created; the three {} slots
# are filled with (name, directory, name) in start_new_lambda().
fresh_notes = '''A skeleton of the new lambda, {}, has been created.
In {}/{}/config you will find a config.ini file that you should
fill in with parameters for your own account.
Develop the lambda function as needed then you can deploy it with:
modeltool deploy. The lambda has been started in main.py.
'''
@click.group()
@click.version_option(version='0.8.5')
def cli():
    # Root command group for the modeltool CLI; the `new`, `deploy` and
    # `print_env` commands below attach themselves via @cli.command().
    # (Deliberately no docstring: click would surface it in --help output.)
    pass
@cli.command()
@click.option('-d', '--directory', help='target directory for new Lambda, defaults to current directory')
@click.option('-n', '--name', help='name of the new lambda skeleton', required=True)
@click.option('-s', '--service', help='create a flask like micro-service', is_flag=True)
@click.option('-p', '--profile', help='AWS CLI profile to use in the deployment, more details at http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html')
@click.option('-r', '--region', help='target region, defaults to your credentials default region')
def new(directory, name, service, profile, region):
    # Assemble the creation options in one place, substituting defaults for
    # any flag the user omitted, then hand off to start_new_lambda().
    template_kind = 'service' if service else 'simple'
    command_line = {
        'name': name,
        'template_directory': '{}/template/{}'.format(modeltool.__path__[0], template_kind),
        'directory': directory if directory else '.',
        'profile': profile if profile else None,
        'region': region if region else None,
        'service': service,
    }
    if start_new_lambda(command_line):
        sys.exit(0)
    else:
        sys.exit(1)
@cli.command()
@click.option('-d', '--directory', help='scratch directory for deploy, defaults to /tmp')
@click.option('-s', '--stage', help='environment/stage used to name and deploy the Lambda function, defaults to dev')
@click.option('-p', '--profile', help='AWS CLI profile to use in the deployment, more details at http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html')
@click.option('-r', '--region', help='target region, defaults to your credentials default region')
def deploy(directory, stage, profile, region):
    # Collect the deployment options, substituting the documented defaults
    # for omitted flags, then hand off to deploy_lambda().
    command_line = {
        'work_directory': directory if directory else '/tmp',
        'stage': stage if stage else default_stage,
        'profile': profile if profile else None,
        'region': region if region else None,
        'template_directory': '{}/template'.format(modeltool.__path__[0]),
    }
    logging.info('command_line: {}'.format(json.dumps(command_line, indent=2)))
    if deploy_lambda(command_line):
        sys.exit(0)
    else:
        sys.exit(1)
@cli.command()
@click.option('-s', '--stage', help='environment/stage of interest', required=True)
def print_env(stage):
    # Print each line of the stage's function.properties file as a shell
    # `export` statement; silently does nothing if the file is absent.
    config_file = f'config/{stage}/function.properties'
    if os.path.isfile(config_file):
        # Iterate the file directly instead of the original manual
        # readline() loop; this visits exactly the same lines (including
        # blank ones, which still print a bare "export ").
        with open(config_file, 'r') as f:
            for line in f:
                print(f'export {line.strip()}')
def start_new_lambda(command_line):
    """Create a new lambda skeleton; return True on success, exit(1) on failure.

    BUG FIX: the original fell off the end (returning None) on success, so
    the `new` command's truthiness check always took the failure branch and
    exited with status 1. Returning True mirrors deploy_lambda().
    """
    try:
        tool = LambdaCreator(command_line)
    except Exception:
        sys.exit(1)
    if tool.create_lambda():
        logging.info('create_new_lambda() went well')
        print('\n\n\n\n')
        print('********************************************************************************')
        print(fresh_notes.format(
            command_line['name'],
            command_line['directory'],
            command_line['name'])
        )
        return True
    else:
        logging.error('create_new_lambda() did not go well')
        sys.exit(1)
def deploy_lambda(command_line):
    """Run the deployer; return True on success, exit the process on failure."""
    try:
        deployer = LambdaDeployer(command_line)
    except Exception:
        sys.exit(1)
    if not deployer.deploy_lambda():
        logging.error('deploy_lambda() did not go well')
        sys.exit(1)
    logging.info('deploy_lambda() went well')
    return True
def find_myself():
    """Return the default AWS region name of a fresh boto3 session."""
    session = boto3.session.Session()
    return session.region_name
/NeuroRuler-1.7.tar.gz/NeuroRuler-1.7/README.md | # NeuroRuler



> A program that calculates head circumference from MRI data (`.nii`, `.nii.gz`, `.nrrd`).
<p align="center">
<img src="https://i.imgur.com/nqwqHq8.gif" alt="GUI demo"/>
</p>
## Cite this tool
If you would like to cite this tool, you can use the following BibTeX entry.
```bibtex
@misc{neuroruler,
title={NeuroRuler},
author={Wei, Jesse and Lester, Madison and He, Peifeng and Schneider, Eric and Styner, Martin},
howpublished={\url{https://github.com/COMP523TeamD/HeadCircumferenceTool}},
year={2023}
}
```
## Install
Your Python version needs to be 3.8+. Check with `python --version`. Clone this repo, and install the Python dependencies.
```sh
pip install -r requirements.txt
pip install -i https://test.pypi.org/simple/ NeuroRuler
```
If `pip` doesn't work, try `pip3` or `python3 -m pip`.
If contributing to this repo, please also run `pre-commit install` to run pre-commit actions (i.e., autoformat) on your code before commits.
## Start GUI
Run these commands in a Python terminal:
```py
from GUI import gui
gui()
```
Note: If you make changes to the repo, then use the [`gui.py`](https://github.com/COMP523TeamD/HeadCircumferenceTool/blob/main/gui.py) script to run the GUI. Changes you make will not be reflected in the package from pip until uploaded to PyPi.
## Configure settings
Edit [`config.json`](config.json).
You can also supply CLI arguments, which override settings in `config.json`.
```text
usage: gui.py [-h] [-d] [-e] [-t THEME] [-c COLOR]
options:
-h, --help show this help message and exit
-d, --debug print debug info
-e, --export-index exported file names use the index displayed in the GUI instead of the original file name
-t THEME, --theme THEME
configure theme, options are dark, dark-green, dark-hct, dark-purple, light, light-green, light-hct, or
light-purple
-c COLOR, --color COLOR
contour color as name (e.g. red) or hex color code rrggbb
```
## Run tests
`pytest`
## Documentation
[https://headcircumferencetool.readthedocs.io](https://headcircumferencetool.readthedocs.io)
See [`.readthedocs.yaml`](.readthedocs.yaml) and [`docs/`](docs/).
## Pre-commit actions
Run `pre-commit install` to enable pre-commit actions.
Before each commit, the actions in [`.pre-commit-config.yaml`](.pre-commit-config.yaml) will be run. Specifically, code will be reformatted with `black`. Note that some file names are excluded, so don't name any source code files those names.
| PypiClean |
/AyiinXd-0.0.8-cp311-cp311-macosx_10_9_universal2.whl/fipper/methods/messages/__init__.py |
from .copy_media_group import CopyMediaGroup
from .copy_message import CopyMessage
from .delete_messages import DeleteMessages
from .download_media import DownloadMedia
from .edit_inline_caption import EditInlineCaption
from .edit_inline_media import EditInlineMedia
from .edit_inline_reply_markup import EditInlineReplyMarkup
from .edit_inline_text import EditInlineText
from .edit_message_caption import EditMessageCaption
from .edit_message_media import EditMessageMedia
from .edit_message_reply_markup import EditMessageReplyMarkup
from .edit_message_text import EditMessageText
from .forward_messages import ForwardMessages
from .get_chat_history import GetChatHistory
from .get_chat_history_count import GetChatHistoryCount
from .get_custom_emoji_stickers import GetCustomEmojiStickers
from .get_discussion_message import GetDiscussionMessage
from .get_discussion_replies import GetDiscussionReplies
from .get_discussion_replies_count import GetDiscussionRepliesCount
from .get_media_group import GetMediaGroup
from .get_messages import GetMessages
from .read_chat_history import ReadChatHistory
from .retract_vote import RetractVote
from .search_global import SearchGlobal
from .search_global_count import SearchGlobalCount
from .search_messages import SearchMessages
from .search_messages_count import SearchMessagesCount
from .send_animation import SendAnimation
from .send_audio import SendAudio
from .send_cached_media import SendCachedMedia
from .send_chat_action import SendChatAction
from .send_contact import SendContact
from .send_dice import SendDice
from .send_document import SendDocument
from .send_location import SendLocation
from .send_media_group import SendMediaGroup
from .send_message import SendMessage
from .send_photo import SendPhoto
from .send_poll import SendPoll
from .send_reaction import SendReaction
from .send_sticker import SendSticker
from .send_venue import SendVenue
from .send_video import SendVideo
from .send_video_note import SendVideoNote
from .send_voice import SendVoice
from .stop_poll import StopPoll
from .stream_media import StreamMedia
from .vote_poll import VotePoll
from .wait_for_callback_query import WaitForCallbackQuery
from .wait_for_message import WaitForMessage
class Messages(
    DeleteMessages,
    EditMessageCaption,
    EditMessageReplyMarkup,
    EditMessageMedia,
    EditMessageText,
    ForwardMessages,
    GetMediaGroup,
    GetMessages,
    SendAudio,
    SendChatAction,
    SendContact,
    SendDocument,
    SendAnimation,
    SendLocation,
    SendMediaGroup,
    SendMessage,
    SendPhoto,
    SendSticker,
    SendVenue,
    SendVideo,
    SendVideoNote,
    SendVoice,
    SendPoll,
    VotePoll,
    StopPoll,
    RetractVote,
    DownloadMedia,
    GetChatHistory,
    SendCachedMedia,
    GetChatHistoryCount,
    ReadChatHistory,
    EditInlineText,
    EditInlineCaption,
    EditInlineMedia,
    EditInlineReplyMarkup,
    SendDice,
    SearchMessages,
    SearchGlobal,
    CopyMessage,
    CopyMediaGroup,
    SearchMessagesCount,
    SearchGlobalCount,
    GetDiscussionMessage,
    SendReaction,
    GetDiscussionReplies,
    GetDiscussionRepliesCount,
    StreamMedia,
    GetCustomEmojiStickers,
    WaitForCallbackQuery,
    WaitForMessage,
):
    """Aggregates all the message-related method classes imported above into one mixin."""
    pass
/dirtrav-1.0.0.tar.gz/dirtrav-1.0.0/docs/deploying/apache-httpd.rst | Apache httpd
============
`Apache httpd`_ is a fast, production level HTTP server. When serving
your application with one of the WSGI servers listed in :doc:`index`, it
is often good or necessary to put a dedicated HTTP server in front of
it. This "reverse proxy" can handle incoming requests, TLS, and other
security and performance concerns better than the WSGI server.
httpd can be installed using your system package manager, or a pre-built
executable for Windows. Installing and running httpd itself is outside
the scope of this doc. This page outlines the basics of configuring
httpd to proxy your application. Be sure to read its documentation to
understand what features are available.
.. _Apache httpd: https://httpd.apache.org/
Domain Name
-----------
Acquiring and configuring a domain name is outside the scope of this
doc. In general, you will buy a domain name from a registrar, pay for
server space with a hosting provider, and then point your registrar
at the hosting provider's name servers.
To simulate this, you can also edit your ``hosts`` file, located at
``/etc/hosts`` on Linux. Add a line that associates a name with the
local IP.
Modern Linux systems may be configured to treat any domain name that
ends with ``.localhost`` like this without adding it to the ``hosts``
file.
.. code-block:: python
:caption: ``/etc/hosts``
127.0.0.1 hello.localhost
Configuration
-------------
The httpd configuration is located at ``/etc/httpd/conf/httpd.conf`` on
Linux. It may be different depending on your operating system. Check the
docs and look for ``httpd.conf``.
Remove or comment out any existing ``DocumentRoot`` directive. Add the
config lines below. We'll assume the WSGI server is listening locally at
``http://127.0.0.1:8000``.
.. code-block:: apache
:caption: ``/etc/httpd/conf/httpd.conf``
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
ProxyPass / http://127.0.0.1:8000/
RequestHeader set X-Forwarded-Proto http
RequestHeader set X-Forwarded-Prefix /
The ``LoadModule`` lines might already exist. If so, make sure they are
uncommented instead of adding them manually.
Then :doc:`proxy_fix` so that your application uses the ``X-Forwarded``
headers. ``X-Forwarded-For`` and ``X-Forwarded-Host`` are automatically
set by ``ProxyPass``.
| PypiClean |
/MezzanineFor1.7-3.1.10.tar.gz/MezzanineFor1.7-3.1.10/mezzanine/utils/device.py | from __future__ import unicode_literals
def device_from_request(request):
    """
    Determines the device name from the request by first looking for an
    overriding cookie, and if not found then matching the user agent.
    Used at both the template level for choosing the template to load and
    also at the cache level as a cache key prefix.
    """
    from mezzanine.conf import settings
    try:
        # If a device was set via cookie, match available devices.
        for (device, _) in settings.DEVICE_USER_AGENTS:
            if device == request.COOKIES["mezzanine-device"]:
                return device
    except KeyError:
        # If a device wasn't set via cookie, match user agent.
        try:
            user_agent = request.META["HTTP_USER_AGENT"].lower()
        except KeyError:
            pass
        else:
            try:
                # Python 2 compatibility: the header may arrive as bytes.
                # On Python 3, str has no .decode, so the AttributeError is
                # swallowed and the text value is used as-is.
                user_agent = user_agent.decode("utf-8")
            except (AttributeError, UnicodeDecodeError):
                pass
            for (device, ua_strings) in settings.DEVICE_USER_AGENTS:
                for ua_string in ua_strings:
                    if ua_string.lower() in user_agent:
                        return device
    # No cookie override and no user-agent match: no specific device.
    return ""
def templates_for_device(request, templates):
    """
    Given a template name (or list of them), return the names as a list,
    with each name preceded by its device-specific variants: first the
    detected device's variant, then the default device's variant.
    """
    from mezzanine.conf import settings
    if not isinstance(templates, (list, tuple)):
        templates = [templates]
    device = device_from_request(request)

    def variants(template):
        # Yield the device-prefixed candidates before the plain name.
        if device:
            yield "%s/%s" % (device, template)
        default = settings.DEVICE_DEFAULT
        if default and default != device:
            yield "%s/%s" % (default, template)
        yield template

    return [candidate for template in templates for candidate in variants(template)]
/HfCh5Levi-1.0.4.tar.gz/HfCh5Levi-1.0.4/HfCh5Levi.py | import os;
# NOTE(review): os.getcwd() is called but its result is discarded, and
# os.chdir targets an author-specific absolute path — confirm before
# running this script on another machine.
os.getcwd()
os.chdir('/Users/AnQiuPing/Documents/Python/HfCh5Levi')
# Raw per-athlete time strings, populated inside getData().
julieList = []
jamesList = []
sarahList = []
mikeyList = []
'''four new lists for storing the ordered and uniformed lists from original lists'''
sanitizedJames = []
sanitizedJulie = []
sanitizedMikey = []
sanitizedSarah = []
'''new lists for removing the duplicates and displaying the top 3 plays' time'''
uniqueJames = []
uniqueJulie = []
uniqueMikey = []
uniqueSarah = []
def sanitize(time_string):
    """Normalize a time string so minutes and seconds are separated by a dot.

    '2-34' and '2:34' both become '2.34'; strings with neither separator
    are returned unchanged.
    """
    for separator in ('-', ':'):
        if separator in time_string:
            mins, secs = time_string.split(separator)
            return mins + '.' + secs
    return time_string
def getFiles(file_name):
    """Read the first line of *file_name* and return its comma-separated fields.

    Returns None (implicitly) when the file cannot be opened; the error is
    reported on stdout.
    """
    try:
        with open(file_name) as fileSubject:
            data = fileSubject.readline()
        return data.strip().split(',')
    except IOError as err:
        # BUG FIX: the original wrote `print("File IO Error:", + str(err))`,
        # applying unary '+' to a string — a TypeError in the error path.
        print("File IO Error: " + str(err))
def getData():
    """Load each athlete's times, sanitize and sort them, then print top-3 summaries."""
    try:
        jamesList = getFiles('james.txt')
        julieList = getFiles('julie.txt')
        mikeyList = getFiles('mikey.txt')
        sarahList = getFiles('sarah.txt')
        '''
        with open('james.txt') as james:
            data = james.readline()
            jamesList = data.strip().split(',')
        with open('julie.txt') as julie:
            data = julie.readline()
            julieList = data.strip().split(',')
        with open('mikey.txt') as mikey:
            data = mikey.readline()
            mikeyList = data.strip().split(',')
        with open('sarah.txt') as sarah:
            data = sarah.readline()
            sarahList = data.strip().split(',')
        '''
        '''for item in jamesList:
            sanitizedJames.append(sanitize(item))'''
        '''the below code is a new way for list comprehension'''
        # NOTE: these assignments create locals that shadow the module-level
        # sanitized* lists; only the unique* module lists are mutated below.
        sanitizedJames = sorted([sanitize(each_item) for each_item in jamesList])
        '''for item in julieList:
            sanitizedJulie.append(sanitize(item))'''
        sanitizedJulie = sorted([sanitize(each_item) for each_item in julieList])
        '''for item in mikeyList:
            sanitizedMikey.append(sanitize(item))'''
        sanitizedMikey = sorted([sanitize(each_item) for each_item in mikeyList])
        '''for item in sarahList:
            sanitizedSarah.append(sanitize(item))'''
        sanitizedSarah = sorted([sanitize(each_item) for each_item in sarahList])
        print("now print the non-duplication result using set() rather than list()")
        print(sorted(set(sanitize(t) for t in jamesList))[0:3])
        print(sorted(set(sanitize(t) for t in julieList))[0:3])
        print(sorted(set(sanitize(t) for t in mikeyList))[0:3])
        print(sorted(set(sanitize(t) for t in sarahList))[0:3])
        '''now refiene the code for removing the duplicates using set() rather than list()'''
        for each_item in sanitizedJames:
            if each_item not in uniqueJames:
                uniqueJames.append(each_item)
        for each_item in sanitizedJulie:
            if each_item not in uniqueJulie:
                uniqueJulie.append(each_item)
        for each_item in sanitizedMikey:
            if each_item not in uniqueMikey:
                uniqueMikey.append(each_item)
        for each_item in sanitizedSarah:
            if each_item not in uniqueSarah:
                uniqueSarah.append(each_item)
        print("now print the top 3 fastest time from different players:")
        # NOTE(review): the labels below repeat "James"/"Julie" for the
        # Mikey and Sarah lists — likely a copy-paste slip in the originals.
        print("James:", uniqueJames[0:3])
        print("Julie:", uniqueJulie[0:3])
        print("James:", uniqueMikey[0:3])
        print("Julie:", uniqueSarah[0:3])
        '''
        print("now print the sorted athletes' list in asending order:")
        print(sanitizedJames)
        print(sanitizedJulie)
        print(sanitizedMikey)
        print(sanitizedSarah)
        print("now print the sorted athletes' list in descending order:")
        print(sorted(sanitizedJames, reverse = True))
        print(sorted(sanitizedJulie, reverse = True))
        print(sorted(sanitizedMikey, reverse = True))
        print(sorted(sanitizedSarah, reverse = True))
        '''
        # print(jamesList)
        # print(sorted(jamesList))
        # print(julieList)
        # print(sorted(julieList))
        # print(mikeyList)
        # print(sorted(mikeyList))
        # print(sarahList)
        # print(sorted(sarahList))
    except IOError as err:
        print('File Error:' + str(err))
# Script entry point: read, sanitize, and report the athletes' times.
getData()
/Electrum-VTC-2.9.3.3.tar.gz/Electrum-VTC-2.9.3.3/gui/vtc/request_list.py |
from electrum_vtc.i18n import _
from electrum_vtc.util import block_explorer_URL, format_satoshis, format_time, age
from electrum_vtc.plugins import run_hook
from electrum_vtc.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from util import MyTreeWidget, pr_tooltips, pr_icons
class RequestList(MyTreeWidget):
    """Tree widget listing the wallet's stored payment requests (Receive tab)."""

    filter_columns = [0, 1, 2, 3, 4]  # Date, Account, Address, Description, Amount

    def __init__(self, parent):
        MyTreeWidget.__init__(self, parent, self.create_menu, [_('Date'), _('Address'), '', _('Description'), _('Amount'), _('Status')], 3)
        self.currentItemChanged.connect(self.item_changed)
        self.itemClicked.connect(self.item_changed)
        self.setSortingEnabled(True)
        self.setColumnWidth(0, 180)
        self.hideColumn(1)

    def item_changed(self, item):
        """Populate the receive pane with the selected row's request details."""
        if item is None:
            return
        if not self.isItemSelected(item):
            return
        addr = str(item.text(1))
        req = self.wallet.receive_requests[addr]
        expires = age(req['time'] + req['exp']) if req.get('exp') else _('Never')
        amount = req['amount']
        message = self.wallet.labels.get(addr, '')
        self.parent.receive_address_e.setText(addr)
        self.parent.receive_message_e.setText(message)
        self.parent.receive_amount_e.setAmount(amount)
        self.parent.expires_combo.hide()
        self.parent.expires_label.show()
        self.parent.expires_label.setText(expires)
        self.parent.new_request_button.setEnabled(True)

    def on_update(self):
        """Rebuild the list from the wallet's stored receive requests."""
        self.wallet = self.parent.wallet
        # hide receive tab if no receive requests available
        b = len(self.wallet.receive_requests) > 0
        self.setVisible(b)
        self.parent.receive_requests_label.setVisible(b)
        if not b:
            self.parent.expires_label.hide()
            self.parent.expires_combo.show()
        # update the receive address if necessary
        current_address = self.parent.receive_address_e.text()
        domain = self.wallet.get_receiving_addresses()
        addr = self.wallet.get_unused_address()
        # PEP 8: 'not in' instead of 'not ... in'.
        if current_address not in domain and addr:
            self.parent.set_receive_address(addr)
        self.parent.new_request_button.setEnabled(addr != current_address)
        # clear the list and fill it again
        self.clear()
        for req in self.wallet.get_sorted_requests(self.config):
            address = req['address']
            if address not in domain:
                continue
            timestamp = req.get('time', 0)
            amount = req.get('amount')
            expiration = req.get('exp', None)
            message = req.get('memo', '')
            date = format_time(timestamp)
            status = req.get('status')
            signature = req.get('sig')
            requestor = req.get('name', '')
            amount_str = self.parent.format_amount(amount) if amount else ""
            item = QTreeWidgetItem([date, address, '', message, amount_str, pr_tooltips.get(status, '')])
            if signature is not None:
                item.setIcon(2, QIcon(":icons/seal.png"))
                item.setToolTip(2, 'signed by ' + requestor)
            # Value comparison instead of identity ('is not') for the status
            # constant; equal values behave identically here.
            if status != PR_UNKNOWN:
                # NOTE(review): column index 6 exceeds the 6 columns (0-5)
                # created above; the status icon likely belongs in column 5
                # (Status) — confirm against upstream before changing.
                item.setIcon(6, QIcon(pr_icons.get(status)))
            self.addTopLevelItem(item)

    def create_menu(self, position):
        """Build the right-click context menu for a request row."""
        item = self.itemAt(position)
        if not item:
            return
        addr = str(item.text(1))
        # NOTE(review): `req` is unused, but the lookup raises KeyError for
        # an unknown address — kept to preserve that behavior.
        req = self.wallet.receive_requests[addr]
        column = self.currentColumn()
        column_title = self.headerItem().text(column)
        column_data = item.text(column)
        menu = QMenu(self)
        menu.addAction(_("Copy %s") % column_title, lambda: self.parent.app.clipboard().setText(column_data))
        menu.addAction(_("Copy URI"), lambda: self.parent.view_and_paste('URI', '', self.parent.get_request_URI(addr)))
        menu.addAction(_("Save as BIP70 file"), lambda: self.parent.export_payment_request(addr))
        menu.addAction(_("Delete"), lambda: self.parent.delete_payment_request(addr))
        run_hook('receive_list_menu', menu, addr)
        menu.exec_(self.viewport().mapToGlobal(position))
/KratosCoSimulationApplication-9.4-cp39-cp39-win_amd64.whl/KratosMultiphysics/CoSimulationApplication/solver_wrappers/external/flower_wrapper.py | import KratosMultiphysics as KM
# Importing the base class
from KratosMultiphysics.CoSimulationApplication.base_classes.co_simulation_solver_wrapper import CoSimulationSolverWrapper
# Other imports
from KratosMultiphysics.CoSimulationApplication.utilities import model_part_utilities
from KratosMultiphysics.CoSimulationApplication.utilities.data_communicator_utilities import GetRankZeroDataCommunicator
def Create(settings, model, solver_name):
    """Factory entry point used by the CoSimulation framework to instantiate
    this solver wrapper."""
    return FLOWerWrapper(settings, model, solver_name)
class FLOWerWrapper(CoSimulationSolverWrapper):
    """Wrapper for the CFD solver FLOWer.

    FLOWer runs as an external executable; this wrapper only prepares the
    coupling interfaces (model parts read from mdpa files, sent to and
    received from FLOWer) and forwards the coupling data each solution step.
    Communication goes through the EmpireIO, the only IO FLOWer supports.
    """

    def __init__(self, settings, model, solver_name):
        super().__init__(settings, model, solver_name)

        settings_defaults = KM.Parameters("""{
            "model_parts_read"      : { },
            "model_parts_send"      : { },
            "model_parts_recv"      : { },
            "export_data"           : [ ],
            "import_data"           : [ ],
            "write_received_meshes" : false
        }""")

        self.settings["solver_wrapper_settings"].ValidateAndAssignDefaults(settings_defaults)

        # Create the main model parts and allocate the historical variables
        # required by the coupling data before anything is read or received.
        model_part_utilities.CreateMainModelPartsFromCouplingDataSettings(self.settings["data"], self.model, self.name)
        model_part_utilities.AllocateHistoricalVariablesFromCouplingDataSettings(self.settings["data"], self.model, self.name)

    def Initialize(self):
        # Read the model parts that are available as mdpa files.
        for main_model_part_name, mdpa_file_name in self.settings["solver_wrapper_settings"]["model_parts_read"].items():
            KM.ModelPartIO(mdpa_file_name.GetString()).ReadModelPart(self.model[main_model_part_name])

        # Export the coupling-interface meshes to FLOWer.
        for model_part_name, comm_name in self.settings["solver_wrapper_settings"]["model_parts_send"].items():
            interface_config = {
                "comm_name" : comm_name.GetString(),
                "model_part_name" : model_part_name
            }
            self.ExportCouplingInterface(interface_config)

        # Import the coupling-interface meshes from FLOWer.
        write_received_meshes = self.settings["solver_wrapper_settings"]["write_received_meshes"].GetBool()
        for model_part_name, comm_name in self.settings["solver_wrapper_settings"]["model_parts_recv"].items():
            interface_config = {
                "comm_name" : comm_name.GetString(),
                "model_part_name" : model_part_name
            }
            self.ImportCouplingInterface(interface_config)

            # BUGFIX: this block used to sit *after* the loop, which wrote only
            # the mesh received last -- or, with an empty "model_parts_recv",
            # reused the stale loop variable of the send-loop above. Each
            # received mesh is now written right after it is imported.
            if write_received_meshes:
                KM.ModelPartIO(model_part_name, KM.IO.WRITE | KM.IO.MESH_ONLY | KM.IO.SKIP_TIMER).WriteModelPart(self.model[model_part_name])

        super().Initialize()

    def SolveSolutionStep(self):
        # Send the coupling data to FLOWer ...
        for data_name in self.settings["solver_wrapper_settings"]["export_data"].GetStringArray():
            data_config = {
                "type" : "coupling_interface_data",
                "interface_data" : self.GetInterfaceData(data_name)
            }
            self.ExportData(data_config)

        # ... let it advance the step ...
        super().SolveSolutionStep()

        # ... and fetch the results back.
        for data_name in self.settings["solver_wrapper_settings"]["import_data"].GetStringArray():
            data_config = {
                "type" : "coupling_interface_data",
                "interface_data" : self.GetInterfaceData(data_name)
            }
            self.ImportData(data_config)

    def AdvanceInTime(self, current_time):
        # FLOWer controls its own time stepping.
        return 0.0 # TODO find a better solution here... maybe get time from solver through IO

    def _GetIOType(self):
        return "empire_io" # FLOWer currently only supports the EmpireIO

    def _GetDataCommunicator(self):
        # this solver does not support MPI
        # more specifically the EmpireIO does not support MPI
        # since FLOWer only uses the EmpireIO this has to be hardcoded (for now)
        return GetRankZeroDataCommunicator()
// Element UI locale bundle: Czech (cs-CZ).
// UMD wrapper: registers as an AMD module, a CommonJS module, or attaches
// itself to the global ELEMENT.lang registry when loaded via <script>.
(function (global, factory) {
  if (typeof define === "function" && define.amd) {
    define('element/locale/cs-CZ', ['module', 'exports'], factory);
  } else if (typeof exports !== "undefined") {
    factory(module, exports);
  } else {
    var mod = {
      exports: {}
    };
    factory(mod, mod.exports);
    global.ELEMENT.lang = global.ELEMENT.lang || {};
    global.ELEMENT.lang.csCZ = mod.exports;
  }
})(this, function (module, exports) {
  'use strict';

  exports.__esModule = true;
  // Translated strings, keyed by component name under the `el` namespace.
  exports.default = {
    el: {
      colorpicker: {
        confirm: 'OK',
        clear: 'Vymazat'
      },
      datepicker: {
        now: 'Teď',
        today: 'Dnes',
        cancel: 'Zrušit',
        clear: 'Vymazat',
        confirm: 'OK',
        selectDate: 'Vybrat datum',
        selectTime: 'Vybrat čas',
        startDate: 'Datum začátku',
        startTime: 'Čas začátku',
        endDate: 'Datum konce',
        endTime: 'Čas konce',
        prevYear: 'Předchozí rok',
        nextYear: 'Příští rok',
        prevMonth: 'Předchozí měsíc',
        nextMonth: 'Příští měsíc',
        day: 'Den',
        week: 'Týden',
        month: 'Měsíc',
        year: 'Rok',
        month1: 'Leden',
        month2: 'Únor',
        month3: 'Březen',
        month4: 'Duben',
        month5: 'Květen',
        month6: 'Červen',
        month7: 'Červenec',
        month8: 'Srpen',
        month9: 'Září',
        month10: 'Říjen',
        month11: 'Listopad',
        month12: 'Prosinec',
        weeks: {
          sun: 'Ne',
          mon: 'Po',
          tue: 'Út',
          wed: 'St',
          thu: 'Čt',
          fri: 'Pá',
          sat: 'So'
        },
        months: {
          jan: 'Led',
          feb: 'Úno',
          mar: 'Bře',
          apr: 'Dub',
          may: 'Kvě',
          jun: 'Čer',
          jul: 'Čvc',
          aug: 'Srp',
          sep: 'Zář',
          oct: 'Říj',
          nov: 'Lis',
          dec: 'Pro'
        }
      },
      select: {
        loading: 'Načítání',
        noMatch: 'Žádná shoda',
        noData: 'Žádná data',
        placeholder: 'Vybrat'
      },
      cascader: {
        noMatch: 'Žádná shoda',
        loading: 'Načítání',
        placeholder: 'Vybrat',
        noData: 'Žádná data'
      },
      pagination: {
        goto: 'Jít na',
        pagesize: 'na stranu',
        total: 'Celkem {total}',
        pageClassifier: ''
      },
      messagebox: {
        title: 'Zpráva',
        confirm: 'OK',
        cancel: 'Zrušit',
        error: 'Neplatný vstup'
      },
      upload: {
        deleteTip: 'Stisknout pro smazání',
        delete: 'Vymazat',
        preview: 'Náhled',
        continue: 'Pokračovat'
      },
      table: {
        emptyText: 'Žádná data',
        confirmFilter: 'Potvrdit',
        resetFilter: 'Resetovat',
        clearFilter: 'Vše',
        sumText: 'Celkem'
      },
      tree: {
        emptyText: 'Žádná data'
      },
      transfer: {
        noMatch: 'Žádná shoda',
        noData: 'Žádná data',
        titles: ['Seznam 1', 'Seznam 2'],
        filterPlaceholder: 'Klíčové slovo',
        noCheckedFormat: '{total} položek',
        hasCheckedFormat: '{checked}/{total} vybráno'
      },
      image: {
        error: 'FAILED' // to be translated
      },
      pageHeader: {
        title: 'Back' // to be translated
      }
    }
  };
  module.exports = exports['default'];
});
/Mezzanine-6.0.0.tar.gz/Mezzanine-6.0.0/docs/deployment.rst | ==========
Deployment
==========
Deployment of a Mezzanine site to production is mostly identical to
deploying a regular Django site. For serving static content, Mezzanine
makes full use of Django's ``staticfiles`` app. For more information,
see the Django docs for
`deployment <https://docs.djangoproject.com/en/dev/howto/deployment/>`_ and
`staticfiles <https://docs.djangoproject.com/en/dev/howto/static-files/>`_.
Mezzanine's only customization to the deployment process is adding built-in
support for a ``local_settings.py`` file. This file is not kept under version
control and you can use it to include production-only configuration.
.. versionchanged:: 5.0
    Mezzanine previously shipped a fabfile for automated deployments. It
    has been removed in favor of standard Django deployment methods.
| PypiClean |
class OpenApiException(Exception):
    """Root of the exception hierarchy raised by this API client."""
class ApiTypeError(OpenApiException, TypeError):
    """Raised when a value does not have the expected type.

    Attributes:
        path_to_item: list of keys/indices leading to the offending item,
            or None if unknown.
        valid_classes: tuple of primitive classes the item should be an
            instance of, or None if unknown.
        key_type: True if the offending value is a dict key, False if it is
            a dict value or list item, None if unknown.
    """

    def __init__(self, msg, path_to_item=None, valid_classes=None,
                 key_type=None):
        self.path_to_item = path_to_item
        self.valid_classes = valid_classes
        self.key_type = key_type
        # Append the rendered path so the message points at the exact item.
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super().__init__(msg)
class ApiValueError(OpenApiException, ValueError):
    """Raised when a value is of the right type but invalid.

    Attributes:
        path_to_item: list of keys/indices locating the invalid value within
            the received data, or None if unknown.
    """

    def __init__(self, msg, path_to_item=None):
        self.path_to_item = path_to_item
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super().__init__(msg)
class ApiAttributeError(OpenApiException, AttributeError):
    """Raised when an attribute reference or assignment fails.

    Attributes:
        path_to_item: list of keys/indices locating the failure within the
            received data, or None if unknown.
    """

    def __init__(self, msg, path_to_item=None):
        self.path_to_item = path_to_item
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super().__init__(msg)
class ApiKeyError(OpenApiException, KeyError):
    """Raised when a required key is missing from the received data.

    Attributes:
        path_to_item: list of keys/indices locating the missing key within
            the received data, or None if unknown.
    """

    def __init__(self, msg, path_to_item=None):
        self.path_to_item = path_to_item
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super().__init__(msg)
class ApiException(OpenApiException):
    """Raised when an HTTP call to the API fails.

    When a response object is available its status, reason phrase, body and
    headers are captured; otherwise the explicit ``status``/``reason``
    arguments are used and body/headers are None.
    """

    def __init__(self, status=None, reason=None, http_resp=None):
        if http_resp:
            # Prefer the data carried by the actual HTTP response object.
            self.status = http_resp.status
            self.reason = http_resp.reason
            self.body = http_resp.data
            self.headers = http_resp.getheaders()
        else:
            self.status = status
            self.reason = reason
            self.body = None
            self.headers = None

    def __str__(self):
        """Render status/reason, plus headers and body when present."""
        parts = [
            "({0})\n".format(self.status),
            "Reason: {0}\n".format(self.reason),
        ]
        if self.headers:
            parts.append("HTTP response headers: {0}\n".format(self.headers))
        if self.body:
            parts.append("HTTP response body: {0}\n".format(self.body))
        return "".join(parts)
class NotFoundException(ApiException):
    """API exception subclass -- typically raised for HTTP 404 responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
class UnauthorizedException(ApiException):
    """API exception subclass -- typically raised for HTTP 401 responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
class ForbiddenException(ApiException):
    """API exception subclass -- typically raised for HTTP 403 responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
class ServiceException(ApiException):
    """API exception subclass -- typically raised for HTTP 5xx responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
def render_path(path_to_item):
    """Return a bracketed string locating an item, e.g. ``['a'][0]['b']``.

    Integer path components render as list indices (``[0]``), everything
    else as quoted dict keys (``['key']``).
    """
    parts = []
    for component in path_to_item:
        template = "[{0}]" if isinstance(component, int) else "['{0}']"
        parts.append(template.format(component))
    return "".join(parts)
/Biomatters_Azimuth-0.2.6-py3-none-any.whl/azimuth/models/baselines.py | import numpy as np
import sklearn
from sklearn.svm import LinearSVC
from sklearn.linear_model import LogisticRegression
import sklearn.linear_model
import pandas
def mean_on_fold(feature_sets, train, test, y, y_all, inputs, dim, dimsum, learn_options):
    """Trivial baseline: predict the training-set mean of ``y`` for every
    test sample. Returns (predictions as an (n_test, 1) column, None)."""
    prediction = np.full((test.sum(), 1), y[train].mean())
    return prediction, None
def random_on_fold(feature_sets, train, test, y, y_all, inputs, dim, dimsum, learn_options):
    """Trivial baseline: standard-normal noise, one row per test sample.
    Returns (random (n_test, 1) column, None)."""
    n_test = test.sum()
    return np.random.randn(n_test, 1), None
def xu_et_al_on_fold(feature_sets, train, test, y, y_all, X, dim, dimsum, learn_options):
    """Static baseline from Xu et al.: score guides with a fixed weight
    matrix loaded from ``learn_options['xu_matrix_file']`` -- no fitting.

    Returns (y_pred, coef): sigmoid scores for the test rows (column vector)
    and the flattened coefficient column used to produce them.
    """
    # One weight per position/nucleotide, tab-separated, first row skipped.
    coef = pandas.read_csv(learn_options['xu_matrix_file'], skiprows=1, delimiter='\t')
    coef = coef[['A', 'T', 'C', 'G']] # swap columns so that they are in correct order
    coef = coef.values.flatten()[:, None]
    X = X.copy()
    # Pad 12 zero-columns on the right, then drop the first 12 columns --
    # i.e. shift the features left by 3 positions * 4 nucleotide channels.
    # Presumably this aligns the sequence features with the window the Xu
    # matrix was derived on -- TODO confirm against the feature encoding.
    X = np.append(X, np.zeros((X.shape[0], 3*4)), axis=1)
    X = X[:, 3*4:]
    # Logistic squashing of the linear score.
    y_pred = 1./(1+np.exp(-np.dot(X[test], coef)))
    return y_pred, coef
def doench_on_fold(feature_sets, train, test, y, y_all, X, dim, dimsum, learn_options):
    """Doench-style two-stage baseline.

    Stage 1: an L1-penalized LinearSVC selects features (non-zero
    coefficients), with the penalty C chosen by 10-fold CV stratified by
    target gene, maximizing ROC AUC on the binarized target.
    Stage 2: an L2-penalized LogisticRegression is fit on the selected
    features against ``y``.

    Returns (y_pred, clf): class-1 probabilities for the test rows as a
    column vector, and the fitted logistic-regression model.

    NOTE(review): uses the long-removed ``sklearn.cross_validation`` module,
    so this requires scikit-learn < 0.20; porting to ``model_selection`` is
    a separate, API-changing task.
    """
    auto_class_weight = None  # 'auto'/None
    verbose = False
    # Penalty grid; ian's code: tvals = [0.005*pow(1.15,x) for x in range(0,45)]
    penalty = [0.005*pow(1.15, x) for x in range(0, 45)]

    y_bin = y_all[learn_options['binary target name']].values[:, None]

    # Stratify the inner CV by target gene so every fold sees all genes.
    label_encoder = sklearn.preprocessing.LabelEncoder()
    label_encoder.fit(y_all['Target gene'].values[train])
    gene_classes = label_encoder.transform(y_all['Target gene'].values[train])
    cv = sklearn.cross_validation.StratifiedKFold(gene_classes, n_folds=10, shuffle=True)

    best_penalty = None
    cv_results = np.zeros((10, len(penalty)))
    for j, split in enumerate(cv):
        train_inner, test_inner = split
        for i, c in enumerate(penalty):
            # fit an L1-penalized SVM classifier
            clf = LinearSVC(penalty='l1', C=c, dual=False, class_weight=auto_class_weight)
            clf.fit(X[train][train_inner], y_bin[train][train_inner].flatten())

            # pass features with non-zero coeff to Logistic with l2 penalty
            non_zero_coeff = (clf.coef_ != 0.0)
            # BUGFIX: this guard used to read "np.all(non_zero_coeff is False)",
            # which is an *identity* comparison of the array object against
            # False and therefore always False -- the fallback never fired and
            # an all-zero selection crashed the logistic fit below.
            if not non_zero_coeff.any():
                # if all are zero, turn one on so as to be able to run the code.
                non_zero_coeff[0] = True

            clf = LogisticRegression(penalty='l2', class_weight=auto_class_weight)
            clf.fit(X[train][train_inner][:, non_zero_coeff.flatten()], y[train][train_inner].flatten())
            y_test = clf.predict_proba(X[train][test_inner][:, non_zero_coeff.flatten()])[:, 1]

            fpr, tpr, _ = sklearn.metrics.roc_curve(y_bin[train][test_inner], y_test)
            # BUGFIX: the old checks "np.nan not in fpr" were vacuously true
            # (NaN never compares equal to anything); use np.isnan instead.
            assert not np.isnan(fpr).any(), "found nan fpr"
            assert not np.isnan(tpr).any(), "found nan tpr"
            roc_auc = sklearn.metrics.auc(fpr, tpr)
            if verbose:
                print(j, i, roc_auc)
            cv_results[j][i] = roc_auc

    # Pick the penalty with the best mean AUC across folds, then refit the
    # whole two-stage pipeline on the full training split.
    best_penalty = penalty[np.argmax(np.mean(cv_results, axis=0))]
    print("best AUC for penalty: ", np.median(cv_results, axis=0))
    clf = LinearSVC(penalty='l1', C=best_penalty, dual=False, class_weight=auto_class_weight)
    clf.fit(X[train], y_bin[train].flatten())
    non_zero_coeff = (clf.coef_ != 0.0)
    if not non_zero_coeff.any():
        # Same fallback as in the CV loop: always keep at least one feature.
        non_zero_coeff[0] = True

    clf = LogisticRegression(penalty='l2', class_weight=auto_class_weight)
    clf.fit(X[train][:, non_zero_coeff.flatten()], y[train].flatten())
    y_pred = clf.predict_proba(X[test][:, non_zero_coeff.flatten()])[:, 1:2]
    return y_pred, clf
def sgrna_from_doench_on_fold(feature_sets, train, test, y, y_all, X, dim, dimsum, learn_options):
    """Pass-through baseline: the only feature set must be the precomputed
    "sgRNA Score", which is returned unchanged for the test rows."""
    feature_names = list(feature_sets)
    assert len(feature_names) == 1, "should only use sgRNA Score here"
    assert feature_names[0] == "sgRNA Score"
    return X[test][:, 0], None
def SVC_on_fold(feature_sets, train, test, y, y_all, X, dim, dimsum, learn_options):
    """Binary linear-SVM baseline: fit on the binarized target and score the
    test rows with the signed decision-function margin (a continuous score,
    unlike ``predict`` which would return hard 0/1 labels)."""
    labels = y_all[learn_options['binary target name']].values[:, None]
    model = LinearSVC(penalty='l2', dual=False)
    model.fit(X[train], labels[train].flatten())
    margin = model.decision_function(X[test])[:, None]
    return margin, model
/Flask_JSONRPC-2.2.2-py3-none-any.whl/flask_jsonrpc/contrib/browse/static/js/libs/angular/angular-sanitize.min.js | (function(m,g,n){'use strict';function h(a){var d={};a=a.split(",");var c;for(c=0;c<a.length;c++)d[a[c]]=!0;return d}function D(a,d){function c(a,b,c,f){b=g.lowercase(b);if(r[b])for(;e.last()&&s[e.last()];)k("",e.last());t[b]&&e.last()==b&&k("",b);(f=u[b]||!!f)||e.push(b);var l={};c.replace(E,function(a,b,d,c,e){l[b]=p(d||c||e||"")});d.start&&d.start(b,l,f)}function k(a,b){var c=0,k;if(b=g.lowercase(b))for(c=e.length-1;0<=c&&e[c]!=b;c--);if(0<=c){for(k=e.length-1;k>=c;k--)d.end&&d.end(e[k]);e.length=
c}}var b,f,e=[],l=a;for(e.last=function(){return e[e.length-1]};a;){f=!0;if(e.last()&&v[e.last()])a=a.replace(RegExp("(.*)<\\s*\\/\\s*"+e.last()+"[^>]*>","i"),function(a,b){b=b.replace(F,"$1").replace(G,"$1");d.chars&&d.chars(p(b));return""}),k("",e.last());else{if(0===a.indexOf("\x3c!--"))b=a.indexOf("--",4),0<=b&&a.lastIndexOf("--\x3e",b)===b&&(d.comment&&d.comment(a.substring(4,b)),a=a.substring(b+3),f=!1);else if(w.test(a)){if(b=a.match(w))a=a.replace(b[0],""),f=!1}else if(H.test(a)){if(b=a.match(x))a=
a.substring(b[0].length),b[0].replace(x,k),f=!1}else I.test(a)&&(b=a.match(y))&&(a=a.substring(b[0].length),b[0].replace(y,c),f=!1);f&&(b=a.indexOf("<"),f=0>b?a:a.substring(0,b),a=0>b?"":a.substring(b),d.chars&&d.chars(p(f)))}if(a==l)throw J("badparse",a);l=a}k()}function p(a){q.innerHTML=a.replace(/</g,"<");return q.innerText||q.textContent||""}function z(a){return a.replace(/&/g,"&").replace(K,function(a){return"&#"+a.charCodeAt(0)+";"}).replace(/</g,"<").replace(/>/g,">")}function A(a){var d=
!1,c=g.bind(a,a.push);return{start:function(a,b,f){a=g.lowercase(a);!d&&v[a]&&(d=a);d||!0!==B[a]||(c("<"),c(a),g.forEach(b,function(a,b){var d=g.lowercase(b);!0!==L[d]||!0===C[d]&&!a.match(M)||(c(" "),c(b),c('="'),c(z(a)),c('"'))}),c(f?"/>":">"))},end:function(a){a=g.lowercase(a);d||!0!==B[a]||(c("</"),c(a),c(">"));a==d&&(d=!1)},chars:function(a){d||c(z(a))}}}var J=g.$$minErr("$sanitize"),y=/^<\s*([\w:-]+)((?:\s+[\w:-]+(?:\s*=\s*(?:(?:"[^"]*")|(?:'[^']*')|[^>\s]+))?)*)\s*(\/?)\s*>/,x=/^<\s*\/\s*([\w:-]+)[^>]*>/,
E=/([\w:-]+)(?:\s*=\s*(?:(?:"((?:[^"])*)")|(?:'((?:[^'])*)')|([^>\s]+)))?/g,I=/^</,H=/^<\s*\//,F=/\x3c!--(.*?)--\x3e/g,w=/<!DOCTYPE([^>]*?)>/i,G=/<!\[CDATA\[(.*?)]]\x3e/g,M=/^((ftp|https?):\/\/|mailto:|tel:|#)/i,K=/([^\#-~| |!])/g,u=h("area,br,col,hr,img,wbr");m=h("colgroup,dd,dt,li,p,tbody,td,tfoot,th,thead,tr");n=h("rp,rt");var t=g.extend({},n,m),r=g.extend({},m,h("address,article,aside,blockquote,caption,center,del,dir,div,dl,figure,figcaption,footer,h1,h2,h3,h4,h5,h6,header,hgroup,hr,ins,map,menu,nav,ol,pre,script,section,table,ul")),
s=g.extend({},n,h("a,abbr,acronym,b,bdi,bdo,big,br,cite,code,del,dfn,em,font,i,img,ins,kbd,label,map,mark,q,ruby,rp,rt,s,samp,small,span,strike,strong,sub,sup,time,tt,u,var")),v=h("script,style"),B=g.extend({},u,r,s,t),C=h("background,cite,href,longdesc,src,usemap"),L=g.extend({},C,h("abbr,align,alt,axis,bgcolor,border,cellpadding,cellspacing,class,clear,color,cols,colspan,compact,coords,dir,face,headers,height,hreflang,hspace,ismap,lang,language,nohref,nowrap,rel,rev,rows,rowspan,rules,scope,scrolling,shape,span,start,summary,target,title,type,valign,value,vspace,width")),
q=document.createElement("pre");g.module("ngSanitize",[]).value("$sanitize",function(a){var d=[];D(a,A(d));return d.join("")});g.module("ngSanitize").filter("linky",function(){var a=/((ftp|https?):\/\/|(mailto:)?[A-Za-z0-9._%+-]+@)\S*[^\s.;,(){}<>]/,d=/^mailto:/;return function(c,k){if(!c)return c;var b,f=c,e=[],l=A(e),h,m,n={};g.isDefined(k)&&(n.target=k);for(;b=f.match(a);)h=b[0],b[2]==b[3]&&(h="mailto:"+h),m=b.index,l.chars(f.substr(0,m)),n.href=h,l.start("a",n),l.chars(b[0].replace(d,"")),l.end("a"),
f=f.substring(m+b[0].length);l.chars(f);return e.join("")}})})(window,window.angular);
//# sourceMappingURL=angular-sanitize.min.js.map | PypiClean |
/GeoNode-3.2.0-py3-none-any.whl/geonode/security/oauth2_validators.py | from oauth2_provider.settings import oauth2_settings
from oauth2_provider.oauth2_validators import OAuth2Validator
import json
import base64
import hashlib
import logging
from datetime import datetime, timedelta
from django.utils import dateformat, timezone
from jwcrypto import jwk, jwt
log = logging.getLogger(__name__)
class OIDCValidator(OAuth2Validator):
    """OAuth2 request validator extended with OpenID Connect ID-token minting.

    Extends django-oauth-toolkit's ``OAuth2Validator`` to build and RS256-sign
    the OIDC ID Token (via jwcrypto) for the supported flows. The base class
    can additionally be overridden with custom user validation, e.g. a
    ``validate_user`` that falls back to a social-login token check when the
    username/password pair does not match an active local user.
    """
    def get_authorization_code_nonce(self, client_id, code, redirect_uri, request):
        # Nonces are not persisted for authorization codes in this
        # implementation, so none can be recovered here.
        return None
    def get_id_token(self, token, token_handler, request):
        # Signing key: the configured RSA private key (RS256).
        key = jwk.JWK.from_pem(oauth2_settings.OIDC_RSA_PRIVATE_KEY.encode("utf8"))
        # TODO: http://openid.net/specs/openid-connect-core-1_0.html#HybridIDToken2
        # Save the id_token on database bound to code when the request come to
        # Authorization Endpoint and return the same one when request come to
        # Token Endpoint
        # TODO: Check if at this point this request parameters are alredy validated
        expiration_time = timezone.now() + timedelta(seconds=oauth2_settings.ID_TOKEN_EXPIRE_SECONDS)
        # Required ID Token claims
        claims = {
            "iss": oauth2_settings.OIDC_ISS_ENDPOINT,
            "sub": str(request.user.id),
            "aud": request.client_id,
            "exp": int(dateformat.format(expiration_time, "U")),
            "iat": int(dateformat.format(datetime.utcnow(), "U")),
            "auth_time": int(dateformat.format(request.user.last_login, "U"))
        }
        nonce = getattr(request, "nonce", None)
        if nonce:
            claims["nonce"] = nonce
        # TODO: create a function to check if we should add at_hash
        # http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken
        # http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDToken
        # if request.grant_type in 'authorization_code' and 'access_token' in token:
        if (request.grant_type == "authorization_code" and "access_token" in token) or \
            request.response_type == "code id_token token" or \
            (request.response_type == "id_token token" and "access_token" in token):
            acess_token = token["access_token"]  # NOTE(review): typo'd local name ("acess")
            sha256 = hashlib.sha256(acess_token.encode("ascii"))
            # NOTE(review): OIDC Core 3.1.3.6 defines at_hash as the base64url
            # encoding of the left-most half of the *digest octets*; taking 16
            # hex characters (64 bits) of hexdigest() and base64url-encoding
            # that ASCII text looks non-conforming. Confirm client interop
            # before changing -- existing consumers may depend on this value.
            bits128 = sha256.hexdigest()[:16]
            at_hash = base64.urlsafe_b64encode(bits128.encode("ascii"))
            claims['at_hash'] = at_hash.decode("utf8")
        # TODO: create a function to check if we should include c_hash
        # http://openid.net/specs/openid-connect-core-1_0.html#HybridIDToken
        if request.response_type in ("code id_token", "code id_token token"):
            code = token["code"]
            sha256 = hashlib.sha256(code.encode("ascii"))
            # NOTE(review): same concern as at_hash above -- 32 hex characters,
            # not the left-most half of the digest octets.
            bits256 = sha256.hexdigest()[:32]
            c_hash = base64.urlsafe_b64encode(bits256.encode("ascii"))
            claims["c_hash"] = c_hash.decode("utf8")
        jwt_token = jwt.JWT(header=json.dumps({"alg": "RS256"}, default=str), claims=json.dumps(claims, default=str))
        jwt_token.make_signed_token(key)
        # Persist the signed token bound to this request/expiry.
        id_token = self._save_id_token(jwt_token, request, expiration_time)
        # this is needed by django rest framework
        request.access_token = id_token
        request.id_token = id_token
        return jwt_token.serialize()
/LEPL-5.1.3.zip/LEPL-5.1.3/src/lepl/support/_test/node.py | #from logging import basicConfig, DEBUG, INFO
from unittest import TestCase
from lepl import Delayed, Digit, Any, Node, make_error, node_throw, Or, Space, \
AnyBut, Eos
from lepl.support.graph import order, PREORDER, POSTORDER, LEAF
from lepl._test.base import assert_str
# pylint: disable-msg=C0103, C0111, C0301, W0702, C0324, C0102, C0321, R0201, R0903
# (dude this is just a test)
class NodeTest(TestCase):
    """Builds a small arithmetic grammar with typed Node subclasses and checks
    the rendered AST tree."""
    def test_node(self):
        """Parse an arithmetic expression and compare the Node tree dump."""
        #basicConfig(level=DEBUG)
        # Node subclasses used to tag the grammar productions.
        class Term(Node): pass
        class Factor(Node): pass
        class Expression(Node): pass
        # Grammar: number | parenthesised expression, with * / + - operators.
        expression = Delayed()
        number = Digit()[1:,...] > 'number'
        term = (number | '(' / expression / ')') > Term
        muldiv = Any('*/') > 'operator'
        factor = (term / (muldiv / term)[0::]) > Factor
        addsub = Any('+-') > 'operator'
        expression += (factor / (addsub / factor)[0::]) > Expression
        p = expression.get_parse_string()
        ast = p('1 + 2 * (3 + 4 - 5)')
        # Expected tree rendering, including the whitespace leaves.
        assert_str(ast[0], """Expression
 +- Factor
 |   +- Term
 |   |   `- number '1'
 |   `- ' '
 +- operator '+'
 +- ' '
 `- Factor
     +- Term
     |   `- number '2'
     +- ' '
     +- operator '*'
     +- ' '
     `- Term
         +- '('
         +- Expression
         |   +- Factor
         |   |   +- Term
         |   |   |   `- number '3'
         |   |   `- ' '
         |   +- operator '+'
         |   +- ' '
         |   +- Factor
         |   |   +- Term
         |   |   |   `- number '4'
         |   |   `- ' '
         |   +- operator '-'
         |   +- ' '
         |   `- Factor
         |       `- Term
         |           `- number '5'
         `- ')'""")
class ListTest(TestCase):
    """Same grammar as NodeTest but producing plain nested lists instead of
    Node subclasses."""
    def test_list(self):
        """Parse an arithmetic expression into nested lists/tuples."""
        #basicConfig(level=DEBUG)
        expression = Delayed()
        number = Digit()[1:,...] > 'number'
        term = (number | '(' / expression / ')') > list
        muldiv = Any('*/') > 'operator'
        factor = (term / (muldiv / term)[0:]) > list
        addsub = Any('+-') > 'operator'
        expression += (factor / (addsub / factor)[0:]) > list
        ast = expression.parse_string('1 + 2 * (3 + 4 - 5)')
        # Named results become (name, value) tuples; whitespace is kept.
        assert ast == [[[[('number', '1')], ' '], ('operator', '+'), ' ', [[('number', '2')], ' ', ('operator', '*'), ' ', ['(', [[[('number', '3')], ' '], ('operator', '+'), ' ', [[('number', '4')], ' '], ('operator', '-'), ' ', [[('number', '5')]]], ')']]]], ast
class ErrorTest(TestCase):
    """Checks that make_error/node_throw surface grammar problems as
    SyntaxError with the templated messages."""
    def test_error(self):
        """Each malformed input must raise SyntaxError with a specific msg."""
        #basicConfig(level=INFO)
        class Term(Node): pass
        class Factor(Node): pass
        class Expression(Node): pass
        expression = Delayed()
        number = Digit()[1:,...] > 'number'
        # Alternatives include error productions for unbalanced parentheses
        # and unexpected text; node_throw raises the embedded error node.
        term = Or(
            AnyBut(Space() | Digit() | '(')[1:,...] ^ 'unexpected text: {results[0]}',
            number > Term,
            number ** make_error("no ( before {out_rest}") / ')' >> node_throw,
            '(' / expression / ')' > Term,
            ('(' / expression / Eos()) ** make_error("no ) for {in_rest}") >> node_throw)
        muldiv = Any('*/') > 'operator'
        factor = (term / (muldiv / term)[0:,r'\s*']) > Factor
        addsub = Any('+-') > 'operator'
        expression += (factor / (addsub / factor)[0:,r'\s*']) > Expression
        line = expression / Eos()
        parser = line.get_parse_string()
        # Closing parenthesis with no matching opener.
        try:
            parser('1 + 2 * 3 + 4 - 5)')[0]
            assert False, 'expected error'
        except SyntaxError as e:
            assert e.msg == "no ( before ')'", e.msg
        # Opening parenthesis that is never closed.
        try:
            parser('1 + 2 * (3 + 4 - 5')
            assert False, 'expected error'
        except SyntaxError as e:
            assert e.msg == "no ) for '(3 + 4 - 5'", e.msg
        # Token that matches no production.
        try:
            parser('1 + 2 * foo')
            assert False, 'expected error'
        except SyntaxError as e:
            assert e.msg == "unexpected text: foo", e.msg
class EqualityTest(TestCase):
    """Node equality is identity-based; structural comparison goes through
    ``_recursively_eq``."""

    def test_object_eq(self):
        """Two Nodes built from equal data still compare unequal."""
        first = Node('a')
        second = Node('a')
        # Distinct instances: never equal, never identical (both directions).
        for left, right in ((first, second), (second, first)):
            assert left != right
            assert left is not right
        # Every instance equals -- and is -- itself.
        for node in (first, second):
            assert node == node
            assert node is node

    def test_recursive_eq(self):
        """_recursively_eq compares structure and leaf values."""
        original = Node('a', Node('b'))
        same_shape = Node('a', Node('b'))
        different = Node('a', Node('c'))
        assert original._recursively_eq(same_shape)
        assert not original._recursively_eq(different)
class ChildrenTest(TestCase):
    """Iterating a Node yields its children -- here the single string payload."""

    def test_children(self):
        node = Node('a')
        for child in node:
            assert child == 'a', child
class OrderTest(TestCase):
    """Checks pre-/post-order traversal of a Node graph and its rendering."""
    def tree(self):
        """Build a fixed three-level Node tree with leaves 'a'..'l'."""
        return Node('a',
                    Node('b',
                         Node('c',
                              Node('d'),
                              Node('e')),
                         Node('f')),
                    Node('g'),
                    Node('h',
                         Node('i',
                              Node('j'),
                              Node('k')),
                         Node('l')))
    def order(self, tree, flags):
        """Traverse ``tree`` and return only the leaf payloads, in order."""
        return list(map(lambda x: x[0], order(tree, flags, Node, LEAF)))
    def test_orders(self):
        """Leaf payloads in PREORDER and POSTORDER."""
        tree = self.tree()
        ordered = self.order(tree, PREORDER)
        assert ordered == ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l'], ordered
        ordered = self.order(tree, POSTORDER)
        assert ordered == ['d', 'e', 'c', 'f', 'b', 'g', 'j', 'k', 'i', 'l', 'h', 'a'], ordered
    def test_str(self):
        """The ASCII rendering of the same tree."""
        text = str(self.tree())
        assert text == """Node
 +- 'a'
 +- Node
 |   +- 'b'
 |   +- Node
 |   |   +- 'c'
 |   |   +- Node
 |   |   |   `- 'd'
 |   |   `- Node
 |   |       `- 'e'
 |   `- Node
 |       `- 'f'
 +- Node
 |   `- 'g'
 `- Node
     +- 'h'
     +- Node
     |   +- 'i'
     |   +- Node
     |   |   `- 'j'
     |   `- Node
     |       `- 'k'
     `- Node
         `- 'l'""", text
class NestedNamedTest(TestCase):
    """Checks the rendering of named (tuple) children."""
    def tree(self):
        # Children given as (name, node) pairs render under the bare name.
        return Node(('a', Node('A')), ('b', Node('B')))
    def test_str(self):
        text = str(self.tree())
        assert text == """Node
 +- a
 |   `- 'A'
 `- b
     `- 'B'""", text
class NodeEqualityTest(TestCase):
    """Spot checks of ==/!= (identity semantics) versus _recursively_eq
    (structural comparison), including wrapped nodes."""

    def test_equals(self):
        left = Node('abc')
        right = Node('abc')
        # Identity-based operators.
        assert left == left
        assert not (left != left)
        assert not (left == right)
        # Structural comparison sees the equal payloads.
        assert left._recursively_eq(right)
        # Wrapping changes both identity and structure relative to the leaf.
        assert Node(left) != left
        assert Node(left)._recursively_eq(Node(left))
        assert not Node(left)._recursively_eq(left)
/Faker-19.3.1.tar.gz/Faker-19.3.1/faker/providers/person/pt_BR/__init__.py | from .. import Provider as PersonProvider
class Provider(PersonProvider):
    """pt_BR (Brazilian Portuguese) person-name data for Faker."""

    # Name formats. Repeated entries raise the sampling weight of the plain
    # "first last" pattern relative to the prefixed one.
    formats_female = (
        "{{first_name_female}} {{last_name}}",
        "{{first_name_female}} {{last_name}}",
        "{{first_name_female}} {{last_name}}",
        "{{first_name_female}} {{last_name}}",
        "{{first_name_female}} {{last_name}}",
        "{{prefix_female}} {{first_name_female}} {{last_name}}",
    )

    formats_male = (
        "{{first_name_male}} {{last_name}}",
        "{{first_name_male}} {{last_name}}",
        "{{first_name_male}} {{last_name}}",
        "{{first_name_male}} {{last_name}}",
        "{{first_name_male}} {{last_name}}",
        "{{prefix_male}} {{first_name_male}} {{last_name}}",
    )

    formats = formats_male + formats_female

    """
    To a previous (undocumented?) list of female given names was added the 100
    most popular names in Brazil in 2014 and 2015 according to Exame magazine:
    * http://exame.abril.com.br/brasil/noticias/os-100-nomes-mais-comuns-no-brasil-em-2014
    * http://exame.abril.com.br/brasil/noticias/os-100-nomes-mais-comuns-no-brasil-em-2015
    """
    # Note: accented/unaccented variants (e.g. "Ana Julia" / "Ana Júlia")
    # appear side by side on purpose -- both spellings are in common use.
    first_names_female = (
        "Agatha",
        "Alana",
        "Alexia",
        "Alice",
        "Alícia",
        "Amanda",
        "Ana Beatriz",
        "Ana Carolina",
        "Ana Clara",
        "Ana Julia",
        "Ana Júlia",
        "Ana Laura",
        "Ana Luiza",
        "Ana Lívia",
        "Ana Sophia",
        "Ana Vitória",
        "Ana",
        "Beatriz",
        "Bianca",
        "Brenda",
        "Bruna",
        "Bárbara",
        "Camila",
        "Carolina",
        "Caroline",
        "Catarina",
        "Cecília",
        "Clara",
        "Clarice",
        "Daniela",
        "Eduarda",
        "Elisa",
        "Eloah",
        "Emanuella",
        "Emanuelly",
        "Emilly",
        "Esther",
        "Evelyn",
        "Fernanda",
        "Gabriela",
        "Gabrielly",
        "Giovanna",
        "Helena",
        "Heloísa",
        "Isabel",
        "Isabella",
        "Isabelly",
        "Isadora",
        "Isis",
        "Joana",
        "Julia",
        "Juliana",
        "Júlia",
        "Kamilly",
        "Lara",
        "Larissa",
        "Laura",
        "Lavínia",
        "Laís",
        "Letícia",
        "Lorena",
        "Luana",
        "Luiza",
        "Luna",
        "Lívia",
        "Maitê",
        "Manuela",
        "Marcela",
        "Maria Alice",
        "Maria Cecília",
        "Maria Clara",
        "Maria Eduarda",
        "Maria Fernanda",
        "Maria Julia",
        "Maria Luiza",
        "Maria Sophia",
        "Maria Vitória",
        "Maria",
        "Mariana",
        "Mariane",
        "Marina",
        "Maysa",
        "Melissa",
        "Milena",
        "Mirella",
        "Natália",
        "Nicole",
        "Nina",
        "Olivia",
        "Pietra",
        "Rafaela",
        "Raquel",
        "Rebeca",
        "Sabrina",
        "Sarah",
        "Sofia",
        "Sophia",
        "Sophie",
        "Stella",
        "Stephany",
        "Valentina",
        "Vitória",
        "Yasmin",
    )

    """
    To a previous (undocumented?) list of male given names was added the 100
    most popular names in Brazil in 2014 and 2015 according to this blog post:
    * http://exame.abril.com.br/brasil/noticias/os-100-nomes-mais-comuns-no-brasil-em-2014
    * http://exame.abril.com.br/brasil/noticias/os-100-nomes-mais-comuns-no-brasil-em-2015
    """
    first_names_male = (
        "Alexandre",
        "André",
        "Anthony",
        "Antônio",
        "Arthur",
        "Augusto",
        "Benjamin",
        "Benício",
        "Bernardo",
        "Breno",
        "Bruno",
        "Bryan",
        "Caio",
        "Calebe",
        "Carlos Eduardo",
        "Cauã",
        "Cauê",
        "Daniel",
        "Danilo",
        "Davi Lucas",
        "Davi Lucca",
        "Davi Luiz",
        "Davi",
        "Diego",
        "Diogo",
        "Eduardo",
        "Emanuel",
        "Enrico",
        "Enzo Gabriel",
        "Enzo",
        "Erick",
        "Felipe",
        "Fernando",
        "Francisco",
        "Gabriel",
        "Guilherme",
        "Gustavo Henrique",
        "Gustavo",
        "Heitor",
        "Henrique",
        "Ian",
        "Igor",
        "Isaac",
        "Joaquim",
        "João Felipe",
        "João Gabriel",
        "João Guilherme",
        "João Lucas",
        "João Miguel",
        "João Pedro",
        "João Vitor",
        "João",
        "Juan",
        "Kaique",
        "Kevin",
        "Leandro",
        "Leonardo",
        "Levi",
        "Lorenzo",
        "Lucas Gabriel",
        "Lucas",
        "Lucca",
        "Luigi",
        "Luiz Felipe",
        "Luiz Fernando",
        "Luiz Gustavo",
        "Luiz Henrique",
        "Luiz Miguel",
        "Luiz Otávio",
        "Marcelo",
        "Marcos Vinicius",
        "Matheus",
        "Miguel",
        "Murilo",
        "Nathan",
        "Nicolas",
        "Noah",
        "Otávio",
        "Paulo",
        "Pedro Henrique",
        "Pedro Lucas",
        "Pedro Miguel",
        "Pedro",
        "Pietro",
        "Rafael",
        "Raul",
        "Renan",
        "Rodrigo",
        "Ryan",
        "Samuel",
        "Thales",
        "Theo",
        "Thiago",
        "Thomas",
        "Vicente",
        "Vinicius",
        "Vitor Gabriel",
        "Vitor Hugo",
        "Vitor",
        "Yago",
        "Yuri",
    )

    first_names = first_names_male + first_names_female

    """
    To a previous (undocumented?) list of family names was added the 70
    most popular family names in Brazil according to this blog post:
    * http://nomeschiques.com/os-70-sobrenomes-mais-comuns-e-famosos-do-brasil/
    """
    # Note: "Cardoso" is listed twice; the duplicate doubles its sampling
    # weight -- presumably intentional, TODO confirm against upstream intent.
    last_names = (
        "Almeida",
        "Alves",
        "Aragão",
        "Araújo",
        "Azevedo",
        "Barbosa",
        "Barros",
        "Caldeira",
        "Campos",
        "Cardoso",
        "Cardoso",
        "Carvalho",
        "Castro",
        "Cavalcanti",
        "Correia",
        "Costa",
        "Costela",
        "Cunha",
        "da Conceição",
        "da Costa",
        "da Cruz",
        "da Cunha",
        "da Luz",
        "da Mata",
        "da Mota",
        "da Paz",
        "da Rocha",
        "da Rosa",
        "das Neves",
        "Dias",
        "Duarte",
        "Farias",
        "Fernandes",
        "Ferreira",
        "Fogaça",
        "Freitas",
        "Gomes",
        "Gonçalves",
        "Jesus",
        "Lima",
        "Lopes",
        "Martins",
        "Melo",
        "Mendes",
        "Monteiro",
        "Moraes",
        "Moreira",
        "Moura",
        "Nascimento",
        "Nogueira",
        "Novaes",
        "Nunes",
        "Oliveira",
        "Peixoto",
        "Pereira",
        "Pinto",
        "Pires",
        "Porto",
        "Ramos",
        "Rezende",
        "Ribeiro",
        "Rocha",
        "Rodrigues",
        "Sales",
        "Santos",
        "Silva",
        "Silveira",
        "Souza",
        "Teixeira",
        "Viana",
        "Vieira",
    )

    # Honorific prefixes (Srta./Sra./Dra. and Sr./Dr.).
    prefixes_female = ("Srta.", "Sra.", "Dra.")

    prefixes_male = ("Sr.", "Dr.")
/Flask-MDEditor-0.1.4.tar.gz/Flask-MDEditor-0.1.4/flask_mdeditor/static/mdeditor/js/lib/codemirror/mode/soy/soy.js |
// UMD wrapper: register the mode with CommonJS, AMD, or a plain-browser global CodeMirror.
(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"), require("../htmlmixed/htmlmixed"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror", "../htmlmixed/htmlmixed"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
  // Soy commands whose bodies get one extra unit of indentation from this mode.
  var indentingTags = ["template", "literal", "msg", "fallbackmsg", "let", "if", "elseif",
                       "else", "switch", "case", "default", "foreach", "ifempty", "for",
                       "call", "param", "deltemplate", "delcall", "log"];
  // Mode for Soy (Closure Templates), delegating non-Soy content to inner modes.
  CodeMirror.defineMode("soy", function(config) {
    var textMode = CodeMirror.getMode(config, "text/plain");
    // Inner modes selected by a tag's kind="..." attribute value.
    var modes = {
      html: CodeMirror.getMode(config, {name: "text/html", multilineTagIndentFactor: 2, multilineTagIndentPastTag: false}),
      attributes: textMode,
      text: textMode,
      uri: textMode,
      css: CodeMirror.getMode(config, "text/css"),
      js: CodeMirror.getMode(config, {name: "text/javascript", statementIndent: 2 * config.indentUnit})
    };
    // Top of a stack stored as an array.
    function last(array) {
      return array[array.length - 1];
    }
    // Run the inner mode on the stream, but truncate the stream at the first
    // match of untilRegExp so Soy constructs are not consumed by the inner mode.
    function tokenUntil(stream, state, untilRegExp) {
      var oldString = stream.string;
      var match = untilRegExp.exec(oldString.substr(stream.pos));
      if (match) {
        // We don't use backUp because it backs up just the position, not the state.
        // This uses an undocumented API.
        stream.string = oldString.substr(0, stream.pos + match.index);
      }
      var result = stream.hideFirstChars(state.indent, function() {
        return state.localMode.token(stream, state.localState);
      });
      stream.string = oldString;
      return result;
    }
    return {
      startState: function() {
        return {
          kind: [],
          kindTag: [],
          soyState: [],
          indent: 0,
          localMode: modes.html,
          localState: CodeMirror.startState(modes.html)
        };
      },
      copyState: function(state) {
        return {
          tag: state.tag, // Last seen Soy tag.
          kind: state.kind.concat([]), // Values of kind="" attributes.
          kindTag: state.kindTag.concat([]), // Opened tags with kind="" attributes.
          soyState: state.soyState.concat([]),
          indent: state.indent, // Indentation of the following line.
          localMode: state.localMode,
          localState: CodeMirror.copyState(state.localMode, state.localState)
        };
      },
      token: function(stream, state) {
        var match;
        // First finish whatever construct we are already inside of
        // (top of the soyState stack); otherwise fall through to detect
        // the start of a new Soy construct below.
        switch (last(state.soyState)) {
          case "comment":
            if (stream.match(/^.*?\*\//)) {
              state.soyState.pop();
            } else {
              stream.skipToEnd();
            }
            return "comment";
          case "variable":
            if (stream.match(/^}/)) {
              state.indent -= 2 * config.indentUnit;
              state.soyState.pop();
              return "variable-2";
            }
            stream.next();
            return null;
          case "tag":
            if (stream.match(/^\/?}/)) {
              if (state.tag == "/template" || state.tag == "/deltemplate") state.indent = 0;
              else state.indent -= (stream.current() == "/}" || indentingTags.indexOf(state.tag) == -1 ? 2 : 1) * config.indentUnit;
              state.soyState.pop();
              return "keyword";
            } else if (stream.match(/^(\w+)(?==)/)) {
              // A kind="..." attribute switches the inner mode for the tag body.
              if (stream.current() == "kind" && (match = stream.match(/^="([^"]+)/, false))) {
                var kind = match[1];
                state.kind.push(kind);
                state.kindTag.push(state.tag);
                state.localMode = modes[kind] || modes.html;
                state.localState = CodeMirror.startState(state.localMode);
              }
              return "attribute";
            } else if (stream.match(/^"/)) {
              state.soyState.push("string");
              return "string";
            }
            stream.next();
            return null;
          case "literal":
            if (stream.match(/^(?=\{\/literal})/)) {
              state.indent -= config.indentUnit;
              state.soyState.pop();
              return this.token(stream, state);
            }
            return tokenUntil(stream, state, /\{\/literal}/);
          case "string":
            if (stream.match(/^.*?"/)) {
              state.soyState.pop();
            } else {
              stream.skipToEnd();
            }
            return "string";
        }
        // Not inside any construct: look for the start of one.
        if (stream.match(/^\/\*/)) {
          state.soyState.push("comment");
          return "comment";
        } else if (stream.match(stream.sol() ? /^\s*\/\/.*/ : /^\s+\/\/.*/)) {
          return "comment";
        } else if (stream.match(/^\{\$\w*/)) {
          state.indent += 2 * config.indentUnit;
          state.soyState.push("variable");
          return "variable-2";
        } else if (stream.match(/^\{literal}/)) {
          state.indent += config.indentUnit;
          state.soyState.push("literal");
          return "keyword";
        } else if (match = stream.match(/^\{([\/@\\]?\w*)/)) {
          if (match[1] != "/switch")
            state.indent += (/^(\/|(else|elseif|case|default)$)/.test(match[1]) && state.tag != "switch" ? 1 : 2) * config.indentUnit;
          state.tag = match[1];
          if (state.tag == "/" + last(state.kindTag)) {
            // We found the tag that opened the current kind="".
            state.kind.pop();
            state.kindTag.pop();
            state.localMode = modes[last(state.kind)] || modes.html;
            state.localState = CodeMirror.startState(state.localMode);
          }
          state.soyState.push("tag");
          return "keyword";
        }
        // Plain content: hand off to the inner mode until the next Soy construct.
        return tokenUntil(stream, state, /\{|\s+\/\/|\/\*/);
      },
      indent: function(state, textAfter) {
        var indent = state.indent, top = last(state.soyState);
        if (top == "comment") return CodeMirror.Pass;
        if (top == "literal") {
          if (/^\{\/literal}/.test(textAfter)) indent -= config.indentUnit;
        } else {
          if (/^\s*\{\/(template|deltemplate)\b/.test(textAfter)) return 0;
          if (/^\{(\/|(fallbackmsg|elseif|else|ifempty)\b)/.test(textAfter)) indent -= config.indentUnit;
          if (state.tag != "switch" && /^\{(case|default)\b/.test(textAfter)) indent -= config.indentUnit;
          if (/^\{\/switch\b/.test(textAfter)) indent -= config.indentUnit;
        }
        // Let the inner mode contribute its own indentation on top of ours.
        if (indent && state.localMode.indent)
          indent += state.localMode.indent(state.localState, textAfter);
        return indent;
      },
      innerMode: function(state) {
        if (state.soyState.length && last(state.soyState) != "literal") return null;
        else return {state: state.localState, mode: state.localMode};
      },
      electricInput: /^\s*\{(\/|\/template|\/deltemplate|\/switch|fallbackmsg|elseif|else|case|default|ifempty|\/literal\})$/,
      lineComment: "//",
      blockCommentStart: "/*",
      blockCommentEnd: "*/",
      blockCommentContinue: " * ",
      fold: "indent"
    };
  }, "htmlmixed");
  CodeMirror.registerHelper("hintWords", "soy", indentingTags.concat(
      ["delpackage", "namespace", "alias", "print", "css", "debugger"]));
  CodeMirror.defineMIME("text/x-soy", "soy");
});
/INF367-chen-1.1.1.zip/INF367-chen-1.1.1/src/persistent_homology.py | from itertools import combinations
import numpy as np
import pandas
import plotly.express as px
import plotly.graph_objects as go
import torch
from plotly.subplots import make_subplots
from scipy.signal import convolve2d
from sklearn.datasets import make_moons
from sklearn.metrics import euclidean_distances
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KernelDensity
# Some example values from the youtube video about this topic for testing.
# 11 simplices: 4 vertices, 5 edges, 2 triangles; entry [i, j] == 1 means
# simplex i is a face of simplex j.
example_boundary_matrix = np.array([[0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0],
                                    [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0],
                                    [0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0],
                                    [0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0],
                                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
                                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
                                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
                                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
                                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
                                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                                    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
# Dimension of each simplex above (0 = vertex, 1 = edge, 2 = triangle).
example_boundary_matrix_dimensions = np.array([0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2])
# NOTE(review): the rows below have unequal lengths (6, 6, 6, 5) — np.array on a
# ragged nested list raises on NumPy >= 1.24. The last row looks truncated;
# TODO confirm the intended values. (Unused by the code visible in this module.)
example_distance_matrix = np.array([[0., 3.23398405, 3.23398405, 3.23398405, 3.23398405, 3.23398405],
                                    [0., 0., 3.23398405, 2.26929705, 2.73298802, 2.26929705],
                                    [0., 0., 0., 3.23398405, 3.23398405, 3.23398405],
                                    [0., 0., 0., 0., 2.7]])
# Expected result of reducing example_boundary_matrix (see MatrixReduction).
example_reduced_matrix = np.array([[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
                                   [0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0],
                                   [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                                   [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
                                   [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
                                   [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
                                   [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
                                   [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
                                   [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
                                   [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                                   [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
# Expected (birth, death, dimension) pairs for the example; np.inf = never dies.
example_persistence_pairs = pandas.DataFrame(columns=["birth", "death", "dimension"],
                                             data=np.array(
                                                 [[0, np.inf, 0], [2, 4, 0], [3, 5, 0], [1, 6, 0], [8, 9, 1],
                                                  [7, 10, 1]]))
def get_mutual_reachability_distance_matrix(all_points: np.array, k_nearest_neighours=5):
    """Build the mutual-reachability distance matrix for a point set.

    Only the strict upper triangle is filled (pairs with i < j); the diagonal
    and the lower triangle stay zero, which is what the downstream filtration
    code expects.

    :param all_points: Input points
    :type all_points: np.array
    :param k_nearest_neighours: Number of k nearest neighbours that should be used in the algorithm
    :type k_nearest_neighours: int
    :return: mutual reachability distance matrix
    :rtype: np.array
    """
    n = len(all_points)
    matrix = np.zeros((n, n))
    # Visit every unordered pair of points exactly once.
    for i, j in combinations(range(n), 2):
        matrix[i, j] = mutual_reachability_distance(all_points, all_points[i], all_points[j],
                                                    k_nearest_neighours)
    return matrix
def mutual_reachability_distance(all_points: np.array, a, b, k_nearest_neighours=5):
    """Calculates the mutual reachability distance between two points a and b.

    This is max(core_k(a), core_k(b), d(a, b)), where core_k(x) is the distance
    from x to its k-th nearest neighbour (x itself excluded).

    :param all_points: All points in which a and b also are lying.
    :type all_points: np.array
    :param a: The first point of interest.
    :type a: np.array
    :param b: The second point of interest.
    :type b: np.array
    :param k_nearest_neighours: Number of k nearest neighbours that should be used in the algorithm
    :type k_nearest_neighours: int
    :return: distance between a and b in term of mutual reachability.
    :rtype: float
    """
    points = np.asarray(all_points, dtype=float)
    a = np.asarray(a, dtype=float)
    b = np.asarray(b, dtype=float)

    def _core_distance(point):
        # Distance to all points, sorted ascending; index 0 is the point's own
        # zero self-distance (the point is assumed to be part of all_points),
        # so the k nearest *other* neighbours are entries 1..k.
        nearest = np.sort(np.linalg.norm(points - point, axis=1))[1:k_nearest_neighours + 1]
        return nearest.max() if nearest.size > 0 else 0

    # Calculate distance between a and b
    dist_a_b = np.linalg.norm(a - b)
    # Mutual reachability is the max of the two core distances and d(a, b).
    return max(_core_distance(a), _core_distance(b), dist_a_b)
def create_boundary_matrix_from_simplices_dataframe(df_simplices: pandas.DataFrame) -> tuple[np.array, np.array]:
    """Creates a boundary matrix from simplices.

    Column j of the returned matrix holds a 1 in every row i whose simplex is
    an (n-1)-dimensional face of simplex j.

    :param df_simplices: The simplices that will form the simplicial complex.
    :type df_simplices: pandas.DataFrame
    :return: boundary matrix and the dimensions of it as a separate array.
    :rtype: Tuple[np.array, np.array]
    """
    print("Calculating boundary_matrix: This may take a while...")
    n = len(df_simplices)
    # Initialize boundary matrix
    boundary_matrix = np.zeros((n, n), dtype=int)
    # Take each simplex's dimension straight from the dataframe. (The previous
    # implementation only filled dimensions for rows that appeared as faces of
    # something higher, so the highest-dimensional simplices were incorrectly
    # left at dimension 0 — the dangling "fixing highest dimension" TODO.)
    boundary_matrix_dimensions = df_simplices["dimension"].to_numpy(dtype=int)
    list_over_all_indices = df_simplices["value"].tolist()
    # O(1) lookup from a face (vertex tuple) to its row index, replacing the
    # per-combination linear scan over the whole dataframe.
    row_of_face = {tuple(value): row for row, value in enumerate(list_over_all_indices)}
    for col_index, indices in enumerate(list_over_all_indices):
        # 0-dimensional simplices have no boundary
        if len(indices) == 1:
            continue
        # Mark every face of one dimension lower that exists in the complex.
        for comb in combinations(indices, len(indices) - 1):
            row = row_of_face.get(comb)
            if row is not None:
                boundary_matrix[row, col_index] = 1
    # Small check for the validity of the boundary matrix
    assert np.all((boundary_matrix == 0) | (boundary_matrix == 1))
    print(f"Boundary Matrix: \n {boundary_matrix}")
    print(f"Boundary Matrix Dimensions: \n {boundary_matrix_dimensions}")
    return boundary_matrix, boundary_matrix_dimensions
class FilteredComplexes(object):
    """Filtered Complexes. For an example run, we refer to the method visualize_filtered_complex().

    Builds a Vietoris-Rips-style filtration over the training data, growing a
    radius over all pairwise distances and recording every simplex (with its
    birth radius) in the ``df_simplices`` dataframe.
    """
    def __init__(self, training_data: np.array = None):
        # Birth radius of each simplex, filled during fit(); index-aligned
        # with the rows of df_simplices.
        self.distance_list = None
        self._training_data = training_data
        # Running counters used to hand out unique simplex / edge indices.
        self._index = 0
        self._edge_index = 0
        # NOTE(review): the two attributes below are never read in this class.
        self._not_changed_counter = 0
        self._df_length = -1
        # Init of the simplices DataFrame
        self.df_simplices = pandas.DataFrame(
            columns=["index", "dimension", "value", "edge_birth_index", "edge_death_index"])
    def get_next_node_number(self):
        """Return the current simplex index and advance the counter.
        @return: Current node number
        @rtype: int
        """
        self._index += 1
        return self._index - 1
    def get_next_edge_number(self):
        """Return the current edge index and advance the counter.
        @return: Current edge number
        @rtype: int
        """
        self._edge_index += 1
        return self._edge_index - 1
    def get_simplices(self):
        """Getter function
        @return: simplices
        @rtype: pandas.DataFrame
        """
        return self.df_simplices
    def _calculate_simplices(self, dist_matrix, max_simplices_dim, verbose=False):
        """Calculates internally the simplices based on the distance matrix.

        :param dist_matrix: Distance matrix
        :type dist_matrix: np.array
        :param max_simplices_dim: Highest dimension of the simplices that can be achieved
        :type max_simplices_dim: int
        :return: None
        :rtype: None
        """
        # Each distinct pairwise distance is a radius at which the complex can change.
        list_of_iterating_radius = np.sort(np.unique(dist_matrix.flatten()))
        print(f"All radii: {list_of_iterating_radius}")
        # saving distances for the barcode
        # Spreading radius
        for radius in list_of_iterating_radius:
            if verbose:
                print(f"The current radius is: {radius}")
            edges = np.where(dist_matrix <= radius, 1, 0)
            # This matrix contains 1 where we have an edge between datapoints for the given radius
            upper_matrix_with_edges = np.triu(edges, k=1)
            if verbose:
                print(upper_matrix_with_edges)
            # get the indices of the edge points
            edges_indices = np.argwhere(upper_matrix_with_edges == 1)
            if verbose:
                print(edges_indices)
            for dimension in range(1, max_simplices_dim + 1):
                if dimension == 1:
                    # New edges: any pair within the radius not recorded yet.
                    for edge in edges_indices.tolist():
                        if edge not in self.df_simplices[self.df_simplices["dimension"] == dimension]["value"].tolist():
                            self.distance_list.append(radius)  # saving the distance
                            new_df_row = dict(index=self.get_next_node_number(), dimension=dimension, value=edge,
                                              edge_birth_index=self.get_next_edge_number(), edge_death_index=-1)
                            new_df_row = pandas.DataFrame([new_df_row])
                            self.df_simplices = pandas.concat([self.df_simplices, new_df_row])
                else:
                    # All higher dimensions rely on looking up whether every face
                    # of dimension n-1 needed for a dimension-n simplex exists.
                    all_combs_of_current_dimension = combinations(range(len(dist_matrix)), dimension + 1)
                    for comb in all_combs_of_current_dimension:
                        combinations_of_one_dimension_lower = combinations(comb, dimension)
                        matches_from_lower_dimension = 0
                        # Count if we have all the faces of n-1 dimension to build the simplex of dimension n
                        for comb_from_lower_dim in combinations_of_one_dimension_lower:
                            if list(comb_from_lower_dim) in \
                                    self.df_simplices[self.df_simplices["dimension"] == dimension - 1][
                                        "value"].tolist():
                                matches_from_lower_dimension += 1
                        # We got a simplex!
                        if matches_from_lower_dimension == dimension + 1:
                            if list(comb) not in self.df_simplices[self.df_simplices["dimension"] == dimension][
                                "value"].tolist():
                                self.distance_list.append(radius)  # saving the distance
                                if verbose:
                                    print(f"We got a {dimension}-dimensional simplex: {comb}")
                                new_df_row = dict(index=self.get_next_node_number(), dimension=dimension,
                                                  value=list(comb), edge_birth_index=-1,
                                                  edge_death_index=self.get_next_edge_number())
                                new_df_row = pandas.DataFrame([new_df_row])
                                self.df_simplices = pandas.concat([self.df_simplices, new_df_row])
        if verbose:
            print(self.df_simplices)
        # Use the assigned simplex numbers as the dataframe index (and keep a copy column).
        self.df_simplices = self.df_simplices.set_index("index")
        self.df_simplices["index"] = self.df_simplices.index
        if verbose:
            print(f"Final dataframe of simplices is: {self.df_simplices}")
    def fit(self, max_simplices_dim: int = 3, k_nearest_neighours=5, own_dist_matrix=None):
        """Fits a Filtered Simplicial Complex onto the training data.

        :param max_simplices_dim: Highest dimension of the simplices that can be achieved
        :type max_simplices_dim: int
        :param k_nearest_neighours: Number of k nearest neighbours that should be used in the algorithm
        :type k_nearest_neighours: int
        :param own_dist_matrix: Usually none, sometimes we already have calculated one, then we can use it here.
        :type own_dist_matrix: np.array
        :return: Returns a boundary matrix, an array with the dimension of the persistence pairs,
        and then also a distance list where the actual distances between nodes are saved.
        :rtype: Tuple[np.array, np.array, List[float]]
        """
        # We fit via rips complex with HDBSCANs distance function mutual_reachability_distance
        self.distance_list = []
        # First we calculate the distance matrix (dist function is the mutual reachability distance)
        if own_dist_matrix is not None:
            dist_matrix = own_dist_matrix
        else:
            dist_matrix = get_mutual_reachability_distance_matrix(self._training_data,
                                                                  k_nearest_neighours=k_nearest_neighours)
        # Add 0 dimensional simplices: one vertex per datapoint, born at radius 0.
        for idx, point in enumerate(range(len(dist_matrix))):
            self.distance_list.append(0)
            new_df_row = dict(index=self.get_next_node_number(), dimension=0, value=[idx], edge_birth_index=-1,
                              edge_death_index=-1)
            new_df_row = pandas.DataFrame([new_df_row])
            self.df_simplices = pandas.concat([self.df_simplices, new_df_row])
        self._calculate_simplices(dist_matrix, max_simplices_dim)
        boundary_matrix, dimensions_array = create_boundary_matrix_from_simplices_dataframe(self.df_simplices)
        assert len(dimensions_array) == len(self.distance_list), "Distance list is not filled properly!!"
        return boundary_matrix, dimensions_array, self.distance_list
def reduction_of_column(transposed_matrix, index_of_current_row):
    """Reduces the current column of a boundary matrix.

    Works on the transposed matrix, so "row" here corresponds to a column of
    the original boundary matrix. Repeatedly adds (mod 2) an earlier column
    that shares the same lowest non-zero index, until this column's pivot is
    unique or the column becomes zero.

    :param transposed_matrix: The matrix to be reduced in transposed form
    :type transposed_matrix: np.array
    :param index_of_current_row: The column of interest
    :type index_of_current_row: int
    :return: transposed matrix with the reduced column
    :rtype: np.array
    """
    row_we_want_to_add = 0
    # We are in the while loop as long we have potential (now) rows to add to our current (now) row
    while row_we_want_to_add != -1:
        # if column (row now) contains only zeros, we skip it
        if np.all((transposed_matrix[index_of_current_row] == 0)):
            return transposed_matrix
        # we ask for the first value which is non-zero. In the transposed matrix, it is the last value in the (now) row.
        index = np.where(transposed_matrix[index_of_current_row] != 0)[0][-1]
        # we select here the matrix now rows up to the index of our column to check the previous ones
        cutted_matrix = transposed_matrix[:index_of_current_row]
        # get the column of interest
        column_of_interest = cutted_matrix[:, index]
        # get the index of the potentially lowest non zero element
        check_for_non_zeros = np.where(column_of_interest != 0)[0]
        # if that column has only zeros, then our now row has the lowest index value already!
        # NOTE(review): np.all(... == 0) is True both when check_for_non_zeros is
        # empty AND when the only earlier pivot candidate is row 0 — in the latter
        # case the column is returned unreduced; confirm this is intended.
        if np.all((check_for_non_zeros == 0)):
            return transposed_matrix
        # -1 indicates that there is no lowest non zero value (all are zero)
        row_we_want_to_add = check_for_non_zeros[-1] if len(check_for_non_zeros) > 0 else -1
        # Get the (lowest) latest non zero value in that now row
        lowest_index_of_the_row_we_want_to_add = np.where(transposed_matrix[row_we_want_to_add] != 0)[0][-1]
        # Check the the lowest non zero value index is actually the lowest one, or we will never finish!
        is_actually_lowest_equal = index == lowest_index_of_the_row_we_want_to_add
        if is_actually_lowest_equal:
            # Same pivot: add the earlier column mod 2 (Z/2 coefficients).
            new_now_row = np.mod(np.add(transposed_matrix[row_we_want_to_add],
                                        transposed_matrix[index_of_current_row]), 2)
            transposed_matrix[index_of_current_row] = new_now_row
        else:
            break
    return transposed_matrix
class MatrixReduction(object):
    """Matrix Reduction. For an example run, we refer to the method visualize_matrix_reduction().

    Reduces a boundary matrix (standard persistence algorithm over Z/2) and
    derives the persistence pairs and, when distances are supplied, the
    barcode with actual birth/death radii.
    """
    def __init__(self, boundary_matrix: np.array, boundary_matrix_dimens: np.array, distance_list: np.array = None):
        self._bounday_matrix = boundary_matrix
        # Birth radius per simplex, index-aligned with the matrix columns.
        self._distance_list = distance_list
        self._boundary_matrix_dimens = boundary_matrix_dimens
        self._reduced_matrix = None
        # (birth, death, dimension) in column indices.
        self._persistence_pairs = pandas.DataFrame(columns=["birth", "death", "dimension"])
        # Same pairs expressed as radii (only filled when distance_list is given).
        self._reduced_matrix_barcode = pandas.DataFrame(columns=["birth_value", "death_value", "dimension"])
    def get_barcode_of_reduced_matrix(self):
        return self._reduced_matrix_barcode
    def get_persistence_pairs(self):
        return self._persistence_pairs
    def get_reduced_matrix(self):
        return self._reduced_matrix
    def _calculate_persistence_pairs_and_barcode(self):
        """The dimensions are needed in a separate 1-dim np.array."""
        # We again transpose the matrix to make iteration easier
        transposed_matrix = self._reduced_matrix.transpose()
        for idx, now_row in enumerate(transposed_matrix):
            # Strawdummy values so we can fill our df in either case
            new_df_row = dict(birth=-1, death=-1, dimension=-1)
            new_df_row_barcode = dict(birth_value=-1, death_value=-1, dimension=-1)
            # assigning dimension
            new_df_row["dimension"] = self._boundary_matrix_dimens[idx]
            new_df_row_barcode["dimension"] = self._boundary_matrix_dimens[idx]
            if np.all((now_row == 0)):
                # A zero column births a new feature at this index.
                # assigning births
                new_df_row["birth"] = idx
                # NOTE(review): truthiness check — an empty distance list is
                # treated the same as None here; confirm that is intended.
                if self._distance_list:
                    new_df_row_barcode["birth_value"] = self._distance_list[idx]
                df_dictionary = pandas.DataFrame([new_df_row])
                df_dictionary_barcode = pandas.DataFrame([new_df_row_barcode])
                self._persistence_pairs = pandas.concat([self._persistence_pairs, df_dictionary])
                if self._distance_list:
                    self._reduced_matrix_barcode = pandas.concat([self._reduced_matrix_barcode, df_dictionary_barcode])
                continue
            # assigning deaths: the pivot (lowest 1) of this column kills the
            # feature that was born at that pivot's index.
            index_of_dying = np.where(transposed_matrix[idx] != 0)[0][-1]
            self._persistence_pairs.loc[self._persistence_pairs['birth'] == index_of_dying, "death"] = idx
            if self._distance_list:
                self._reduced_matrix_barcode.loc[self._persistence_pairs['birth'] == index_of_dying, "death_value"] = \
                    self._distance_list[idx]
        # assign infinity to the ones who are never closed
        self._persistence_pairs.loc[self._persistence_pairs['death'] == -1, "death"] = np.inf
        if self._distance_list:
            self._reduced_matrix_barcode.loc[self._reduced_matrix_barcode['death_value'] == -1, "death_value"] = np.inf
    def reduce_boundary_matrix(self):
        # print("Initial boundary matrix:")
        # print(self._bounday_matrix)
        # transpose rows with columns to iterate easier over the columns
        transposed_matrix = self._bounday_matrix.transpose()
        for index_of_current_row, _ in enumerate(transposed_matrix.copy()):
            # reduce this column; reduction_of_column iterates internally until
            # the column's pivot is unique among the earlier columns
            transposed_matrix = reduction_of_column(transposed_matrix, index_of_current_row)
        final_matrix = transposed_matrix.transpose()
        # print("Reduced matrix:")
        # print(final_matrix)
        self._reduced_matrix = final_matrix
        self._calculate_persistence_pairs_and_barcode()
    def transform(self):
        raise NotImplementedError
    def predict(self):
        raise NotImplementedError
    def draw_persistence_diagram(self, plot_inf=False, title=None):
        # Plots birth vs. death radii of the computed barcode with plotly.
        print(self._reduced_matrix_barcode)
        assert type(self._reduced_matrix_barcode) == pandas.DataFrame
        df = self._reduced_matrix_barcode.copy()
        if plot_inf:
            # Map inf deaths to a large finite value so they stay visible.
            max_value = df[df["death_value"] != np.inf]["death_value"].max() + 100
            df['death_value'].replace(np.inf, max_value, inplace=True)
        else:
            df = self._reduced_matrix_barcode.copy()
            df = df[df["death_value"] != np.inf]
            max_value = df[df["death_value"] != np.inf]["death_value"].max() + 1
        fig = px.scatter(df, x="birth_value", y="death_value", color="dimension")
        fig.update_layout(
            title=title if title else "Persistence Diagram",
            xaxis_range=[-1, max_value + 5],
            yaxis_range=[-1, max_value + 5],
            shapes=[
                {'type': 'line', 'yref': 'paper', 'xref': 'paper', 'y0': 0, 'y1': 1, 'x0': 0, 'x1': 1,
                 'layer': 'below'}])
        fig.show()
class PersistenceImages(object):
    """Persistence Images created by consuming a persistence bar code. For an example run, we refer to the method visualize_persistence_image()."""
    def __init__(self):
        self._size_of_each_pixel = 0.5  # Decides the resolution of the Discretized Kernel Density Distribution
        # Plot window in (birth, persistence) space; points outside are cut off.
        self._range_of_births = (0.0, 3)
        self._range_of_persistences = (0.0, 4)
        self._kernel_bandwidth = 1.0  # same default as sklearn
        self._width = self._range_of_births[1] - self._range_of_births[0]
        self._height = self._range_of_persistences[1] - self._range_of_persistences[0]
        # Density samples drawn per pixel before downsampling via convolution.
        self._number_of_samples_per_pixel = 10
        self._number_of_pixels = (
            int(self._width / self._size_of_each_pixel), int(self._height / self._size_of_each_pixel))
    def transform(self, persistence_barcode: pandas.DataFrame, is_in_birth_death_format=True, show_graphs=False):
        """ This method transforms a persistence barcode into a persistence image.

        :param persistence_barcode: The persistence barcode on which the persistence image is based on.
        :type persistence_barcode: pandas.DataFrame
        :param is_in_birth_death_format: Whether the persistence pairs is in (birth, death) tuple format
        :type is_in_birth_death_format: bool
        :param show_graphs: Whether we should show the graph visualization
        :type show_graphs: bool
        :return: The persistence image corresponding to the persistence diagram
        :rtype: np.array
        """
        """"""
        df = persistence_barcode.copy()
        # Replace infinite deaths with a finite value beyond the observed maximum.
        max_value = df[df["death_value"] != np.inf]["death_value"].max()
        df['death_value'].replace(np.inf, max_value + 10, inplace=True)
        # convert from is_in_birth_death_format to birth persistence values
        df = df.to_numpy()
        if is_in_birth_death_format:
            df[:, 1] = df[:, 1] - df[:, 0]
        # changing out infinities
        # Construct meshgrid
        birth_density_interval = np.linspace(self._range_of_births[0], self._range_of_births[1],
                                             self._number_of_pixels[0] * self._number_of_samples_per_pixel)
        persistence_density_interval = np.linspace(self._range_of_persistences[0], self._range_of_persistences[1],
                                                   self._number_of_pixels[1] * self._number_of_samples_per_pixel)
        birth_discrete_interval = np.linspace(self._range_of_births[0], self._range_of_births[1],
                                              self._number_of_pixels[0])
        persistence_discrete_interval = np.linspace(self._range_of_persistences[0], self._range_of_persistences[1],
                                                    self._number_of_pixels[1])
        xx, yy = np.meshgrid(birth_density_interval, persistence_density_interval)
        xy = np.vstack([xx.ravel(), yy.ravel()]).T
        # creating the kernel density estimation over (birth, persistence) pairs
        kde = KernelDensity(kernel='gaussian', bandwidth=self._kernel_bandwidth).fit(df[:, 0:2])
        reshaping_tuple = (self._number_of_pixels[1] * self._number_of_samples_per_pixel,
                           self._number_of_pixels[0] * self._number_of_samples_per_pixel)
        density_values = np.exp(kde.score_samples(xy)).reshape(reshaping_tuple)
        # Downsample the dense density grid to one value per pixel by a
        # box-filter convolution followed by strided selection.
        conv_n = int(self._number_of_samples_per_pixel / self._size_of_each_pixel)
        kernel = np.ones((conv_n, conv_n))
        convolved = convolve2d(density_values, kernel, mode='valid')
        density_values_downsampled = convolved[::conv_n, ::conv_n] / conv_n
        persistence_image = density_values_downsampled
        if show_graphs:
            fig = make_subplots(rows=1, cols=2,
                                subplot_titles=('Kernel Density Distribution',
                                                f'Discretized Kernel Density Distribution:\n Pixel size: {self._size_of_each_pixel}x{self._size_of_each_pixel}'))
            fig.add_trace(go.Contour(
                z=density_values,
                line_smoothing=0,
                x=birth_density_interval,  # horizontal axis
                y=persistence_density_interval,  # vertical axis
            ), 1, 1)
            fig.add_trace(go.Contour(
                z=persistence_image,
                line_smoothing=0,
                x=birth_discrete_interval,  # horizontal axis
                y=persistence_discrete_interval,  # vertical axis
            ), 1, 2)
            fig.update_layout(title_text="Persistence Image on the two moons")
            fig.show()
        return persistence_image
class PersistenceLandscapes(object):
    """Persistence Landscapes. For an example run, we refer to the method visualize_persistence_landscape()."""
    def __init__(self, discretization_depth: int = 2):
        # Number of sample points per unit of the x-axis grid.
        self._discretization_depth = discretization_depth
    def fit(self, persistence_barcode: pandas.DataFrame, show_graphs: bool = False):
        """This method transforms a persistence barcode into a persistence landscape.

        :param persistence_barcode: The persistence barcode on which the persistence landscape is based on.
        :type persistence_barcode: pandas.DataFrame
        :param show_graphs: Controls whether the result is displayed visually.
        :type show_graphs: bool
        :return: The matrix containing the unsorted landscape coordinates, and the matrix with sorted values
        :rtype: Tuple[pandas.DataFrame, pandas.DataFrame]
        """
        df = persistence_barcode.copy()
        # changing out infinities: never-dying features get a finite bar just
        # past the largest observed death value.
        max_value = df[df["death_value"] != np.inf]["death_value"].max()
        df['death_value'].replace(np.inf, max_value + 10, inplace=True)
        df.loc[df.death_value == max_value + 10, "birth_value"] = max_value + 9
        # Each bar becomes a "tent" centred at mid_life with height half_life.
        df["half_life"] = np.divide(df["death_value"] - df["birth_value"], 2)
        df["mid_life"] = np.divide(df["death_value"] + df["birth_value"], 2)
        # creating coordinate system grid with linspace
        max_x = df["mid_life"].max()
        max_y = df["half_life"].max()  # NOTE(review): max_y is unused
        x_range = np.linspace(0, int(max_x) + 1, int(self._discretization_depth * max_x))
        matrix = np.zeros([len(df), len(x_range)])
        for index, (_, series) in enumerate(df.iterrows()):
            # Tent function: min(x - birth, death - x), clipped at 0.
            current_value_row = np.copy(x_range)
            first_value = current_value_row - series.birth_value
            second_value = series.death_value - current_value_row
            final_row = np.min(np.array([first_value, second_value]), axis=0).clip(min=0)
            matrix[index, :] = final_row
        # Only for debugging
        # print(matrix)
        # Sort pointwise so row k is the k-th largest tent value at each x.
        sorted_matrix = np.flip(matrix, axis=0)
        sorted_matrix = np.sort(sorted_matrix, axis=0)
        matrix_df = pandas.DataFrame(matrix.T)
        matrix_df.index = x_range
        sorted_matrix_df = pandas.DataFrame(sorted_matrix.T)
        sorted_matrix_df.index = x_range
        if show_graphs:
            fig = make_subplots(rows=2, cols=1,
                                subplot_titles=('Persistence landscape',
                                                'Persistence landscape sorted'))
            for i in range(len(matrix_df.columns)):
                fig.add_trace(go.Line(x=matrix_df[i].index.values, y=matrix_df[i].values,
                                      name=i,
                                      legendgroup="1",
                                      legendgrouptitle_text="The unsorted persistence landscape"),
                              row=1,
                              col=1)
                fig.add_trace(go.Line(x=sorted_matrix_df[i].index.values, y=sorted_matrix_df[i].values,
                                      name=f"k={i}",
                                      legendgroup="2",
                                      legendgrouptitle_text="The sorted persistence landscape"),
                              row=2,
                              col=1)
            fig.update_layout(title="Persistence Landscapes", title_font_size=20)
            fig.add_vline(x=max_value + 9, line_width=2, line_dash="dot", line_color="green",
                          annotation_text="never dying simplicies", annotation_position="bottom left",
                          annotation_font_size=12,
                          annotation_font_color="green")
            fig.show()
        return matrix_df, sorted_matrix_df
class PersLay(torch.nn.Module):
    """PersLay: a permutation-invariant layer over persistence diagrams.

    Each diagram point is mapped through a weight net ``w`` and a feature net
    ``rho``; the weighted features are combined with a permutation-invariant
    operation and passed through a final classifier head.
    """
    def __init__(self, layer_type="persistence_diagram", rho_output_dim_q=10, operation="sum"):
        super().__init__()
        self.operation = operation
        self.layer_type = layer_type
        # NOTE(review): only "persistence_diagram" is implemented; an unknown
        # layer_type falls through silently here and later fails with an
        # AttributeError because input_dimension is never set.
        if self.layer_type == "persistence_image":
            raise NotImplementedError
        elif self.layer_type == "persistence_landscape":
            raise NotImplementedError
        elif self.layer_type == "persistence_diagram":
            self.input_dimension = 2
        # Per-point feature transformation (phi in the PersLay paper).
        self.rho = torch.nn.Sequential(
            torch.nn.Linear(self.input_dimension, 124),
            torch.nn.ReLU(),
            torch.nn.Linear(124, 512),
            torch.nn.ReLU(),
            torch.nn.Linear(512, rho_output_dim_q),
            torch.nn.ReLU()
        )
        # Per-point scalar weight function.
        self.w = torch.nn.Sequential(
            torch.nn.Linear(self.input_dimension, 512),
            torch.nn.ReLU(),
            torch.nn.Linear(512, 256),
            torch.nn.ReLU(),
            torch.nn.Linear(256, 128),
            torch.nn.ReLU(),
            torch.nn.Linear(128, 1),
            torch.nn.ReLU()
        )
        # Head applied after the permutation-invariant pooling.
        self.final = torch.nn.Sequential(
            torch.nn.Linear(rho_output_dim_q, 4 * rho_output_dim_q),
            torch.nn.ReLU(),
            torch.nn.Linear(4 * rho_output_dim_q, rho_output_dim_q),
            torch.nn.ReLU(),
            torch.nn.Softmax(dim=0)
        )
    def forward(self, input_values):
        """Forward function of the perslay as neural network.

        :param input_values: Input vectors
        :type input_values: torch.Tensor
        :return: The output of this neural network
        :rtype: torch.Tensor
        """
        w_result = self.w(input_values)
        rho_result = self.rho(input_values)
        # Weight each point's features by its learned scalar weight.
        multiplication_result = torch.mul(w_result, rho_result)
        after_operation = self.permutation_invariant_operation(multiplication_result)
        after_softmax = self.final(after_operation)
        return after_softmax
    def permutation_invariant_operation(self, input_tensor: torch.Tensor):
        """Input tensors that can appear in different permutations each time are mixed together with a permutation-invariant operation.

        The operation is selected by ``self.operation`` ("max", "min" or "sum",
        set in the constructor); if it is None the input is returned unchanged.

        :param input_tensor: Input tensors
        :type input_tensor: torch.Tensor
        :return: The input tensor reduced by one dimension
        :rtype: torch.Tensor
        """
        initial_tensor = input_tensor
        if self.operation is not None:
            if self.operation == "max":
                new_value, _ = torch.max(input_tensor, dim=0)
            elif self.operation == "min":
                new_value, _ = torch.min(input_tensor, dim=0)
            elif self.operation == "sum":
                new_value = torch.sum(input_tensor, dim=0)
            else:
                raise TypeError("operation should be max, min or sum")
            return new_value
        else:
            return initial_tensor
def visualize_matrix_reduction():
    """Tests the MatrixReduction class. Method is called visualize for conformity of style.

    Reduces the example boundary matrix and checks both the reduced matrix and
    the derived persistence pairs against the known expected values.

    :return: None
    :rtype: None
    """
    reducer = MatrixReduction(example_boundary_matrix, example_boundary_matrix_dimensions)
    reducer.reduce_boundary_matrix()
    assert np.array_equal(example_reduced_matrix, reducer.get_reduced_matrix())
    pairs = reducer.get_persistence_pairs()
    assert np.array_equal(pairs.sort_values(by="birth").to_numpy(),
                          example_persistence_pairs.sort_values(by="birth").to_numpy())
    print("Reduced Matrix has been correctly calculated! :)")
def visualize_filtered_complex():
    """Tests the FilteredComplexes class. Method is called visualize for conformity of style.

    :return: None
    :rtype: None
    """
    coords, labels = make_moons(n_samples=10, noise=0.2, random_state=0)
    # Load and split data with a method from scikit-learn
    train_coords, _test_coords, _train_labels, _test_labels = train_test_split(
        coords, labels.astype(str), test_size=0.25, random_state=0)
    boundary, dims, _distances = FilteredComplexes(train_coords).fit(max_simplices_dim=4,
                                                                     k_nearest_neighours=3)
    print(boundary)
    print(dims)
def visualize_persistence_image():
    """Visualizes the results of the PersistenceImages class on the two-moons data set.

    Builds the filtered complex, reduces its boundary matrix, and renders the
    persistence image of the resulting barcode.
    :return: None
    :rtype: None
    """
    coords, labels = make_moons(n_samples=10, noise=0.2, random_state=0)
    # Split into train/test with scikit-learn; only the training half is used here.
    train_coords, _test_coords, _train_labels, _test_labels = train_test_split(
        coords, labels.astype(str), test_size=0.25, random_state=0)
    complex_builder = FilteredComplexes(train_coords)
    boundary_matrix, dimension_array, distances = complex_builder.fit(
        max_simplices_dim=4, k_nearest_neighours=3)
    reductor = MatrixReduction(boundary_matrix, dimension_array, distances)
    reductor.reduce_boundary_matrix()
    barcode = reductor.get_barcode_of_reduced_matrix()
    image = PersistenceImages()
    image.transform(persistence_barcode=barcode, show_graphs=True)
def visualize_persistence_diagram():
    """Visualizes the results of the MatrixReduction class on the two-moons data set.

    Builds the filtered complex, reduces its boundary matrix, and draws the
    persistence diagram.
    :return: None
    :rtype: None
    """
    coords, labels = make_moons(n_samples=10, noise=0.2, random_state=0)
    # Split into train/test with scikit-learn; only the training half is used here.
    train_coords, _test_coords, _train_labels, _test_labels = train_test_split(
        coords, labels.astype(str), test_size=0.25, random_state=0)
    complex_builder = FilteredComplexes(train_coords)
    boundary_matrix, dimension_array, distances = complex_builder.fit(
        max_simplices_dim=4, k_nearest_neighours=3)
    reductor = MatrixReduction(boundary_matrix, dimension_array, distances)
    reductor.reduce_boundary_matrix()
    reductor.draw_persistence_diagram()
def visualize_persistence_landscape():
    """Visualizes the results of the PersistenceLandscapes class on the two-moons data set.

    Builds the filtered complex, reduces its boundary matrix, and plots the
    persistence landscape of the resulting barcode.
    :return: None
    :rtype: None
    """
    coords, labels = make_moons(n_samples=10, noise=0.2, random_state=0)
    # Split into train/test with scikit-learn; only the training half is used here.
    train_coords, _test_coords, _train_labels, _test_labels = train_test_split(
        coords, labels.astype(str), test_size=0.25, random_state=0)
    complex_builder = FilteredComplexes(train_coords)
    boundary_matrix, dimension_array, distances = complex_builder.fit(
        max_simplices_dim=4, k_nearest_neighours=3)
    reductor = MatrixReduction(boundary_matrix, dimension_array, distances)
    reductor.reduce_boundary_matrix()
    landscapes = PersistenceLandscapes(discretization_depth=200)
    landscapes.fit(persistence_barcode=reductor.get_barcode_of_reduced_matrix(), show_graphs=True)
if __name__ == "__main__":
    # Run all smoke tests / visualizations when this module is executed
    # directly as a script.
    visualize_filtered_complex()
    visualize_matrix_reduction()
    visualize_persistence_diagram()
    visualize_persistence_landscape()
    visualize_persistence_image()
/D47crunch-2.0.3.tar.gz/D47crunch-2.0.3/docs/tutorial.md | ## 1. Tutorial
### 1.1 Installation
The easy option is to use `pip`; open a shell terminal and simply type:
```
python -m pip install D47crunch
```
For those wishing to experiment with the bleeding-edge development version, this can be done through the following steps:
1. Download the `dev` branch source code [here](https://raw.githubusercontent.com/mdaeron/D47crunch/dev/D47crunch/__init__.py) and rename it to `D47crunch.py`.
2. Do any of the following:
* copy `D47crunch.py` to somewhere in your Python path
* copy `D47crunch.py` to a working directory (`import D47crunch` will only work if called within that directory)
* copy `D47crunch.py` to any other location (e.g., `/foo/bar`) and then use the following code snippet in your own code to import `D47crunch`:
```py
import sys
sys.path.append('/foo/bar')
import D47crunch
```
Documentation for the development version can be downloaded [here](https://github.com/mdaeron/D47crunch/raw/dev/docs/index.html) (save html file and open it locally).
### 1.2 Usage
Start by creating a file named `rawdata.csv` with the following contents:
```html
UID, Sample, d45, d46, d47, d48, d49
A01, ETH-1, 5.79502, 11.62767, 16.89351, 24.56708, 0.79486
A02, MYSAMPLE-1, 6.21907, 11.49107, 17.27749, 24.58270, 1.56318
A03, ETH-2, -6.05868, -4.81718, -11.63506, -10.32578, 0.61352
A04, MYSAMPLE-2, -3.86184, 4.94184, 0.60612, 10.52732, 0.57118
A05, ETH-3, 5.54365, 12.05228, 17.40555, 25.96919, 0.74608
A06, ETH-2, -6.06706, -4.87710, -11.69927, -10.64421, 1.61234
A07, ETH-1, 5.78821, 11.55910, 16.80191, 24.56423, 1.47963
A08, MYSAMPLE-2, -3.87692, 4.86889, 0.52185, 10.40390, 1.07032
```
Then instantiate a `D47data` object which will store and process this data:
```py
import D47crunch
mydata = D47crunch.D47data()
```
For now, this object is empty:
```html
>>> print(mydata)
[]
```
To load the analyses saved in `rawdata.csv` into our `D47data` object and process the data:
```py
mydata.read('rawdata.csv')
# compute δ13C, δ18O of working gas:
mydata.wg()
# compute δ13C, δ18O, raw Δ47 values for each analysis:
mydata.crunch()
# compute absolute Δ47 values for each analysis
# as well as average Δ47 values for each sample:
mydata.standardize()
```
We can now print a summary of the data processing:
```html
>>> mydata.summary(verbose = True, save_to_file = False)
[summary]
––––––––––––––––––––––––––––––– –––––––––
N samples (anchors + unknowns) 5 (3 + 2)
N analyses (anchors + unknowns) 8 (5 + 3)
Repeatability of δ13C_VPDB 4.2 ppm
Repeatability of δ18O_VSMOW 47.5 ppm
Repeatability of Δ47 (anchors) 13.4 ppm
Repeatability of Δ47 (unknowns) 2.5 ppm
Repeatability of Δ47 (all) 9.6 ppm
Model degrees of freedom 3
Student's 95% t-factor 3.18
Standardization method pooled
––––––––––––––––––––––––––––––– –––––––––
```
This tells us that our data set contains 5 different samples: 3 anchors (ETH-1, ETH-2, ETH-3) and 2 unknowns (MYSAMPLE-1, MYSAMPLE-2). The total number of analyses is 8, with 5 anchor analyses and 3 unknown analyses. We get an estimate of the analytical repeatability (i.e. the overall, pooled standard deviation) for δ13C, δ18O and Δ47, as well as the number of degrees of freedom (here, 3) that these estimated standard deviations are based on, along with the corresponding Student's t-factor (here, 3.18) for 95 % confidence limits. Finally, the summary indicates that we used a “pooled” standardization approach (see [Daëron, 2021]).
To see the actual results:
```html
>>> mydata.table_of_samples(verbose = True, save_to_file = False)
[table_of_samples]
–––––––––– – ––––––––– –––––––––– –––––– –––––– –––––––– –––––– ––––––––
Sample N d13C_VPDB d18O_VSMOW D47 SE 95% CL SD p_Levene
–––––––––– – ––––––––– –––––––––– –––––– –––––– –––––––– –––––– ––––––––
ETH-1 2 2.01 37.01 0.2052 0.0131
ETH-2 2 -10.17 19.88 0.2085 0.0026
ETH-3 1 1.73 37.49 0.6132
MYSAMPLE-1 1 2.48 36.90 0.2996 0.0091 ± 0.0291
MYSAMPLE-2 2 -8.17 30.05 0.6600 0.0115 ± 0.0366 0.0025
–––––––––– – ––––––––– –––––––––– –––––– –––––– –––––––– –––––– ––––––––
```
This table lists, for each sample, the number of analytical replicates, average δ13C and δ18O values (for the analyte CO2, *not* for the carbonate itself), the average Δ47 value and the SD of Δ47 for all replicates of this sample. For unknown samples, the SE and 95 % confidence limits for mean Δ47 are also listed. These 95 % CL take into account the number of degrees of freedom of the regression model, so that in large datasets the 95 % CL will tend to 1.96 times the SE, but in this case the applicable t-factor is much larger.
We can also generate a table of all analyses in the data set (again, note that `d18O_VSMOW` is the composition of the CO2 analyte):
```html
>>> mydata.table_of_analyses(verbose = True, save_to_file = False)
[table_of_analyses]
––– ––––––––– –––––––––– ––––––––––– –––––––––––– ––––––––– ––––––––– –––––––––– –––––––––– –––––––– –––––––––– –––––––––– ––––––––– ––––––––– –––––––––– ––––––––
UID Session Sample d13Cwg_VPDB d18Owg_VSMOW d45 d46 d47 d48 d49 d13C_VPDB d18O_VSMOW D47raw D48raw D49raw D47
––– ––––––––– –––––––––– ––––––––––– –––––––––––– ––––––––– ––––––––– –––––––––– –––––––––– –––––––– –––––––––– –––––––––– ––––––––– ––––––––– –––––––––– ––––––––
A01 mySession ETH-1 -3.807 24.921 5.795020 11.627670 16.893510 24.567080 0.794860 2.014086 37.041843 -0.574686 1.149684 -27.690250 0.214454
A02 mySession MYSAMPLE-1 -3.807 24.921 6.219070 11.491070 17.277490 24.582700 1.563180 2.476827 36.898281 -0.499264 1.435380 -27.122614 0.299589
A03 mySession ETH-2 -3.807 24.921 -6.058680 -4.817180 -11.635060 -10.325780 0.613520 -10.166796 19.907706 -0.685979 -0.721617 16.716901 0.206693
A04 mySession MYSAMPLE-2 -3.807 24.921 -3.861840 4.941840 0.606120 10.527320 0.571180 -8.159927 30.087230 -0.248531 0.613099 -4.979413 0.658270
A05 mySession ETH-3 -3.807 24.921 5.543650 12.052280 17.405550 25.969190 0.746080 1.727029 37.485567 -0.226150 1.678699 -28.280301 0.613200
A06 mySession ETH-2 -3.807 24.921 -6.067060 -4.877100 -11.699270 -10.644210 1.612340 -10.173599 19.845192 -0.683054 -0.922832 17.861363 0.210328
A07 mySession ETH-1 -3.807 24.921 5.788210 11.559100 16.801910 24.564230 1.479630 2.009281 36.970298 -0.591129 1.282632 -26.888335 0.195926
A08 mySession MYSAMPLE-2 -3.807 24.921 -3.876920 4.868890 0.521850 10.403900 1.070320 -8.173486 30.011134 -0.245768 0.636159 -4.324964 0.661803
––– ––––––––– –––––––––– ––––––––––– –––––––––––– ––––––––– ––––––––– –––––––––– –––––––––– –––––––– –––––––––– –––––––––– ––––––––– ––––––––– –––––––––– ––––––––
```
| PypiClean |
/Mopidy-3.4.1-py3-none-any.whl/mopidy/models/fields.py | import sys
class Field:
    """
    Base field for use in
    :class:`~mopidy.models.immutable.ValidatedImmutableObject`. Fields take
    care of type checking and other sanitation of the values stored in our
    models.

    Implemented with the Python descriptor protocol; values live in the
    owning instance's dictionary under ``"_" + name``, so fields are only as
    immutable as the object they are attached to. A default value, when
    given, is validated up front (unless it is :class:`None`).

    :param default: default value for field
    :param type: if set the field value must be of this type
    :param choices: if set the field value must be one of these
    """

    def __init__(self, default=None, type=None, choices=None):
        self._name = None  # Set by ValidatedImmutableObjectMeta
        self._choices = choices
        self._default = default
        self._type = type
        if self._default is not None:
            self.validate(self._default)

    def validate(self, value):
        """Validate and possibly modify the field value before assignment"""
        wrong_type = self._type and not isinstance(value, self._type)
        if wrong_type:
            raise TypeError(
                f"Expected {self._name} to be a {self._type}, not {value!r}"
            )
        if self._choices and value not in self._choices:
            raise TypeError(
                f"Expected {self._name} to be a one of {self._choices}, not {value!r}"
            )
        return value

    def __get__(self, instance, owner):
        if not instance:
            # Class-level access returns the descriptor itself.
            return self
        storage_attr = "_" + self._name
        return getattr(instance, storage_attr, self._default)

    def __set__(self, instance, value):
        validated = value if value is None else self.validate(value)
        if validated is None or validated == self._default:
            # Default-equal values are not stored; absence means "use default".
            self.__delete__(instance)
        else:
            setattr(instance, "_" + self._name, validated)

    def __delete__(self, instance):
        storage_attr = "_" + self._name
        if hasattr(instance, storage_attr):
            delattr(instance, storage_attr)
class String(Field):
    """
    Specialized :class:`Field` which is wired up for bytes and unicode.

    :param default: default value for field
    """

    def __init__(self, default=None):
        # Open questions carried over from the original implementation:
        # TODO: normalize to unicode?
        # TODO: only allow unicode?
        # TODO: disallow empty strings?
        super().__init__(default=default, type=str)
class Date(String):
    """
    :class:`Field` for storing ISO 8601 dates as a string.
    Supported formats are ``YYYY-MM-DD``, ``YYYY-MM`` and ``YYYY``, currently
    not validated.
    :param default: default value for field
    """

    # Only String's str-type check applies for now; format validation is
    # still an open task.
    pass  # TODO: make this check for YYYY-MM-DD, YYYY-MM, YYYY using strptime.
class Identifier(String):
    """
    :class:`Field` for storing values such as GUIDs or other identifiers.

    Values will be interned, so equal identifiers share a single string
    object (cheaper comparisons, less memory for duplicates).

    :param default: default value for field
    """

    def validate(self, value):
        checked = super().validate(value)
        if isinstance(checked, bytes):
            checked = checked.decode()
        return sys.intern(checked)
class URI(Identifier):
    """
    :class:`Field` for storing URIs
    Values will be interned, currently not validated.
    :param default: default value for field
    """

    # Inherits interning behavior from Identifier; adds no checks of its own.
    pass  # TODO: validate URIs?
class Integer(Field):
    """
    :class:`Field` for storing integer numbers.

    :param default: default value for field
    :param min: field value must be larger or equal to this value when set
    :param max: field value must be smaller or equal to this value when set
    """

    def __init__(self, default=None, min=None, max=None):
        # Bounds must be set before the base __init__ runs, since it
        # validates the default value.
        self._min = min
        self._max = max
        super().__init__(type=int, default=default)

    def validate(self, value):
        checked = super().validate(value)
        below_min = self._min is not None and checked < self._min
        if below_min:
            raise ValueError(
                f"Expected {self._name} to be at least {self._min}, not {checked:d}"
            )
        above_max = self._max is not None and checked > self._max
        if above_max:
            raise ValueError(
                f"Expected {self._name} to be at most {self._max}, not {checked:d}"
            )
        return checked
class Boolean(Field):
    """
    :class:`Field` for storing boolean values
    :param default: default value for field
    """

    def __init__(self, default=None):
        # Plain ints are rejected because Field.validate requires
        # isinstance(value, bool).
        super().__init__(type=bool, default=default)
class Collection(Field):
    """
    :class:`Field` for storing collections of a given type.

    :param type: all items stored in the collection must be of this type
    :param container: the type to store the items in
    """

    def __init__(self, type, container=tuple):
        super().__init__(type=type, default=container())

    def validate(self, value):
        # A plain string is iterable but is never accepted as a collection,
        # and every item must be an instance of the declared item type.
        if isinstance(value, str) or not all(
                isinstance(item, self._type) for item in value):
            raise TypeError(
                f"Expected {self._name} to be a collection of "
                f"{self._type.__name__}, not {value!r}"
            )
        # Coerce into the configured container; an empty collection is
        # normalized to None (i.e. "unset", falling back to the default).
        return self._default.__class__(value) or None
/CleanAdminDjango-1.5.3.1.tar.gz/CleanAdminDjango-1.5.3.1/django/utils/unittest/case.py |
import sys
import difflib
import pprint
import re
import unittest
import warnings
from django.utils.unittest import result
from django.utils.unittest.util import\
safe_repr, safe_str, strclass,\
unorderable_list_difference
from django.utils.unittest.compatibility import wraps
# NOTE: unittest looks for this module-level flag when cleaning tracebacks,
# so frames from this module are hidden in reported test failures.
__unittest = True

# Message appended to failure output when a generated diff exceeds
# TestCase.maxDiff (used by the assert*Equal helpers later in the module).
DIFF_OMITTED = ('\nDiff is %s characters long. '
                'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
    """
    Raise this exception in a test to skip it.

    Usually you can use TestResult.skip() or one of the skipping decorators
    instead of raising this directly.  TestCase.run() catches it and reports
    the test as skipped rather than errored.
    """
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
# can't use super because Python 2.4 exceptions are old style
Exception.__init__(self)
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
    """
    The test was supposed to fail, but it didn't!

    Raised by the expectedFailure wrapper when the wrapped test passes;
    TestCase.run() converts it into an unexpected-success report.
    """
def _id(obj):
    # Identity decorator: returned by skipIf/skipUnless when the condition
    # does not apply, leaving the decorated test item unchanged.
    return obj
def skip(reason):
    """
    Unconditionally skip a test.
    """
    def decorator(test_item):
        is_test_class = isinstance(test_item, type) and issubclass(test_item, TestCase)
        if not is_test_class:
            # Plain functions/methods are replaced by a wrapper that raises
            # SkipTest; TestCase subclasses are only flagged.
            @wraps(test_item)
            def skip_wrapper(*args, **kwargs):
                raise SkipTest(reason)
            test_item = skip_wrapper
        test_item.__unittest_skip__ = True
        test_item.__unittest_skip_why__ = reason
        return test_item
    return decorator
def skipIf(condition, reason):
    """
    Skip a test if the condition is true.
    """
    # When the condition is false, return the identity decorator.
    return skip(reason) if condition else _id
def skipUnless(condition, reason):
    """
    Skip a test unless the condition is true.
    """
    # When the condition holds, return the identity decorator.
    return _id if condition else skip(reason)
def expectedFailure(func):
    # Mark *func* as expected to fail: an exception from it is wrapped in
    # _ExpectedFailure (reported via addExpectedFailure by run()), while a
    # clean pass raises _UnexpectedSuccess so the run is reported as such.
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            func(*args, **kwargs)
        except Exception:
            raise _ExpectedFailure(sys.exc_info())
        raise _UnexpectedSuccess
    return wrapper
class _AssertRaisesContext(object):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __init__(self, expected, test_case, expected_regexp=None):
self.expected = expected
self.failureException = test_case.failureException
self.expected_regexp = expected_regexp
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise self.failureException(
"%s not raised" % (exc_name,))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
self.exception = exc_value # store for later retrieval
if self.expected_regexp is None:
return True
expected_regexp = self.expected_regexp
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
return True
class _TypeEqualityDict(object):
def __init__(self, testcase):
self.testcase = testcase
self._store = {}
def __setitem__(self, key, value):
self._store[key] = value
def __getitem__(self, key):
value = self._store[key]
if isinstance(value, basestring):
return getattr(self.testcase, value)
return value
def get(self, key, default=None):
if key in self._store:
return self[key]
return default
class TestCase(unittest.TestCase):
    """A class whose instances are single test cases.

    By default, the test code itself should be placed in a method named
    'runTest'.

    If the fixture may be used for many test cases, create as
    many test methods as are needed. When instantiating such a TestCase
    subclass, specify in the constructor arguments the name of the test method
    that the instance is to execute.

    Test authors should subclass TestCase for their own tests. Construction
    and deconstruction of the test's environment ('fixture') can be
    implemented by overriding the 'setUp' and 'tearDown' methods respectively.

    If it is necessary to override the __init__ method, the base class
    __init__ method must always be called. It is important that subclasses
    should not change the signature of their __init__ method, since instances
    of the classes are instantiated automatically by parts of the framework
    in order to be run.
    """

    # This attribute determines which exception will be raised when
    # the instance's assertion methods fail; test methods raising this
    # exception will be deemed to have 'failed' rather than 'errored'
    failureException = AssertionError

    # This attribute sets the maximum length of a diff in failure messages
    # by assert methods using difflib. It is looked up as an instance attribute
    # so can be configured by individual tests if required.
    maxDiff = 80*8

    # This attribute determines whether long messages (including repr of
    # objects used in assert methods) will be printed on failure in *addition*
    # to any explicit message passed.
    longMessage = True

    # Attribute used by TestSuite for classSetUp
    _classSetupFailed = False
    def __init__(self, methodName='runTest'):
        """Create an instance of the class that will use the named test
           method when executed. Raises a ValueError if the instance does
           not have a method with the specified name.
        """
        self._testMethodName = methodName
        self._resultForDoCleanups = None
        try:
            testMethod = getattr(self, methodName)
        except AttributeError:
            raise ValueError("no such test method in %s: %s" % \
                  (self.__class__, methodName))
        # The docstring is cached so shortDescription() works even after
        # the method object is gone.
        self._testMethodDoc = testMethod.__doc__
        self._cleanups = []

        # Map types to custom assertEqual functions that will compare
        # instances of said type in more detail to generate a more useful
        # error message.  Registered by *name* so subclasses can override
        # the comparison methods.
        self._type_equality_funcs = _TypeEqualityDict(self)
        self.addTypeEqualityFunc(dict, 'assertDictEqual')
        self.addTypeEqualityFunc(list, 'assertListEqual')
        self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
        self.addTypeEqualityFunc(set, 'assertSetEqual')
        self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
        self.addTypeEqualityFunc(unicode, 'assertMultiLineEqual')
    def addTypeEqualityFunc(self, typeobj, function):
        """Add a type specific assertEqual style function to compare a type.

        This method is for use by TestCase subclasses that need to register
        their own type equality functions to provide nicer error messages.

        Args:
            typeobj: The data type to call this function on when both values
                    are of the same type in assertEqual().
            function: The callable taking two arguments and an optional
                    msg= argument that raises self.failureException with a
                    useful error message when the two arguments are not equal.
                    May also be given as a method-name string, resolved
                    against this instance on lookup.
        """
        self._type_equality_funcs[typeobj] = function
    def addCleanup(self, function, *args, **kwargs):
        """Add a function, with arguments, to be called when the test is
        completed. Functions added are called on a LIFO basis and are
        called after tearDown on test failure or success.

        Cleanup items are called even if setUp fails (unlike tearDown).
        See doCleanups() for the execution logic."""
        self._cleanups.append((function, args, kwargs))
    @classmethod
    def setUpClass(cls):
        "Hook method for setting up class fixture before running tests in the class. Default is a no-op."
    @classmethod
    def tearDownClass(cls):
        "Hook method for deconstructing the class fixture after running all tests in the class. Default is a no-op."
    def countTestCases(self):
        # A TestCase instance always represents exactly one test.
        return 1
    def defaultTestResult(self):
        # Result object used by run() when the caller passes none.
        return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
    def id(self):
        # Dotted identifier for this test: qualified class name (via
        # strclass) plus the test method name.
        return "%s.%s" % (strclass(self.__class__), self._testMethodName)
    def __eq__(self, other):
        # Equal only to an instance of the *same* class running the same
        # test method; other types defer via NotImplemented.
        if type(self) is not type(other):
            return NotImplemented

        return self._testMethodName == other._testMethodName
    def __ne__(self, other):
        # Python 2 does not derive != from __eq__, so define it explicitly.
        return not self == other
    def __hash__(self):
        # Consistent with __eq__: hash on (class, test method name).
        return hash((type(self), self._testMethodName))
    def __str__(self):
        # Human-readable form, e.g. "test_foo (package.module.MyTest)".
        return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
    def __repr__(self):
        # Debug form, e.g. "<package.module.MyTest testMethod=test_foo>".
        return "<%s testMethod=%s>" % \
               (strclass(self.__class__), self._testMethodName)
    def _addSkip(self, result, reason):
        # Report a skip on the result object.  Prefer the addSkip API; fall
        # back to addSuccess (with a deprecation warning) for legacy result
        # objects that predate skip support.
        addSkip = getattr(result, 'addSkip', None)
        if addSkip is not None:
            addSkip(self, reason)
        else:
            warnings.warn("Use of a TestResult without an addSkip method is deprecated",
                          DeprecationWarning, 2)
            result.addSuccess(self)
    def run(self, result=None):
        """Run this test, collecting the outcome into *result*.

        If *result* is None, a default result is created and its
        startTestRun/stopTestRun hooks (when present) are invoked around the
        run.  Handles skips, expected failures/unexpected successes, setUp and
        tearDown errors, and always executes registered cleanups.
        """
        orig_result = result
        if result is None:
            result = self.defaultTestResult()
            # Only drive the run-level hooks when we own the result object.
            startTestRun = getattr(result, 'startTestRun', None)
            if startTestRun is not None:
                startTestRun()

        self._resultForDoCleanups = result
        result.startTest(self)

        testMethod = getattr(self, self._testMethodName)

        if (getattr(self.__class__, "__unittest_skip__", False) or
            getattr(testMethod, "__unittest_skip__", False)):
            # If the class or method was skipped.
            try:
                skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
                            or getattr(testMethod, '__unittest_skip_why__', ''))
                self._addSkip(result, skip_why)
            finally:
                result.stopTest(self)
            return
        try:
            success = False
            try:
                self.setUp()
            except SkipTest as e:
                self._addSkip(result, str(e))
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                # setUp succeeded: run the test method itself.
                try:
                    testMethod()
                except self.failureException:
                    result.addFailure(self, sys.exc_info())
                except _ExpectedFailure as e:
                    # Raised by the expectedFailure decorator wrapper.
                    addExpectedFailure = getattr(result, 'addExpectedFailure', None)
                    if addExpectedFailure is not None:
                        addExpectedFailure(self, e.exc_info)
                    else:
                        warnings.warn("Use of a TestResult without an addExpectedFailure method is deprecated",
                                      DeprecationWarning)
                        result.addSuccess(self)
                except _UnexpectedSuccess:
                    # expectedFailure test passed when it should not have.
                    addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
                    if addUnexpectedSuccess is not None:
                        addUnexpectedSuccess(self)
                    else:
                        warnings.warn("Use of a TestResult without an addUnexpectedSuccess method is deprecated",
                                      DeprecationWarning)
                        result.addFailure(self, sys.exc_info())
                except SkipTest as e:
                    self._addSkip(result, str(e))
                except Exception:
                    result.addError(self, sys.exc_info())
                else:
                    success = True

                # tearDown runs even when the test failed/errored above.
                try:
                    self.tearDown()
                except Exception:
                    result.addError(self, sys.exc_info())
                    success = False

            # Cleanups run even when setUp failed.
            cleanUpSuccess = self.doCleanups()
            success = success and cleanUpSuccess
            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self)
            if orig_result is None:
                stopTestRun = getattr(result, 'stopTestRun', None)
                if stopTestRun is not None:
                    stopTestRun()
    def doCleanups(self):
        """Execute all cleanup functions. Normally called for you after
        tearDown.

        Cleanups run in LIFO order; each error is recorded on the result
        object (set by run()) but does not stop the remaining cleanups.
        Returns True when every cleanup succeeded."""
        result = self._resultForDoCleanups
        ok = True
        while self._cleanups:
            function, args, kwargs = self._cleanups.pop(-1)
            try:
                function(*args, **kwargs)
            except Exception:
                ok = False
                result.addError(self, sys.exc_info())
        return ok
    def __call__(self, *args, **kwds):
        # Calling a TestCase instance is equivalent to invoking run().
        return self.run(*args, **kwds)
    def debug(self):
        """Run the test without collecting errors in a TestResult.

        Exceptions propagate to the caller, which makes failures easier to
        inspect in a debugger.  Cleanups still run (in LIFO order)."""
        self.setUp()
        getattr(self, self._testMethodName)()
        self.tearDown()
        while self._cleanups:
            function, args, kwargs = self._cleanups.pop(-1)
            function(*args, **kwargs)
    def skipTest(self, reason):
        """Skip this test by raising SkipTest; run() reports it as skipped."""
        raise SkipTest(reason)
    def fail(self, msg=None):
        """Fail immediately, with the given message."""
        raise self.failureException(msg)
    def assertFalse(self, expr, msg=None):
        "Fail the test if the expression is true (truthiness test, not identity with False)."
        if expr:
            msg = self._formatMessage(msg, "%s is not False" % safe_repr(expr))
            raise self.failureException(msg)
    def assertTrue(self, expr, msg=None):
        """Fail the test unless the expression is true (truthiness test, not identity with True)."""
        if not expr:
            msg = self._formatMessage(msg, "%s is not True" % safe_repr(expr))
            raise self.failureException(msg)
    def _formatMessage(self, msg, standardMsg):
        """Honour the longMessage attribute when generating failure messages.
        If longMessage is False this means:
        * Use only an explicit message if it is provided
        * Otherwise use the standard message for the assert

        If longMessage is True:
        * Use the standard message
        * If an explicit message is provided, plus ' : ' and the explicit message
        """
        if not self.longMessage:
            return msg or standardMsg
        if msg is None:
            return standardMsg
        try:
            return '%s : %s' % (standardMsg, msg)
        except UnicodeDecodeError:
            # Mixed byte/unicode messages can fail to interpolate on
            # Python 2; fall back to a safe representation of both parts.
            return '%s : %s' % (safe_str(standardMsg), safe_str(msg))
    def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
        """Fail unless an exception of class excClass is thrown
           by callableObj when invoked with arguments args and keyword
           arguments kwargs. If a different type of exception is
           thrown, it will not be caught, and the test case will be
           deemed to have suffered an error, exactly as for an
           unexpected exception.

           If called with callableObj omitted or None, will return a
           context object used like this::

                with self.assertRaises(SomeException):
                    do_something()

           The context manager keeps a reference to the exception as
           the 'exception' attribute. This allows you to inspect the
           exception after the assertion::

               with self.assertRaises(SomeException) as cm:
                   do_something()
               the_exception = cm.exception
               self.assertEqual(the_exception.error_code, 3)
        """
        if callableObj is None:
            # Context-manager form.
            return _AssertRaisesContext(excClass, self)
        try:
            callableObj(*args, **kwargs)
        except excClass:
            return

        # excClass may be a tuple of exception classes, which has no
        # __name__; fall back to its string form for the failure message.
        if hasattr(excClass,'__name__'):
            excName = excClass.__name__
        else:
            excName = str(excClass)
        raise self.failureException("%s not raised" % excName)
    def _getAssertEqualityFunc(self, first, second):
        """Get a detailed comparison function for the types of the two args.

        Returns: A callable accepting (first, second, msg=None) that will
        raise a failure exception if first != second with a useful human
        readable error message for those types.
        """
        #
        # NOTE(gregory.p.smith): I considered isinstance(first, type(second))
        # and vice versa.  I opted for the conservative approach in case
        # subclasses are not intended to be compared in detail to their super
        # class instances using a type equality func.  This means testing
        # subtypes won't automagically use the detailed comparison.  Callers
        # should use their type specific assertSpamEqual method to compare
        # subclasses if the detailed comparison is desired and appropriate.
        # See the discussion in http://bugs.python.org/issue2578.
        #
        if type(first) is type(second):
            asserter = self._type_equality_funcs.get(type(first))
            if asserter is not None:
                return asserter

        # Different (or unregistered) types fall back to plain '=='.
        return self._baseAssertEqual
    def _baseAssertEqual(self, first, second, msg=None):
        """The default assertEqual implementation, not type specific."""
        if not first == second:
            standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
            msg = self._formatMessage(msg, standardMsg)
            raise self.failureException(msg)
    def assertEqual(self, first, second, msg=None):
        """Fail if the two objects are unequal as determined by the '=='
           operator.  Uses a type-specific comparison (registered via
           addTypeEqualityFunc) when both arguments share the same type.
        """
        assertion_func = self._getAssertEqualityFunc(first, second)
        assertion_func(first, second, msg=msg)
    def assertNotEqual(self, first, second, msg=None):
        """Fail if the two objects are equal as determined by the '=='
           operator.  (Tests '!=' explicitly, so types with inconsistent
           __eq__/__ne__ are compared via '!='.)
        """
        if not first != second:
            msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
                                                          safe_repr(second)))
            raise self.failureException(msg)
    def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
        """Fail if the two objects are unequal as determined by their
           difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is more than the given delta.

           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).

           If the two objects compare equal then they will automatically
           compare almost equal.  ``places`` and ``delta`` are mutually
           exclusive.
        """
        if first == second:
            # shortcut
            return
        if delta is not None and places is not None:
            raise TypeError("specify delta or places not both")

        if delta is not None:
            if abs(first - second) <= delta:
                return

            standardMsg = '%s != %s within %s delta' % (safe_repr(first),
                                                        safe_repr(second),
                                                        safe_repr(delta))
        else:
            if places is None:
                places = 7

            if round(abs(second-first), places) == 0:
                return

            standardMsg = '%s != %s within %r places' % (safe_repr(first),
                                                          safe_repr(second),
                                                          places)
        msg = self._formatMessage(msg, standardMsg)
        raise self.failureException(msg)
    def assertNotAlmostEqual(self, first, second, places=None, msg=None, delta=None):
        """Fail if the two objects are equal as determined by their
           difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is less than the given delta.

           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).

           Objects that are equal automatically fail.  ``places`` and
           ``delta`` are mutually exclusive.
        """
        if delta is not None and places is not None:
            raise TypeError("specify delta or places not both")
        if delta is not None:
            if not (first == second) and abs(first - second) > delta:
                return
            standardMsg = '%s == %s within %s delta' % (safe_repr(first),
                                                        safe_repr(second),
                                                        safe_repr(delta))
        else:
            if places is None:
                places = 7
            if not (first == second) and round(abs(second-first), places) != 0:
                return
            standardMsg = '%s == %s within %r places' % (safe_repr(first),
                                                         safe_repr(second),
                                                         places)

        msg = self._formatMessage(msg, standardMsg)
        raise self.failureException(msg)
    # Synonyms for assertion methods

    # The plurals are undocumented.  Keep them that way to discourage use.
    # Do not add more.  Do not remove.
    # Going through a deprecation cycle on these would annoy many people.
    assertEquals = assertEqual
    assertNotEquals = assertNotEqual
    assertAlmostEquals = assertAlmostEqual
    assertNotAlmostEquals = assertNotAlmostEqual
    assert_ = assertTrue

    # These fail* assertion method names are pending deprecation and will
    # be a DeprecationWarning in 3.2; http://bugs.python.org/issue2578
    def _deprecate(original_func):
        # Wraps an assert method so that calling the old fail* alias emits a
        # PendingDeprecationWarning pointing at the modern name.
        def deprecated_func(*args, **kwargs):
            warnings.warn(
                ('Please use %s instead.' % original_func.__name__),
                PendingDeprecationWarning, 2)
            return original_func(*args, **kwargs)
        return deprecated_func

    failUnlessEqual = _deprecate(assertEqual)
    failIfEqual = _deprecate(assertNotEqual)
    failUnlessAlmostEqual = _deprecate(assertAlmostEqual)
    failIfAlmostEqual = _deprecate(assertNotAlmostEqual)
    failUnless = _deprecate(assertTrue)
    failUnlessRaises = _deprecate(assertRaises)
    failIf = _deprecate(assertFalse)
def assertSequenceEqual(self, seq1, seq2,
                        msg=None, seq_type=None, max_diff=80*8):
    """An equality assertion for ordered sequences (like lists and tuples).

    For the purposes of this function, a valid ordered sequence type is one
    which can be indexed, has a length, and has an equality operator.

    Args:
        seq1: The first sequence to compare.
        seq2: The second sequence to compare.
        seq_type: The expected datatype of the sequences, or None if no
            datatype should be enforced.
        msg: Optional message to use on failure instead of a list of
            differences.
        max_diff: Maximum size of the diff, larger diffs are not shown

    NOTE: Python 2 code (uses ``xrange``).
    """
    # Optionally enforce that both arguments are of the expected type
    # before comparing contents.
    if seq_type is not None:
        seq_type_name = seq_type.__name__
        if not isinstance(seq1, seq_type):
            raise self.failureException('First sequence is not a %s: %s'
                                        % (seq_type_name, safe_repr(seq1)))
        if not isinstance(seq2, seq_type):
            raise self.failureException('Second sequence is not a %s: %s'
                                        % (seq_type_name, safe_repr(seq2)))
    else:
        seq_type_name = "sequence"

    # Probe both arguments for a length; objects without len() are
    # reported as non-sequences instead of letting the error propagate.
    differing = None
    try:
        len1 = len(seq1)
    except (TypeError, NotImplementedError):
        differing = 'First %s has no length. Non-sequence?' % (
            seq_type_name)

    if differing is None:
        try:
            len2 = len(seq2)
        except (TypeError, NotImplementedError):
            differing = 'Second %s has no length. Non-sequence?' % (
                seq_type_name)

    if differing is None:
        # Fast path: equal sequences need no report.
        if seq1 == seq2:
            return

        # Truncated reprs for the headline of the failure message.
        seq1_repr = repr(seq1)
        seq2_repr = repr(seq2)
        if len(seq1_repr) > 30:
            seq1_repr = seq1_repr[:30] + '...'
        if len(seq2_repr) > 30:
            seq2_repr = seq2_repr[:30] + '...'
        elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
        differing = '%ss differ: %s != %s\n' % elements

        # Walk the common prefix looking for the first differing element;
        # break as soon as one is found (or an element cannot be indexed).
        for i in xrange(min(len1, len2)):
            try:
                item1 = seq1[i]
            except (TypeError, IndexError, NotImplementedError):
                differing += ('\nUnable to index element %d of first %s\n' %
                              (i, seq_type_name))
                break

            try:
                item2 = seq2[i]
            except (TypeError, IndexError, NotImplementedError):
                differing += ('\nUnable to index element %d of second %s\n' %
                              (i, seq_type_name))
                break

            if item1 != item2:
                differing += ('\nFirst differing element %d:\n%s\n%s\n' %
                              (i, item1, item2))
                break
        else:
            # for-else: no element differed in the common prefix.
            if (len1 == len2 and seq_type is None and
                type(seq1) != type(seq2)):
                # The sequences are the same, but have differing types.
                return

            # Report trailing elements of whichever sequence is longer.
            if len1 > len2:
                differing += ('\nFirst %s contains %d additional '
                              'elements.\n' % (seq_type_name, len1 - len2))
                try:
                    differing += ('First extra element %d:\n%s\n' %
                                  (len2, seq1[len2]))
                except (TypeError, IndexError, NotImplementedError):
                    differing += ('Unable to index element %d '
                                  'of first %s\n' % (len2, seq_type_name))
            elif len1 < len2:
                differing += ('\nSecond %s contains %d additional '
                              'elements.\n' % (seq_type_name, len2 - len1))
                try:
                    differing += ('First extra element %d:\n%s\n' %
                                  (len1, seq2[len1]))
                except (TypeError, IndexError, NotImplementedError):
                    differing += ('Unable to index element %d '
                                  'of second %s\n' % (len1, seq_type_name))

    # Append a full ndiff of the pretty-printed sequences (possibly
    # truncated via self.maxDiff) and fail.
    standardMsg = differing
    diffMsg = '\n' + '\n'.join(
        difflib.ndiff(pprint.pformat(seq1).splitlines(),
                      pprint.pformat(seq2).splitlines()))
    standardMsg = self._truncateMessage(standardMsg, diffMsg)
    msg = self._formatMessage(msg, standardMsg)
    self.fail(msg)
def _truncateMessage(self, message, diff):
    """Append *diff* to *message*, unless the diff is longer than
    ``self.maxDiff``; in that case substitute the DIFF_OMITTED notice."""
    limit = self.maxDiff
    if limit is not None and len(diff) > limit:
        return message + (DIFF_OMITTED % len(diff))
    return message + diff
def assertListEqual(self, list1, list2, msg=None):
    """Check that two lists are equal.

    Delegates to assertSequenceEqual with ``seq_type=list``, so both
    arguments must actually be lists and element order is significant.

    Args:
        list1: The first list to compare.
        list2: The second list to compare.
        msg: Optional message to use on failure instead of a list of
            differences.
    """
    self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
    """Check that two tuples are equal.

    Delegates to assertSequenceEqual with ``seq_type=tuple``, so both
    arguments must actually be tuples and element order is significant.

    Args:
        tuple1: The first tuple to compare.
        tuple2: The second tuple to compare.
        msg: Optional message to use on failure instead of a list of
            differences.
    """
    self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
    """Check that two sets are equal.

    Uses duck typing so any set-like objects work, as long as they
    implement a ``difference`` method; the failure message lists the
    elements unique to each side.

    Args:
        set1: The first set to compare.
        set2: The second set to compare.
        msg: Optional message to use on failure instead of a list of
            differences.
    """
    # Compute both one-sided differences, turning type/protocol errors
    # into immediate test failures with a descriptive message.
    diffs = []
    for label, a, b in (('first', set1, set2), ('second', set2, set1)):
        try:
            diffs.append(a.difference(b))
        except TypeError as e:
            self.fail('invalid type when attempting set difference: %s' % e)
        except AttributeError as e:
            self.fail('%s argument does not support set difference: %s' % (label, e))
    difference1, difference2 = diffs

    if not (difference1 or difference2):
        return

    lines = []
    if difference1:
        lines.append('Items in the first set but not the second:')
        lines.extend(repr(item) for item in difference1)
    if difference2:
        lines.append('Items in the second set but not the first:')
        lines.extend(repr(item) for item in difference2)
    self.fail(self._formatMessage(msg, '\n'.join(lines)))
def assertIn(self, member, container, msg=None):
    """Check ``member in container``, failing with an informative message."""
    if member in container:
        return
    detail = '%s not found in %s' % (safe_repr(member),
                                     safe_repr(container))
    self.fail(self._formatMessage(msg, detail))
def assertNotIn(self, member, container, msg=None):
    """Check ``member not in container``, failing with an informative message."""
    if member not in container:
        return
    detail = '%s unexpectedly found in %s' % (safe_repr(member),
                                              safe_repr(container))
    self.fail(self._formatMessage(msg, detail))
def assertIs(self, expr1, expr2, msg=None):
    """Just like self.assertTrue(a is b), but with a nicer default message."""
    # Identity (object) comparison, not equality.
    if expr1 is not expr2:
        standardMsg = '%s is not %s' % (safe_repr(expr1), safe_repr(expr2))
        self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
    """Just like self.assertTrue(a is not b), but with a nicer default message."""
    # Identity (object) comparison, not equality.
    if expr1 is expr2:
        standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
        self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
    """Check that two dicts are equal, showing a pretty-printed ndiff of
    the two dictionaries on failure (truncated via self.maxDiff)."""
    self.assertTrue(isinstance(d1, dict), 'First argument is not a dictionary')
    self.assertTrue(isinstance(d2, dict), 'Second argument is not a dictionary')
    if d1 != d2:
        summary = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
        ndiff_lines = difflib.ndiff(pprint.pformat(d1).splitlines(),
                                    pprint.pformat(d2).splitlines())
        summary = self._truncateMessage(summary, '\n' + '\n'.join(ndiff_lines))
        self.fail(self._formatMessage(msg, summary))
def assertDictContainsSubset(self, expected, actual, msg=None):
    """Checks whether actual is a superset of expected.

    Fails listing keys of *expected* missing from *actual* and keys whose
    values differ between the two.
    """
    missing = []
    mismatched = []
    # NOTE: dict.iteritems() is Python 2 only.
    for key, value in expected.iteritems():
        if key not in actual:
            missing.append(key)
        elif value != actual[key]:
            mismatched.append('%s, expected: %s, actual: %s' %
                              (safe_repr(key), safe_repr(value),
                               safe_repr(actual[key])))

    if not (missing or mismatched):
        return

    # Build a single message combining both kinds of discrepancy.
    standardMsg = ''
    if missing:
        standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
                                               missing)
    if mismatched:
        if standardMsg:
            standardMsg += '; '
        standardMsg += 'Mismatched values: %s' % ','.join(mismatched)

    self.fail(self._formatMessage(msg, standardMsg))
def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
    """An unordered sequence specific comparison. It asserts that
    expected_seq and actual_seq contain the same elements. It is
    the equivalent of::

        self.assertEqual(sorted(expected_seq), sorted(actual_seq))

    Raises with an error message listing which elements of expected_seq
    are missing from actual_seq and vice versa if any.

    Asserts that each element has the same count in both sequences.
    Example:
        - [0, 1, 1] and [1, 0, 1] compare equal.
        - [0, 0, 1] and [0, 1] compare unequal.
    """
    try:
        expected = sorted(expected_seq)
        actual = sorted(actual_seq)
    except TypeError:
        # Unsortable items (example: set(), complex(), ...)
        # Fall back to an O(n*m) element-by-element comparison.
        expected = list(expected_seq)
        actual = list(actual_seq)
        missing, unexpected = unorderable_list_difference(
            expected, actual, ignore_duplicate=False
        )
    else:
        # Sortable: an ordered comparison of the sorted copies suffices.
        return self.assertSequenceEqual(expected, actual, msg=msg)

    errors = []
    if missing:
        errors.append('Expected, but missing:\n    %s' %
                      safe_repr(missing))
    if unexpected:
        errors.append('Unexpected, but present:\n    %s' %
                      safe_repr(unexpected))
    if errors:
        standardMsg = '\n'.join(errors)
        self.fail(self._formatMessage(msg, standardMsg))
def assertMultiLineEqual(self, first, second, msg=None):
    """Assert that two multi-line strings are equal.

    On failure, shows a line-by-line ndiff of the two strings.
    NOTE: ``basestring`` is Python 2 only.
    """
    self.assertTrue(isinstance(first, basestring), (
        'First argument is not a string'))
    self.assertTrue(isinstance(second, basestring), (
        'Second argument is not a string'))

    if first != second:
        standardMsg = '%s != %s' % (safe_repr(first, True), safe_repr(second, True))
        # splitlines(True) keeps line endings so the diff reproduces them.
        diff = '\n' + ''.join(difflib.ndiff(first.splitlines(True),
                                            second.splitlines(True)))
        standardMsg = self._truncateMessage(standardMsg, diff)
        self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
    """Check that ``a < b``; on failure report both values."""
    if a < b:
        return
    failure_detail = '%s not less than %s' % (safe_repr(a), safe_repr(b))
    self.fail(self._formatMessage(msg, failure_detail))
def assertLessEqual(self, a, b, msg=None):
    """Check that ``a <= b``; on failure report both values."""
    if a <= b:
        return
    failure_detail = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
    self.fail(self._formatMessage(msg, failure_detail))
def assertGreater(self, a, b, msg=None):
    """Check that ``a > b``; on failure report both values."""
    if a > b:
        return
    failure_detail = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
    self.fail(self._formatMessage(msg, failure_detail))
def assertGreaterEqual(self, a, b, msg=None):
    """Check that ``a >= b``; on failure report both values."""
    if a >= b:
        return
    failure_detail = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
    self.fail(self._formatMessage(msg, failure_detail))
def assertIsNone(self, obj, msg=None):
    """Fail unless *obj* is None (identity check)."""
    if obj is None:
        return
    self.fail(self._formatMessage(msg, '%s is not None' % (safe_repr(obj),)))
def assertIsNotNone(self, obj, msg=None):
    """Fail if *obj* is None; mirror of assertIsNone."""
    if obj is not None:
        return
    self.fail(self._formatMessage(msg, 'unexpectedly None'))
def assertIsInstance(self, obj, cls, msg=None):
    """Fail unless ``isinstance(obj, cls)`` holds."""
    if isinstance(obj, cls):
        return
    detail = '%s is not an instance of %r' % (safe_repr(obj), cls)
    self.fail(self._formatMessage(msg, detail))
def assertNotIsInstance(self, obj, cls, msg=None):
    """Fail if ``isinstance(obj, cls)`` holds; mirror of assertIsInstance."""
    if not isinstance(obj, cls):
        return
    detail = '%s is an instance of %r' % (safe_repr(obj), cls)
    self.fail(self._formatMessage(msg, detail))
def assertRaisesRegexp(self, expected_exception, expected_regexp,
                       callable_obj=None, *args, **kwargs):
    """Asserts that the message in a raised exception matches a regexp.

    Args:
        expected_exception: Exception class expected to be raised.
        expected_regexp: Regexp (re pattern object or string) expected
            to be found in error message.
        callable_obj: Function to be called.
        args: Extra args.
        kwargs: Extra kwargs.

    NOTE: ``basestring`` is Python 2 only.
    """
    # Without a callable, act as a context manager for with-statement use.
    if callable_obj is None:
        return _AssertRaisesContext(expected_exception, self, expected_regexp)
    try:
        callable_obj(*args, **kwargs)
    except expected_exception as exc_value:
        # Exception raised as expected; now check its string form
        # against the pattern.
        if isinstance(expected_regexp, basestring):
            expected_regexp = re.compile(expected_regexp)
        if not expected_regexp.search(str(exc_value)):
            raise self.failureException('"%s" does not match "%s"' %
                                        (expected_regexp.pattern, str(exc_value)))
    else:
        # try-else: no exception was raised at all.
        if hasattr(expected_exception, '__name__'):
            excName = expected_exception.__name__
        else:
            excName = str(expected_exception)
        raise self.failureException("%s not raised" % excName)
def assertRegexpMatches(self, text, expected_regexp, msg=None):
    """Fail the test unless the text matches the regular expression.

    Accepts either a pattern string or a pre-compiled pattern object.
    NOTE: ``basestring`` is Python 2 only.
    """
    if isinstance(expected_regexp, basestring):
        expected_regexp = re.compile(expected_regexp)
    # search() (not match()): the pattern may occur anywhere in text.
    if not expected_regexp.search(text):
        msg = msg or "Regexp didn't match"
        msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
        raise self.failureException(msg)
def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
    """Fail the test if the text matches the regular expression.

    Accepts either a pattern string or a pre-compiled pattern object.
    NOTE: ``basestring`` is Python 2 only.
    """
    if isinstance(unexpected_regexp, basestring):
        unexpected_regexp = re.compile(unexpected_regexp)
    match = unexpected_regexp.search(text)
    if match:
        msg = msg or "Regexp matched"
        # Include the exact substring that matched for easier debugging.
        msg = '%s: %r matches %r in %r' % (msg,
                                           text[match.start():match.end()],
                                           unexpected_regexp.pattern,
                                           text)
        raise self.failureException(msg)
class FunctionTestCase(TestCase):
    """A test case that wraps a test function.

    This is useful for slipping pre-existing test functions into the
    unittest framework. Optionally, set-up and tidy-up functions can be
    supplied. As with TestCase, the tidy-up ('tearDown') function will
    always be called if the set-up ('setUp') function ran successfully.
    """

    def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
        super(FunctionTestCase, self).__init__()
        # User-supplied callables; setUp/tearDown/description may be None.
        self._setUpFunc = setUp
        self._tearDownFunc = tearDown
        self._testFunc = testFunc
        self._description = description

    def setUp(self):
        # Run the user-supplied set-up hook, if any.
        if self._setUpFunc is not None:
            self._setUpFunc()

    def tearDown(self):
        # Run the user-supplied tidy-up hook, if any.
        if self._tearDownFunc is not None:
            self._tearDownFunc()

    def runTest(self):
        self._testFunc()

    def id(self):
        # The wrapped function's name serves as the test id.
        return self._testFunc.__name__

    def __eq__(self, other):
        # Equal only to other FunctionTestCases wrapping the same four
        # callables/values.
        if not isinstance(other, self.__class__):
            return NotImplemented

        return self._setUpFunc == other._setUpFunc and \
               self._tearDownFunc == other._tearDownFunc and \
               self._testFunc == other._testFunc and \
               self._description == other._description

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # Consistent with __eq__: hash over the same four attributes.
        return hash((type(self), self._setUpFunc, self._tearDownFunc,
                     self._testFunc, self._description))

    def __str__(self):
        return "%s (%s)" % (strclass(self.__class__),
                            self._testFunc.__name__)

    def __repr__(self):
        return "<%s testFunc=%s>" % (strclass(self.__class__),
                                     self._testFunc)

    def shortDescription(self):
        if self._description is not None:
            return self._description
        # Fall back to the first line of the wrapped function's docstring.
        doc = self._testFunc.__doc__
        return doc and doc.split("\n")[0].strip() or None
/IETK-Ret-0.1.1.tar.gz/IETK-Ret-0.1.1/ietk/methods/illuminate_sharpen.py | import numpy as np
import cv2
import scipy as sp
from dehaze import get_dark_channel
from ietk import methods
from ietk import util
def reshape_A(A, I_shape):
    """Broadcast the atmosphere term ``A`` to a shape compatible with an
    image of shape ``I_shape`` (h, w, ch).

    Accepted inputs and their outputs:
      - scalar            -> array of shape (1, 1, 1)
      - rgb triple (3,)   -> array of shape (1, 1, 3)
      - gray image (h, w) -> array of shape (h, w, 1)
      - full image        -> returned unchanged (must equal I_shape exactly)

    Raises AssertionError if a full-channel A does not match I_shape.
    """
    if np.shape(A) == ():  # scalar
        A = np.reshape(A, (1, 1, 1))
    elif np.shape(A) == (3,):  # rgb pixel color
        A = np.reshape(A, (1, 1, 3))
    elif np.shape(A) == (I_shape[0], I_shape[1]):  # gray img
        # BUGFIX: np.reshape takes the new shape as a single tuple; the
        # original passed the dimensions as separate positional arguments,
        # which raised TypeError for any gray-image atmosphere.
        A = np.reshape(A, (I_shape[0], I_shape[1], 1))
    else:  # full channel img
        assert A.shape == I_shape
    return A
def reshape_t(t, I_shape):
    """Broadcast a transmission map ``t`` against an image of shape
    ``I_shape`` (h, w, ch): a scalar becomes a (1, 1, 1) array, a (h, w)
    map gains a trailing channel axis to become (h, w, 1).

    Raises AssertionError if a non-scalar t does not match I_shape[:2].
    """
    if np.shape(t) == ():  # scalar transmission
        return np.reshape(t, (1, 1, 1))
    assert t.shape == I_shape[:2]
    return t.reshape(*t.shape, 1)
def illuminate_sharpen(
        I, ill_dark_channel_filter_size=50, ill_guided_filter_radius=100,
        ill_guided_eps=1e-8, ill_A=1, sh_t=0.20, sh_blur_radius=75, sh_blur_guided_eps=1e-8):
    """
    Simultaneously Illuminate and Sharpen an image.

    Not equivalent to sharpen(illuminate(img)), since computes J in one pass
    and sharpening computes A=blur(I) rather than A=blur(illuminated(I)). This
    ends up having quite different results!

    `I` - a [0,1] normalized image of shape (h,w,ch)

    illumination hyperparams:
        `ill_A` - atmosphere, usually (1,1,1) - rgb color (r,g,b) or an array of shape (h,w,ch)
        ill_dark_channel_filter_size
        ill_guided_filter_radius
        ill_guided_eps
    Sharpen hyperparams:
        sh_t - the transmission map for sharpening. how quickly to amplify differences.
        sh_blur_radius
        sh_blur_guided_eps

    NOTE(review): this function reads the free (module-global) name ``bg``,
    which is only set by the ``__main__`` block below together with the
    ``globals().update(locals())`` hacks; called in isolation it raises
    NameError. Confirm intended usage before reuse.
    """
    # Broadcast the atmosphere to a shape compatible with I.
    A1 = reshape_A(ill_A, I.shape)
    assert A1.max() <= 1 and A1.min() > 0
    # Unrefined transmission from the dark channel of the inverted image.
    t_unrefined = get_dark_channel(
        (1-I) / ill_A, filter_size=ill_dark_channel_filter_size)
    # Refine the transmission with a guided filter, guided by I itself.
    t1 = 1 - cv2.ximgproc.guidedFilter(
        # 1-I.astype('float32'), # same
        I.astype('float32'), t_unrefined.astype('float32'),
        ill_guided_filter_radius, ill_guided_eps)
    t1 = t1.clip(.00001, 1)  # avoid division by ~0 in the recovery below
    t1 = reshape_t(t1, I.shape)
    # J1 = 1 - ((1-I - ill_A)/t1 + A1)
    # return J1
    # TODO: sharpen
    # blurring, faster than ndi.gaussian_filter(I)
    # kernel = np.outer(*([sp.stats.norm.pdf(np.linspace(-1, 1, sh_blur_radius), 0, .7)]*2))
    # A2 = sp.signal.fftconvolve(I/t1, kernel[:,:,np.newaxis], axes=(0,1))
    # Edge-preserving blur of I used as the sharpening "atmosphere".
    A2 = cv2.ximgproc.guidedFilter(
        # radiance.astype('float32'),
        (I).astype('float32'),
        (I).astype('float32'),
        sh_blur_radius, sh_blur_guided_eps)
    t2 = reshape_t(sh_t, I.shape)
    # dz = np.array(np.gradient(I))/2+.5
    # dzn = (dz - dz.min((1,2), keepdims=True)) / dz.ptp((1,2), keepdims=True)
    # t2 = reshape_t(dzn.mean((0,3)), I.shape)
    # J2 = (I - A2*(1-t2))/t2
    # Jb = 1/t2 + (A1-(1-I))/(t1*t2) - A1/t2 - A2/t2 + A2
    # Jb2 = util.norm01(Jb, bg).clip(0,1)
    # Jb = 1/t2 + (A1-(1-I))/(t1*t2) - A1/t2 - A2/t2 + A2
    # Combined illuminate+sharpen recovery in one closed-form expression.
    Jb = ((I-(1-A1))/t1 +(1-A1)-A2)/t2 + A2
    # NOTE(review): ``bg`` is a free variable here (see docstring).
    Jb2 = util.norm01(Jb, bg).clip(0,1)
    # average with image
    # Jc = ((util.norm01(Jb, bg) + img)/2).clip(0,1)
    # Jc /= min(Jc.max(), I.max())
    Jc = Jb2/2 + I/2 # TODO: jb2/2
    Jc2 = util.norm01(Jc, bg)
    # geometric avg with image (better looking)
    # Jc = np.sqrt(Jb.clip(0, 100)*I)
    # Jc2 = util.norm01(Jc, bg).clip(0,1)
    best = methods.sharpen(Jc2, focus_region=~bg)
    # NOTE(review): presumably calls the packaged ietk.methods implementation;
    # if this module *is* that implementation the call is recursive -- verify.
    paper = methods.illuminate_sharpen(I, focus_region=~bg)
    # Debugging hack: dump all locals into module globals (the __main__
    # block below depends on this).
    globals().update(locals())
    return best
    # NOTE(review): everything below is unreachable (after ``return``).
    # sigmoid average
    # import matplotlib.colors as C
    # Jd = C.rgb_to_hsv(I)
    # z = Jb[:,:,2].copy()
    # z = ndi.maximum_filter(z, size=3) - ndi.minimum_filter(z, size=3)
    # z2 = C.rgb_to_hsv(Jb2)[:,:,2]
    # z2 = ndi.maximum_filter(z2, size=15) - ndi.minimum_filter(z2, size=15)
    # D = z - z2
    # # D = Jb2 - I
    # b = -D.min() / D.ptp()
    # D = (D-D.min())/D.ptp()
    # a = 5
    # print(b)
    # _W = 1/(1+np.exp(-1.0*a*(D - b)))
    # W = (_W - _W.min()) / _W.ptp()
    # Jd[:,:,2] = (W) * z + (1-W) * z2
    # Jd = C.hsv_to_rgb(Jd)
    # # Jd = ((util.norm01(Jb, bg) + img)/2).clip(0,1)
    # # sharpen illuminate
    # # Jd = 1 - A1 - 1/t1+(I-A2)/(t2*t1) + A2/t1 + A1/t1
    # plt.figure() ; plt.imshow(Jd)
    # plt.figure() ; plt.imshow(Jc)
    # # illuminate sharpen, I and A swapped
    # Je = 1/t2 + (A1-(1-A2))/(t1*t2) - A1/t2 - I/t2 + I
    # Je2 = util.norm01(Je, bg).clip(0,1)
    globals().update(locals())
    print(Jb.max(), Jb.min(), Je.max(), Je.min())
    # return Jb, Jc, Je # TODO
if __name__ == "__main__":
    # Ad-hoc experiment script for visually comparing illuminate/sharpen
    # variants on one IDRiD fundus image.
    # NOTE(review): several names used below (I, Jc2, Jb, Jc, Je, Jd, best2,
    # sh_blur_radius, sh_blur_guided_eps) are never defined in this block;
    # the script relies on illuminate_sharpen() doing
    # globals().update(locals()) to inject them -- confirm before reuse.
    from matplotlib import pyplot as plt
    from ietk.data import IDRiD
    from ietk import util
    # I = np.dstack([(np.outer(*[np.exp(-np.linspace(0, 1, 1000))]*2))]*3)
    # bg = np.zeros_like(I, dtype='bool')
    # z = illuminate_sharpen(I)
    # import sys ; sys.exit()
    def sh(I, t=.20): # custom sharpen debugging
        # NOTE(review): reads module globals sh_blur_radius and
        # sh_blur_guided_eps (injected by illuminate_sharpen's globals hack).
        A2 = cv2.ximgproc.guidedFilter(
            # radiance.astype('float32'),
            (I).astype('float32'),
            (I).astype('float32'),
            sh_blur_radius, sh_blur_guided_eps)
        # kernel = np.outer(*([sp.stats.norm.pdf(np.linspace(-1, 1, sh_blur_radius), 0, .7)]*2))
        # A2 = sp.signal.fftconvolve(I/t1, reshape_A(kernel))
        return (I-A2)/t + A2
    dset = IDRiD('./data/IDRiD_segmentation')
    img, labels = dset['IDRiD_25']
    # he = labels['HE']
    # ma = labels['MA']
    # ex = labels['EX']
    # se = labels['SE']
    # od = labels['OD']
    # set background pure black.
    bg = util.get_background(img)
    img[bg] = 0
    best = illuminate_sharpen(img)
    # NOTE(review): ``I`` below exists only via the globals hack above.
    illum = methods.illuminate_dcp(I, focus_region=~bg)
    plt.figure(1) ; plt.imshow(sh(illum))
    plt.figure(2) ; plt.imshow(sh(Jc2, .2))
    # best = competing_methods.sharpen(Jc2, focus_region=~bg)
    # NOTE(review): execution stops here; everything below is dead code
    # kept for manual experimentation.
    import sys ; sys.exit()
    plt.figure() ; plt.imshow(img)
    plt.figure() ; plt.imshow(best)
    plt.figure() ; plt.imshow(util.norm01(best, bg))
    import sys ; sys.exit()
    from sharpen_img import sharpen
    # shar
    # sharpen(util.norm01(np.sqrt(Jb.clip(0, 100)*I), bg),
    plt.figure() ; plt.imshow(best2);
    z = (img - Je) / .8 + Je
    plt.imshow(z);
    plt.imshow(Jd)
    plt.savefig('data/test_sharp_ill.png')
    import sys ; sys.exit()
    import mpl_toolkits.axes_grid1
    f = plt.figure(figsize=(20,20))
    axs = mpl_toolkits.axes_grid1.axes_grid.ImageGrid(f, 111, (2, 3))
    axs[0].imshow(img)
    axs[1].imshow(util.norm01(Jb, bg).clip(0,1))
    axs[2].imshow(Jc/Jc.max())
    axs[3].imshow(methods.illuminate_sharpen(img, focus_region=~bg))
    JS = methods.sharpen(Jc/Jc.max(), focus_region=~bg)
    axs[4].imshow(JS)
    axs[5].imshow(util.norm01(methods.illuminate_dcp(img, focus_region=~bg), bg))
    f.savefig('data/test_illuminate_sharpen_atonce.png')
    # # J2 = competing_methods.illuminate_dcp(img, focus_region=util.get_foreground(img))
    # # print(np.sqrt(np.sum(np.abs(J.clip(0,1) - J2))))
    # # z = competing_methods.illuminate_dcp(img, focus_region=~bg)
    # # z = competing_methods.sharpen(util.norm01(z), focus_region=~bg)
    # # plt.figure(); plt.imshow(util.norm01(z, bg))
    # # z = competing_methods.sharpen(img, focus_region=~bg)
    # # z = competing_methods.illuminate_dcp(util.norm01(z, bg), focus_region=~bg)
    # # plt.figure(); plt.imshow(util.norm01(z))
/Distribution_Waed-0.1.tar.gz/Distribution_Waed-0.1/Distribution_Waed/Gaussiandistribution.py | import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """
    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction (n - 1) for a sample, plain n for a population.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)
        # Recompute the mean so stdev always reflects the current data.
        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces=50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        mu = self.mean
        sigma = self.stdev

        min_range = min(self.data)
        max_range = max(self.data)

        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []

        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))

        # make the plots: normalized histogram on top, pdf curve below
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')

        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # BUGFIX: label the second subplot; the original relabelled axes[0]
        # a second time, leaving the pdf plot without a y-axis label.
        axes[1].set_ylabel('Density')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        # Sum of independent Gaussians: means add, variances add.
        result.mean = self.mean + other.mean
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
/FuzzyClassificator-1.3.84-py3-none-any.whl/pybrain/structure/networks/mdrnn.py | __author__ = 'Justin S Bayer, bayer.justin@googlemail.com'
__version__ = '$Id$'
import operator
import scipy
try:
from arac.pybrainbridge import _FeedForwardNetwork #@UnresolvedImport
except:
_FeedForwardNetwork = object
from pybrain.structure.modules.mdrnnlayer import MdrnnLayer
from pybrain.structure import LinearLayer
from pybrain.structure.connections.permutation import PermutationConnection
from pybrain.utilities import crossproduct, permute, permuteToBlocks
class _Mdrnn(_FeedForwardNetwork):
    """Multi-dimensional recurrent network that sweeps an MdrnnLayer over a
    block-partitioned n-dimensional input in a single direction.

    NOTE(review): Python 2 code -- relies on the builtin ``reduce`` and on
    integer semantics of ``/`` (indim / blocksize).
    """

    def __init__(self, timedim, shape,
                 hiddendim, outsize, blockshape=None, name=None,
                 inlayerclass=LinearLayer, outlayerclass=LinearLayer):
        super(_Mdrnn, self).__init__()
        # Initialize necessary member variables
        self.timedim = timedim        # number of swiping dimensions
        self.shape = shape            # shape of the full input
        self.hiddendim = hiddendim
        self.outsize = outsize
        self.blockshape = blockshape
        # Total number of input entries and entries per block.
        self.indim = reduce(operator.mul, shape, 1)
        self.blocksize = reduce(operator.mul, blockshape, 1)
        # Number of blocks the input is partitioned into (py2 int division).
        self.sequenceLength = self.indim / self.blocksize
        self.inlayerclass = inlayerclass
        self.outlayerclass = outlayerclass
        # Build up topology
        self._buildTopology()

    def _makeMdrnnLayer(self):
        """Return an MdrnnLayer suitable for this network."""
        return MdrnnLayer(self.timedim, self.shape, self.hiddendim,
                          self.outsize, self.blockshape)

    def _standardPermutation(self):
        """Return the permutation of input data that is suitable for this
        network."""
        # TODO: include blockpermute here
        return scipy.array(range(self.sequenceLength))

    def _buildTopology(self):
        # Wire: input layer -> (permute in) -> one Mdrnn layer per swipe
        # direction -> (permute back) -> shared output layer.
        inlayer = self.inlayerclass(self.indim)
        outlayer = self.outlayerclass(self.sequenceLength * self.outsize)
        self.hiddenlayers = []
        # Add connections and layers
        self.addInputModule(inlayer)
        for p in self._permsForSwiping():
            i = self._makeMdrnnLayer()
            self.hiddenlayers.append(i)
            # Make a connection that permutes the input...
            in_pc = PermutationConnection(inlayer, i, p, self.blocksize)
            # .. and one that permutes it back.
            pinv = permute(range(len(p)), p)
            out_pc = PermutationConnection(i, outlayer, pinv, self.outsize)
            self.addModule(i)
            self.addConnection(in_pc)
            self.addConnection(out_pc)
        self.addOutputModule(outlayer)

    def _permsForSwiping(self):
        """Return the correct permutations of blocks for all swiping direction.
        """
        # We use an identity permutation to generate the permutations from by
        # slicing correctly.
        return [self._standardPermutation()]

    def activate(self, inpt):
        # Rearrange the flat input into block order before feeding the net;
        # the reshape is done in place, so restore the flat view afterwards.
        inpt.shape = self.shape
        inpt_ = permuteToBlocks(inpt, self.blockshape)
        inpt.shape = scipy.size(inpt),
        return super(_Mdrnn, self).activate(inpt_)

    def filterResult(self, inpt):
        # Identity hook; subclasses may post-process the network output.
        return inpt
class _MultiDirectionalMdrnn(_Mdrnn):
    """Mdrnn variant that sweeps the input from every corner: one block
    permutation (and hence one hidden layer) per direction, 2**timedim in
    total."""

    def _permsForSwiping(self):
        """Return the correct permutations of blocks for all swiping direction.
        """
        # We use an identity permutation to generate the permutations from by
        # slicing correctly.
        identity = scipy.array(range(self.sequenceLength))
        # Arrange block indices on their n-dimensional grid
        # (py2 integer division of shape by blockshape).
        identity.shape = tuple(s / b for s, b in zip(self.shape, self.blockshape))
        permutations = []
        # Loop over all possible directions: from each corner to each corner
        for direction in crossproduct([('+', '-')] * self.timedim):
            axises = []
            for _, axisdir in enumerate(direction):
                # Use a normal complete slice for forward...
                if axisdir == '+':
                    indices = slice(None, None, 1)
                # ...and a reversed complete slice for backward
                else:
                    indices = slice(None, None, -1)
                axises.append(indices)
            permutations.append(operator.getitem(identity, axises).flatten())
        return permutations
class _AccumulatingMdrnn(_Mdrnn):
    """Mdrnn variant intended to reduce the network output by summation."""

    def activate(self, inpt):
        res = super(_AccumulatingMdrnn, self).activate(inpt)
        res.shape = self.outsize, self.indim
        # NOTE(review): the summed result is never returned, so activate()
        # returns None here. Possibly truncated/unfinished code -- confirm
        # against upstream pybrain before relying on this class.
        res = res.sum()
/DNBC4tools-2.1.0.tar.gz/DNBC4tools-2.1.0/dnbc4tools/rna/run.py | import os,collections
import argparse
from dnbc4tools.tools.utils import str_mkdir,judgeFilexits,change_path,read_json,logging_call
from dnbc4tools.__init__ import __root_dir__
class Runpipe:
    """Driver for the full dnbc4rna scRNA pipeline.

    Captures the parsed CLI arguments and, in runpipe(), builds and runs the
    selected subcommands (data, count, analysis, report) in order.
    """

    def __init__(self, args):
        # Copy CLI arguments onto the instance; paths are made absolute.
        self.name = args.name
        self.cDNAr1 = args.cDNAfastq1
        self.cDNAr2 = args.cDNAfastq2
        self.oligor1 = args.oligofastq1
        self.oligor2 = args.oligofastq2
        self.genomeDir = os.path.abspath(args.genomeDir)
        self.outdir = os.path.abspath(args.outdir)
        self.threads = args.threads
        self.chemistry = args.chemistry
        self.darkreaction = args.darkreaction
        self.customize = args.customize
        self.calling_method = args.calling_method
        self.expectcells = args.expectcells
        self.forcecells = args.forcecells
        self.process = args.process        # comma-separated step list
        self.no_introns = args.no_introns
        self.minumi = args.minumi
        self.outunmappedreads = args.outunmappedreads

    def runpipe(self):
        """Validate inputs, assemble the per-step shell commands and run the
        steps requested in ``self.process`` via logging_call.

        Raises:
            Exception: if ``self.process`` names an unknown step.
        """
        change_path()
        # Validate the reference directory and required input files up front.
        genomeDir = os.path.abspath(self.genomeDir)
        judgeFilexits('%s/ref.json'%genomeDir)
        indexConfig = read_json('%s/ref.json'%genomeDir)
        gtf = indexConfig['gtf']
        judgeFilexits(self.cDNAr1,self.cDNAr2,self.oligor1,self.oligor2,self.genomeDir,gtf)
        # data_cmd = ['dnbc4rna data --cDNAfastq1 %s --cDNAfastq2 %s --oligofastq1 %s --oligofastq2 %s --threads %s --name %s --chemistry %s --darkreaction %s --outdir %s --genomeDir %s'
        #     %(self.cDNAr1,self.cDNAr2,self.oligor1,self.oligor2,self.threads,self.name,self.chemistry,self.darkreaction,self.outdir,self.genomeDir)]
        # Build the "data" step command as a list of flags, then join.
        data_cmd = [
            "dnbc4rna data",
            f"--cDNAfastq1 {self.cDNAr1}",
            f"--cDNAfastq2 {self.cDNAr2}",
            f"--oligofastq1 {self.oligor1}",
            f"--oligofastq2 {self.oligor2}",
            f"--threads {self.threads}",
            f"--name {self.name}",
            f"--chemistry {self.chemistry}",
            f"--darkreaction {self.darkreaction}",
            f"--outdir {self.outdir}",
            f"--genomeDir {self.genomeDir}"
        ]
        # Optional flags are appended only when set.
        if self.customize:
            data_cmd += ['--customize %s'%self.customize]
        if self.no_introns:
            data_cmd += ['--no_introns']
        if self.outunmappedreads:
            data_cmd += ['--outunmappedreads']
        data_cmd = ' '.join(data_cmd)
        count_cmd = 'dnbc4rna count --name %s --calling_method %s --expectcells %s --forcecells %s --minumi %s --threads %s --outdir %s'\
            %(self.name,self.calling_method,self.expectcells,self.forcecells,self.minumi,self.threads,self.outdir)
        analysis_cmd = 'dnbc4rna analysis --name %s --outdir %s --genomeDir %s'\
            %(self.name,self.outdir,self.genomeDir)
        report_cmd = ['dnbc4rna report --name %s --genomeDir %s --outdir %s --threads %s'
            %(self.name,self.genomeDir,self.outdir,self.threads)]
        if self.no_introns:
            report_cmd += ['--no_introns']
        report_cmd = ' '.join(report_cmd)

        # Reject unknown step names early (empty string is tolerated so a
        # trailing comma in --process does not abort the run).
        pipelist = str(self.process).split(',')
        for pipe in pipelist:
            if pipe not in ['data','count','analysis','report','']:
                print('\033[0;31;40mUnable to recognize pipe!\033[0m')
                raise Exception('Unable to recognize pipe!')

        # OrderedDict preserves the canonical execution order regardless of
        # the order given in --process.
        cmdlist = collections.OrderedDict()
        if 'data' in pipelist:
            cmdlist['data'] = data_cmd
        if 'count' in pipelist:
            cmdlist['count'] = count_cmd
        if 'analysis' in pipelist:
            cmdlist['analysis'] = analysis_cmd
        if 'report' in pipelist:
            cmdlist['report'] = report_cmd
        # Create the per-sample log directory, then run each step with
        # its output captured by logging_call.
        str_mkdir('%s/log'%os.path.join(self.outdir,self.name))
        for pipe,pipecmd in cmdlist.items():
            logging_call(pipecmd,pipe,os.path.join(self.outdir,self.name))
def run(args):
    """CLI entry point: build a Runpipe from the parsed arguments and
    execute the pipeline."""
    pipeline = Runpipe(args)
    pipeline.runpipe()
def helpInfo_run(parser):
parser.add_argument(
'--name',
metavar='STR',
help='Sample name.',
type=str,
required=True
)
parser.add_argument(
'--cDNAfastq1',
metavar='FASTQ',
help='Paths to the raw R1 fastq files of cDNA library.',
required=True
)
parser.add_argument(
'--cDNAfastq2',
metavar='FASTQ',
help='Paths to the raw R2 fastq files of cDNA library.',
required=True
)
parser.add_argument(
'--oligofastq1',
metavar='FASTQ',
help='Paths to the raw R1 fastq files of oligo library.',
required=True
)
parser.add_argument(
'--oligofastq2',
metavar='FASTQ',
help='Paths to the raw R2 fastq files of oligo library.',
required=True
)
parser.add_argument(
'--genomeDir',
type=str,
metavar='PATH',
help='Path to the directory where genome files are stored.',
required=True
)
parser.add_argument(
'--outdir',
metavar='PATH',
help='Output directory, [default: current directory].',
default=os.getcwd()
)
parser.add_argument(
'--threads',
type=int,
metavar='INT',
default=4,
help='Number of threads used for analysis, [default: 4].'
)
parser.add_argument(
'--calling_method',
metavar='STR',
choices=["barcoderanks","emptydrops"],
help='Cell calling method, choose from barcoderanks and emptydrops, [default: emptydrops].',
default='emptydrops'
)
parser.add_argument(
'--expectcells',
metavar='INT',
help='Expected number of recovered beads, [default: 3000].',
default=3000
)
parser.add_argument(
'--forcecells',
metavar='INT',
help='Force pipeline to use this number of beads.',
default=0
)
parser.add_argument(
'--minumi',
metavar='INT',
help=argparse.SUPPRESS,
default=1000
)
parser.add_argument(
'--chemistry',
metavar='STR',
choices=["scRNAv1HT","scRNAv2HT","auto"],
help='Chemistry version. Automatic detection is recommended , [default: auto].',
default='auto'
)
parser.add_argument(
'--darkreaction',
metavar='STR',
help='Sequencing dark cycles. Automatic detection is recommended, [default: auto].',
default='auto'
)
parser.add_argument(
'--customize',
metavar='STR',
help='Customize files for whitelist and readstructure in JSON format for cDNA and oligo.'
)
parser.add_argument(
'--process',
metavar='STR',
help='Custom analysis steps enable the skipping of unnecessary steps, [default: data,count,analysis,report].',
type=str,
default='data,count,analysis,report'
)
parser.add_argument(
'--no_introns',
action='store_true',
help='Intron reads are not included in the expression matrix.'
)
parser.add_argument(
'--outunmappedreads',
action='store_true',
help=argparse.SUPPRESS,
)
return parser | PypiClean |
/Bubot_AdminPanel-0.0.2-py3-none-any.whl/BubotObj/OcfDevice/subtype/AdminPanel/static/ui/js/chunk-194d1552.b563dbf4.js | (window["webpackJsonp"]=window["webpackJsonp"]||[]).push([["chunk-194d1552"],{"0fd9":function(t,e,n){"use strict";n("99af"),n("4160"),n("caad"),n("13d5"),n("4ec9"),n("b64b"),n("d3b7"),n("ac1f"),n("2532"),n("3ca3"),n("5319"),n("159b"),n("ddb0");var a=n("ade3"),i=n("5530"),s=(n("4b85"),n("2b0e")),r=n("d9f7"),o=n("80d2"),l=["sm","md","lg","xl"],c=["start","end","center"];function u(t,e){return l.reduce((function(n,a){return n[t+Object(o["D"])(a)]=e(),n}),{})}var d=function(t){return[].concat(c,["baseline","stretch"]).includes(t)},h=u("align",(function(){return{type:String,default:null,validator:d}})),f=function(t){return[].concat(c,["space-between","space-around"]).includes(t)},v=u("justify",(function(){return{type:String,default:null,validator:f}})),b=function(t){return[].concat(c,["space-between","space-around","stretch"]).includes(t)},p=u("alignContent",(function(){return{type:String,default:null,validator:b}})),m={align:Object.keys(h),justify:Object.keys(v),alignContent:Object.keys(p)},g={align:"align",justify:"justify",alignContent:"align-content"};function y(t,e,n){var a=g[t];if(null!=n){if(e){var i=e.replace(t,"");a+="-".concat(i)}return a+="-".concat(n),a.toLowerCase()}}var O=new Map;e["a"]=s["a"].extend({name:"v-row",functional:!0,props:Object(i["a"])(Object(i["a"])(Object(i["a"])({tag:{type:String,default:"div"},dense:Boolean,noGutters:Boolean,align:{type:String,default:null,validator:d}},h),{},{justify:{type:String,default:null,validator:f}},v),{},{alignContent:{type:String,default:null,validator:b}},p),render:function(t,e){var n=e.props,i=e.data,s=e.children,o="";for(var l in n)o+=String(n[l]);var c=O.get(o);return c||function(){var t,e;for(e in c=[],m)m[e].forEach((function(t){var 
a=n[t],i=y(e,t,a);i&&c.push(i)}));c.push((t={"no-gutters":n.noGutters,"row--dense":n.dense},Object(a["a"])(t,"align-".concat(n.align),n.align),Object(a["a"])(t,"justify-".concat(n.justify),n.justify),Object(a["a"])(t,"align-content-".concat(n.alignContent),n.alignContent),t)),O.set(o,c)}(),t(n.tag,Object(r["a"])(i,{staticClass:"row",class:c}),s)}})},"16b7":function(t,e,n){"use strict";n("a9e3");var a=n("2b0e");e["a"]=a["a"].extend().extend({name:"delayable",props:{openDelay:{type:[Number,String],default:0},closeDelay:{type:[Number,String],default:0}},data:function(){return{openTimeout:void 0,closeTimeout:void 0}},methods:{clearDelay:function(){clearTimeout(this.openTimeout),clearTimeout(this.closeTimeout)},runDelay:function(t,e){var n=this;this.clearDelay();var a=parseInt(this["".concat(t,"Delay")],10);this["".concat(t,"Timeout")]=setTimeout(e||function(){n.isActive={open:!0,close:!1}[t]},a)}}})},"22da":function(t,e,n){"use strict";var a=n("490a");e["a"]=a["a"]},3978:function(t,e,n){"use strict";n.r(e);var a=function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("v-card",{staticClass:"pa-0 ma-0",attrs:{flat:""}},[n("v-card-actions",{staticClass:"pa-0 pb-1"},[n("div",{staticClass:"pa-0 ma-0 pt-4"},[n("v-btn",{attrs:{icon:"",dense:""},on:{click:function(e){t.show=!t.show}}},[n("v-icon",[t._v(t._s(t.show?"mdi-minus-box-outline":"mdi-plus-box-outline"))])],1)],1),n("v-text-field",{attrs:{label:(t.schema.title||t.elemName)+" ["+t.elemValue.length+"]",placeholder:t.schema["description"],flat:"","hide-details":"",disabled:"",value:t.title}}),t.readOnly?t._e():n("div",{staticClass:"pa-0 ma-0 pt-4"},[t.show?n("v-btn",{attrs:{dense:"",icon:"",disabled:t.readOnly}},[n("v-icon",[t._v(t._s("mdi-plus-circle"))])],1):t._e()],1)],1),n("v-expand-transition",[n("v-card",{directives:[{name:"show",rawName:"v-show",value:t.show,expression:"show"}],staticClass:"ml-4 pb-1 pl-2",staticStyle:{"border-left":"1px solid 
var(--v-delimiter-base)"},attrs:{flat:"",tile:""}},t._l(t.elemValue,(function(e,a){return n("span",{key:""+t.path+t.delimiter+a},[n("v-hover",{scopedSlots:t._u([{key:"default",fn:function(i){var s=i.hover;return[n("v-row",{staticClass:"pl-2",staticStyle:{"flex-wrap":"nowrap"},attrs:{"no-gutters":""}},[n("JsonElem",{staticClass:"flex-grow-1 flex-shrink-0",staticStyle:{"min-width":"100px","max-width":"100%"},attrs:{"elem-value":e,"elem-name":"",schema:t.schema.items,path:(t.path?t.path+t.delimiter:"")+a,"input-listeners":t.inputListeners,"array-elem":!0,level:t.level+1,"read-only":t.readOnly?t.readOnly:t.schema.readOnly,"hide-read-only":t.hideReadOnly}}),t.readOnly?t._e():n("v-btn",{staticClass:"flex-grow-0 flex-shrink-1",attrs:{dense:"",icon:"",disabled:t.readOnly}},[s?n("v-icon",{on:{click:function(e){return t.deleteItem(a)}}},[t._v(" "+t._s("mdi-delete")+" ")]):t._e()],1)],1)]}}],null,!0)})],1)})),0)],1)],1)},i=[],s=(n("a9e3"),n("d3b7"),{components:{JsonElem:function(){return Promise.resolve().then(n.bind(null,"3c48"))}},props:{schema:Object,elemValue:Array,elemName:String,path:String,inputListeners:Object,arrayElem:Boolean,level:Number,readOnly:Boolean,hideReadOnly:Boolean},data:function(){return{show:!1,delimiter:"."}},computed:{arrayLen:function(){return this.elemValue.length},title:function(){var t="";if(!this.elemValue)return"";for(var e="",n=0;n<=Math.min(this.elemValue.length-1,1);n++){switch(this.schema.items.type){case"string":e=this.elemValue[n];break;case"object":e=this.elemValue[n][this.schema.items["titleField"]||"title"];break;default:return""}t&&(t+=", "),t+=e}return this.elemValue.length>2&&(t+=", 
..."),t}},methods:{deleteItem:function(t){this.$emit("action",{name:"UpdateProp",data:{action:"delete",path:this.path,value:t}})}}}),r=s,o=(n("95ac"),n("2877")),l=n("6544"),c=n.n(l),u=n("8336"),d=n("b0af"),h=n("99d9"),f=n("0789"),v=n("ce87"),b=n("132d"),p=n("0fd9"),m=n("8654"),g=Object(o["a"])(r,a,i,!1,null,"357f6692",null);e["default"]=g.exports;c()(g,{VBtn:u["a"],VCard:d["a"],VCardActions:h["a"],VExpandTransition:f["a"],VHover:v["a"],VIcon:b["a"],VRow:p["a"],VTextField:m["a"]})},"490a":function(t,e,n){"use strict";n("99af"),n("a9e3"),n("8d4f");var a=n("a9ad"),i=n("80d2");e["a"]=a["a"].extend({name:"v-progress-circular",props:{button:Boolean,indeterminate:Boolean,rotate:{type:[Number,String],default:0},size:{type:[Number,String],default:32},width:{type:[Number,String],default:4},value:{type:[Number,String],default:0}},data:function(){return{radius:20}},computed:{calculatedSize:function(){return Number(this.size)+(this.button?8:0)},circumference:function(){return 2*Math.PI*this.radius},classes:function(){return{"v-progress-circular--indeterminate":this.indeterminate,"v-progress-circular--button":this.button}},normalizedValue:function(){return this.value<0?0:this.value>100?100:parseFloat(this.value)},strokeDashArray:function(){return Math.round(1e3*this.circumference)/1e3},strokeDashOffset:function(){return(100-this.normalizedValue)/100*this.circumference+"px"},strokeWidth:function(){return Number(this.width)/+this.size*this.viewBoxSize*2},styles:function(){return{height:Object(i["g"])(this.calculatedSize),width:Object(i["g"])(this.calculatedSize)}},svgStyles:function(){return{transform:"rotate(".concat(Number(this.rotate),"deg)")}},viewBoxSize:function(){return this.radius/(1-Number(this.width)/+this.size)}},methods:{genCircle:function(t,e){return 
this.$createElement("circle",{class:"v-progress-circular__".concat(t),attrs:{fill:"transparent",cx:2*this.viewBoxSize,cy:2*this.viewBoxSize,r:this.radius,"stroke-width":this.strokeWidth,"stroke-dasharray":this.strokeDashArray,"stroke-dashoffset":e}})},genSvg:function(){var t=[this.indeterminate||this.genCircle("underlay",0),this.genCircle("overlay",this.strokeDashOffset)];return this.$createElement("svg",{style:this.svgStyles,attrs:{xmlns:"http://www.w3.org/2000/svg",viewBox:"".concat(this.viewBoxSize," ").concat(this.viewBoxSize," ").concat(2*this.viewBoxSize," ").concat(2*this.viewBoxSize)}},t)},genInfo:function(){return this.$createElement("div",{staticClass:"v-progress-circular__info"},this.$slots.default)}},render:function(t){return t("div",this.setTextColor(this.color,{staticClass:"v-progress-circular",attrs:{role:"progressbar","aria-valuemin":0,"aria-valuemax":100,"aria-valuenow":this.indeterminate?void 0:this.normalizedValue},class:this.classes,style:this.styles,on:this.$listeners}),[this.genSvg(),this.genInfo()])}})},"4b85":function(t,e,n){},"4e82":function(t,e,n){"use strict";n.d(e,"a",(function(){return s}));var a=n("ade3"),i=n("3206");function s(t,e,n){var s=Object(i["a"])(t,e,n).extend({name:"groupable",props:{activeClass:{type:String,default:function(){if(this[t])return this[t].activeClass}},disabled:Boolean},data:function(){return{isActive:!1}},computed:{groupClasses:function(){return this.activeClass?Object(a["a"])({},this.activeClass,this.isActive):{}}},created:function(){this[t]&&this[t].register(this)},beforeDestroy:function(){this[t]&&this[t].unregister(this)},methods:{toggle:function(){this.$emit("change")}}});return s}s("itemGroup")},"615b":function(t,e,n){},8336:function(t,e,n){"use strict";n("4160"),n("caad"),n("c7cd");var 
a=n("53ca"),i=n("3835"),s=n("5530"),r=(n("86cc"),n("10d2")),o=n("22da"),l=n("4e82"),c=n("f2e7"),u=n("fe6c"),d=n("1c87"),h=n("af2b"),f=n("58df"),v=n("d9bd"),b=Object(f["a"])(r["a"],d["a"],u["a"],h["a"],Object(l["a"])("btnToggle"),Object(c["b"])("inputValue"));e["a"]=b.extend().extend({name:"v-btn",props:{activeClass:{type:String,default:function(){return this.btnToggle?this.btnToggle.activeClass:""}},block:Boolean,depressed:Boolean,fab:Boolean,icon:Boolean,loading:Boolean,outlined:Boolean,retainFocusOnClick:Boolean,rounded:Boolean,tag:{type:String,default:"button"},text:Boolean,tile:Boolean,type:{type:String,default:"button"},value:null},data:function(){return{proxyClass:"v-btn--active"}},computed:{classes:function(){return Object(s["a"])(Object(s["a"])(Object(s["a"])(Object(s["a"])(Object(s["a"])({"v-btn":!0},d["a"].options.computed.classes.call(this)),{},{"v-btn--absolute":this.absolute,"v-btn--block":this.block,"v-btn--bottom":this.bottom,"v-btn--contained":this.contained,"v-btn--depressed":this.depressed||this.outlined,"v-btn--disabled":this.disabled,"v-btn--fab":this.fab,"v-btn--fixed":this.fixed,"v-btn--flat":this.isFlat,"v-btn--icon":this.icon,"v-btn--left":this.left,"v-btn--loading":this.loading,"v-btn--outlined":this.outlined,"v-btn--right":this.right,"v-btn--round":this.isRound,"v-btn--rounded":this.rounded,"v-btn--router":this.to,"v-btn--text":this.text,"v-btn--tile":this.tile,"v-btn--top":this.top},this.themeClasses),this.groupClasses),this.elevationClasses),this.sizeableClasses)},contained:function(){return Boolean(!this.isFlat&&!this.depressed&&!this.elevation)},computedRipple:function(){var t=!this.icon&&!this.fab||{circle:!0};return!this.disabled&&(null!=this.ripple?this.ripple:t)},isFlat:function(){return Boolean(this.icon||this.text||this.outlined)},isRound:function(){return Boolean(this.icon||this.fab)},styles:function(){return Object(s["a"])({},this.measurableStyles)}},created:function(){var 
t=this,e=[["flat","text"],["outline","outlined"],["round","rounded"]];e.forEach((function(e){var n=Object(i["a"])(e,2),a=n[0],s=n[1];t.$attrs.hasOwnProperty(a)&&Object(v["a"])(a,s,t)}))},methods:{click:function(t){!this.retainFocusOnClick&&!this.fab&&t.detail&&this.$el.blur(),this.$emit("click",t),this.btnToggle&&this.toggle()},genContent:function(){return this.$createElement("span",{staticClass:"v-btn__content"},this.$slots.default)},genLoader:function(){return this.$createElement("span",{class:"v-btn__loader"},this.$slots.loader||[this.$createElement(o["a"],{props:{indeterminate:!0,size:23,width:2}})])}},render:function(t){var e=[this.genContent(),this.loading&&this.genLoader()],n=this.isFlat?this.setTextColor:this.setBackgroundColor,i=this.generateRouteLink(),s=i.tag,r=i.data;return"button"===s&&(r.attrs.type=this.type,r.attrs.disabled=this.disabled),r.attrs.value=["string","number"].includes(Object(a["a"])(this.value))?this.value:JSON.stringify(this.value),t(s,this.disabled?r:n(this.color,r),e)}})},"86cc":function(t,e,n){},"8d4f":function(t,e,n){},"8e1b":function(t,e,n){},"95ac":function(t,e,n){"use strict";var a=n("8e1b"),i=n.n(a);i.a},"99d9":function(t,e,n){"use strict";n.d(e,"a",(function(){return s}));var a=n("b0af"),i=n("80d2"),s=Object(i["h"])("v-card__actions"),r=Object(i["h"])("v-card__subtitle"),o=Object(i["h"])("v-card__text"),l=Object(i["h"])("v-card__title");a["a"]},b0af:function(t,e,n){"use strict";n("0481"),n("4069"),n("a9e3");var a=n("5530"),i=(n("615b"),n("10d2")),s=n("297c"),r=n("1c87"),o=n("58df");e["a"]=Object(o["a"])(s["a"],r["a"],i["a"]).extend({name:"v-card",props:{flat:Boolean,hover:Boolean,img:String,link:Boolean,loaderHeight:{type:[Number,String],default:4},raised:Boolean},computed:{classes:function(){return 
Object(a["a"])(Object(a["a"])({"v-card":!0},r["a"].options.computed.classes.call(this)),{},{"v-card--flat":this.flat,"v-card--hover":this.hover,"v-card--link":this.isClickable,"v-card--loading":this.loading,"v-card--disabled":this.disabled,"v-card--raised":this.raised},i["a"].options.computed.classes.call(this))},styles:function(){var t=Object(a["a"])({},i["a"].options.computed.styles.call(this));return this.img&&(t.background='url("'.concat(this.img,'") center center / cover no-repeat')),t}},methods:{genProgress:function(){var t=s["a"].options.methods.genProgress.call(this);return t?this.$createElement("div",{staticClass:"v-card__progress",key:"progress"},[t]):null}},render:function(t){var e=this.generateRouteLink(),n=e.tag,a=e.data;return a.style=this.styles,this.isClickable&&(a.attrs=a.attrs||{},a.attrs.tabindex=0),t(n,this.setBackgroundColor(this.color,a),[this.genProgress(),this.$slots.default])}})},ce87:function(t,e,n){"use strict";var a=n("16b7"),i=n("f2e7"),s=n("58df"),r=n("d9bd");e["a"]=Object(s["a"])(a["a"],i["a"]).extend({name:"v-hover",props:{disabled:{type:Boolean,default:!1},value:{type:Boolean,default:void 0}},methods:{onMouseEnter:function(){this.runDelay("open")},onMouseLeave:function(){this.runDelay("close")}},render:function(){return this.$scopedSlots.default||void 0!==this.value?(this.$scopedSlots.default&&(t=this.$scopedSlots.default({hover:this.isActive})),Array.isArray(t)&&1===t.length&&(t=t[0]),t&&!Array.isArray(t)&&t.tag?(this.disabled||(t.data=t.data||{},this._g(t.data,{mouseenter:this.onMouseEnter,mouseleave:this.onMouseLeave})),t):(Object(r["c"])("v-hover should only contain a single element",this),t)):(Object(r["c"])("v-hover is missing a default scopedSlot or bound value",this),null);var t}})},f2e7:function(t,e,n){"use strict";n.d(e,"b",(function(){return s}));var a=n("ade3"),i=n("2b0e");function s(){var t,e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"value",n=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:"input";return i["a"].extend({name:"toggleable",model:{prop:e,event:n},props:Object(a["a"])({},e,{required:!1}),data:function(){return{isActive:!!this[e]}},watch:(t={},Object(a["a"])(t,e,(function(t){this.isActive=!!t})),Object(a["a"])(t,"isActive",(function(t){!!t!==this[e]&&this.$emit(n,t)})),t)})}var r=s();e["a"]=r}}]);
//# sourceMappingURL=chunk-194d1552.b563dbf4.js.map | PypiClean |
/Magnesium-0.1.1.tar.gz/Magnesium-0.1.1/src/magnesium/prefab/list_of_dicts_prefab.py | from lxml import etree
from .base_prefab import BasePrefab
from magnesium.query_marker import DoubleCurlyQueryMarkerStrategy
from magnesium.path_processor import SimplePathProcessor
from magnesium.path_interpreter import XPathInterpreter
from magnesium.mapping import SimpleMapping
from magnesium.pipeline import (
SchemaPipelineBuilder,
ExtractorPipelineBuilder
)
from magnesium.extractor import XPathExtractor
from magnesium.post_processor import (
SpaceNormalizerProcessor,
TailCutterProcessor,
ListOfDictsProcessor
)
class ListOfDictsPrefab(BasePrefab):
""""""
def __init__(
self,
schema_data=None,
query_marker_strategy=DoubleCurlyQueryMarkerStrategy(),
path_processor=SimplePathProcessor(),
path_interpreter=XPathInterpreter(),
mapping=SimpleMapping(),
extractor_processor=XPathExtractor()
):
""""""
self.query_marker_strategy = query_marker_strategy
self.path_processor = path_processor
self.path_interpreter = path_interpreter
self.mapping = mapping
self.extractor_processor = extractor_processor
self.set_schema_data(schema_data)
def set_schema_data(self, v):
self.schema_data = v
schema_root = etree.fromstring(self.schema_data)
schema_pipeline_builder = SchemaPipelineBuilder()
schema_pipeline_builder.set_query_marker_strategy(
self.query_marker_strategy
).set_text_prop_name(
'text'
).set_path_processor(
self.path_processor
).set_path_interpreter(
self.path_interpreter
).set_mapping(
self.mapping
)
schema_pipeline = schema_pipeline_builder.build()
self.fit_mapping = schema_pipeline.execute(schema_root)
def execute(self, sample_data):
""""""
sample_root = etree.fromstring(sample_data)
space_normalizer_processor = SpaceNormalizerProcessor()
tail_cutter_processor = TailCutterProcessor()
list_of_dicts_processor = ListOfDictsProcessor()
extractor_pipeline_builder = ExtractorPipelineBuilder()
extractor_pipeline_builder.set_mapping(
self.fit_mapping
).set_extractor(
self.extractor_processor
).add_post_processor(
tail_cutter_processor
).add_post_processor(
space_normalizer_processor
).add_post_processor(
list_of_dicts_processor
)
extractor_pipeline = extractor_pipeline_builder.build()
results = extractor_pipeline.execute(sample_root)
return results | PypiClean |
/LFake-18.9.0.tar.gz/LFake-18.9.0/lfake/providers/company/pt_BR/__init__.py | from typing import List
from .. import Provider as CompanyProvider
def company_id_checksum(digits: List[int]) -> List[int]:
digits = list(digits)
weights = 6, 5, 4, 3, 2, 9, 8, 7, 6, 5, 4, 3, 2
dv = sum(w * d for w, d in zip(weights[1:], digits))
dv = (11 - dv) % 11
dv = 0 if dv >= 10 else dv
digits.append(dv)
dv2 = sum(w * d for w, d in zip(weights, digits))
dv2 = (11 - dv2) % 11
dv2 = 0 if dv2 >= 10 else dv2
digits.append(dv2)
return digits[-2:]
class Provider(CompanyProvider):
formats = (
"{{last_name}} {{company_suffix}}",
"{{last_name}} {{last_name}} {{company_suffix}}",
"{{last_name}}",
"{{last_name}}",
)
catch_phrase_formats = ("{{catch_phrase_noun}} {{catch_phrase_verb}} {{catch_phrase_attribute}}",)
nouns = (
"a segurança",
"o prazer",
"o conforto",
"a simplicidade",
"a certeza",
"a arte",
"o poder",
"o direito",
"a possibilidade",
"a vantagem",
"a liberdade",
)
verbs = (
"de conseguir",
"de avançar",
"de evoluir",
"de mudar",
"de inovar",
"de ganhar",
"de atingir seus objetivos",
"de concretizar seus projetos",
"de realizar seus sonhos",
)
attributes = (
"de maneira eficaz",
"mais rapidamente",
"mais facilmente",
"simplesmente",
"com toda a tranquilidade",
"antes de tudo",
"naturalmente",
"sem preocupação",
"em estado puro",
"com força total",
"direto da fonte",
"com confiança",
)
company_suffixes = ("S/A", "S.A.", "Ltda.", "- ME", "- EI", "e Filhos")
def catch_phrase_noun(self) -> str:
"""
Returns a random catch phrase noun.
"""
return self.random_element(self.nouns)
def catch_phrase_attribute(self) -> str:
"""
Returns a random catch phrase attribute.
"""
return self.random_element(self.attributes)
def catch_phrase_verb(self) -> str:
"""
Returns a random catch phrase verb.
"""
return self.random_element(self.verbs)
def catch_phrase(self) -> str:
"""
:example: 'a segurança de evoluir sem preocupação'
"""
pattern: str = self.random_element(self.catch_phrase_formats)
catch_phrase = self.generator.parse(pattern)
catch_phrase = catch_phrase[0].upper() + catch_phrase[1:]
return catch_phrase
def company_id(self) -> str:
digits: List[int] = list(self.random_sample(range(10), 8))
digits += [0, 0, 0, 1]
digits += company_id_checksum(digits)
return "".join(str(d) for d in digits)
def cnpj(self) -> str:
digits = self.company_id()
return f"{digits[:2]}.{digits[2:5]}.{digits[5:8]}/{digits[8:12]}-{digits[12:]}" | PypiClean |
/Cheetah-2.4.4.tar.gz/Cheetah-2.4.4/cheetah/Tests/Regressions.py |
import Cheetah.NameMapper
import Cheetah.Template
import sys
import unittest
majorVer, minorVer = sys.version_info[0], sys.version_info[1]
versionTuple = (majorVer, minorVer)
def isPython23():
''' Python 2.3 is still supported by Cheetah, but doesn't support decorators '''
return majorVer == 2 and minorVer < 4
class GetAttrException(Exception):
pass
class CustomGetAttrClass(object):
def __getattr__(self, name):
raise GetAttrException('FAIL, %s' % name)
class GetAttrTest(unittest.TestCase):
'''
Test for an issue occurring when __getatttr__() raises an exception
causing NameMapper to raise a NotFound exception
'''
def test_ValidException(self):
o = CustomGetAttrClass()
try:
print(o.attr)
except GetAttrException, e:
# expected
return
except:
self.fail('Invalid exception raised: %s' % e)
self.fail('Should have had an exception raised')
def test_NotFoundException(self):
template = '''
#def raiseme()
$obj.attr
#end def'''
template = Cheetah.Template.Template.compile(template, compilerSettings={}, keepRefToGeneratedCode=True)
template = template(searchList=[{'obj' : CustomGetAttrClass()}])
assert template, 'We should have a valid template object by now'
self.failUnlessRaises(GetAttrException, template.raiseme)
class InlineImportTest(unittest.TestCase):
def test_FromFooImportThing(self):
'''
Verify that a bug introduced in v2.1.0 where an inline:
#from module import class
would result in the following code being generated:
import class
'''
template = '''
#def myfunction()
#if True
#from os import path
#return 17
Hello!
#end if
#end def
'''
template = Cheetah.Template.Template.compile(template, compilerSettings={'useLegacyImportMode' : False}, keepRefToGeneratedCode=True)
template = template(searchList=[{}])
assert template, 'We should have a valid template object by now'
rc = template.myfunction()
assert rc == 17, (template, 'Didn\'t get a proper return value')
def test_ImportFailModule(self):
template = '''
#try
#import invalidmodule
#except
#set invalidmodule = dict(FOO='BAR!')
#end try
$invalidmodule.FOO
'''
template = Cheetah.Template.Template.compile(template, compilerSettings={'useLegacyImportMode' : False}, keepRefToGeneratedCode=True)
template = template(searchList=[{}])
assert template, 'We should have a valid template object by now'
assert str(template), 'We weren\'t able to properly generate the result from the template'
def test_ProperImportOfBadModule(self):
template = '''
#from invalid import fail
This should totally $fail
'''
self.failUnlessRaises(ImportError, Cheetah.Template.Template.compile, template, compilerSettings={'useLegacyImportMode' : False}, keepRefToGeneratedCode=True)
def test_AutoImporting(self):
template = '''
#extends FakeyTemplate
Boo!
'''
self.failUnlessRaises(ImportError, Cheetah.Template.Template.compile, template)
def test_StuffBeforeImport_Legacy(self):
template = '''
###
### I like comments before import
###
#extends Foo
Bar
'''
self.failUnlessRaises(ImportError, Cheetah.Template.Template.compile, template, compilerSettings={'useLegacyImportMode' : True}, keepRefToGeneratedCode=True)
class Mantis_Issue_11_Regression_Test(unittest.TestCase):
'''
Test case for bug outlined in Mantis issue #11:
Output:
Traceback (most recent call last):
File "test.py", line 12, in <module>
t.respond()
File "DynamicallyCompiledCheetahTemplate.py", line 86, in respond
File "/usr/lib64/python2.6/cgi.py", line 1035, in escape
s = s.replace("&", "&") # Must be done first!
'''
def test_FailingBehavior(self):
import cgi
template = Cheetah.Template.Template("$escape($request)", searchList=[{'escape' : cgi.escape, 'request' : 'foobar'}])
assert template
self.failUnlessRaises(AttributeError, template.respond)
def test_FailingBehaviorWithSetting(self):
import cgi
template = Cheetah.Template.Template("$escape($request)",
searchList=[{'escape' : cgi.escape, 'request' : 'foobar'}],
compilerSettings={'prioritizeSearchListOverSelf' : True})
assert template
assert template.respond()
class Mantis_Issue_21_Regression_Test(unittest.TestCase):
'''
Test case for bug outlined in issue #21
Effectively @staticmethod and @classmethod
decorated methods in templates don't
properly define the _filter local, which breaks
when using the NameMapper
'''
def runTest(self):
if isPython23():
return
template = '''
#@staticmethod
#def testMethod()
This is my $output
#end def
'''
template = Cheetah.Template.Template.compile(template)
assert template
assert template.testMethod(output='bug') # raises a NameError: global name '_filter' is not defined
class Mantis_Issue_22_Regression_Test(unittest.TestCase):
'''
Test case for bug outlined in issue #22
When using @staticmethod and @classmethod
in conjunction with the #filter directive
the generated code for the #filter is reliant
on the `self` local, breaking the function
'''
def test_NoneFilter(self):
# XXX: Disabling this test for now
return
if isPython23():
return
template = '''
#@staticmethod
#def testMethod()
#filter None
This is my $output
#end filter
#end def
'''
template = Cheetah.Template.Template.compile(template)
assert template
assert template.testMethod(output='bug')
def test_DefinedFilter(self):
# XXX: Disabling this test for now
return
if isPython23():
return
template = '''
#@staticmethod
#def testMethod()
#filter Filter
This is my $output
#end filter
#end def
'''
# The generated code for the template's testMethod() should look something
# like this in the 'error' case:
'''
@staticmethod
def testMethod(**KWS):
## CHEETAH: generated from #def testMethod() at line 3, col 13.
trans = DummyTransaction()
_dummyTrans = True
write = trans.response().write
SL = [KWS]
_filter = lambda x, **kwargs: unicode(x)
########################################
## START - generated method body
_orig_filter_18517345 = _filter
filterName = u'Filter'
if self._CHEETAH__filters.has_key("Filter"):
_filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
else:
_filter = self._CHEETAH__currentFilter = \
self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
write(u' This is my ')
_v = VFFSL(SL,"output",True) # u'$output' on line 5, col 32
if _v is not None: write(_filter(_v, rawExpr=u'$output')) # from line 5, col 32.
########################################
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
'''
template = Cheetah.Template.Template.compile(template)
assert template
assert template.testMethod(output='bug')
if __name__ == '__main__':
unittest.main() | PypiClean |
/ApplicationClientServer_server-0.1-py3-none-any.whl/common/metaclasses.py | import dis
from pprint import pprint
# Метакласс для проверки соответствия сервера:
class ServerMaker(type):
def __init__(cls, clsname, bases, clsdict):
"""
:param clsname: - экземпляр метакласса - Server
:param bases: кортеж базовых классов - ()
:param clsdict: словарь атрибутов и методов экземпляра метакласса
"""
# Список методов, которые используются в функциях класса:
methods = [] # с помощью 'LOAD_GLOBAL'
methods_2 = [] # методы, обёрнутые декораторами попадают не в 'LOAD_GLOBAL', а в 'LOAD_METHOD'
# Атрибуты, используемые в функциях классов
attrs = []
for func in clsdict:
try:
ret = dis.get_instructions(clsdict[func])
except TypeError:
pass
else:
# Если функция разбираем код, получая используемые методы и атрибуты.
for i in ret:
print(i)
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
# заполняем список методами, использующимися в функциях класса
methods.append(i.argval)
elif i.opname == 'LOAD_METHOD':
if i.argval not in methods_2:
methods_2.append(i.argval)
elif i.opname == 'LOAD_ATTR':
if i.argval not in attrs:
# заполняем список атрибутами, использующимися в функциях класса
attrs.append(i.argval)
print(20 * '-', 'methods', 20 * '-')
pprint(methods)
print(20 * '-', 'methods_2', 20 * '-')
pprint(methods_2)
print(20 * '-', 'attrs', 20 * '-')
pprint(attrs)
print(50 * '-')
if 'connect' in methods:
raise TypeError('Использование метода connect недопустимо в серверном классе')
if not ('SOCK_STREAM' in attrs and 'AF_INET' in attrs):
raise TypeError('Некорректная инициализация сокета.')
# Вызываем конструктор предка
super().__init__(clsname, bases, clsdict)
# Метакласс для проверки корректности клиентов:
class ClientMaker(type):
def __init__(cls, clsname, bases, clsdict):
# Список методов, которые используются в функциях класса:
methods = []
for func in clsdict:
try:
ret = dis.get_instructions(clsdict[func])
# Если не функция то ловим исключение
except TypeError:
pass
else:
#Если функция разбираем код, получая используемые методы.
for i in ret:
if i.opname == 'LOAD_GLOBAL':
if i.argval not in methods:
methods.append(i.argval)
# Если обнаружено использование недопустимого метода accept, listen, socket бросаем исключение:
for command in ('accept', 'listen', 'socket'):
if command in methods:
raise TypeError('В классе обнаружено использование запрещённого метода')
# Вызов get_message или send_message из utils считаем корректным использованием сокетов
if 'get_message' in methods or 'send_message' in methods:
pass
else:
raise TypeError('Отсутствуют вызовы функций, работающих с сокетами.')
super().__init__(clsname, bases, clsdict) | PypiClean |
/GSAS-II-WONDER_linux-1.0.1.tar.gz/GSAS-II-WONDER_linux-1.0.1/GSAS-II-WONDER/imports/G2img_GE.py | from __future__ import division, print_function
import os
import numpy as np
import GSASIIobj as G2obj
import GSASIIpath
GSASIIpath.SetVersionNumber("$Revision: 4112 $")
class GE_ReaderClass(G2obj.ImportImage):
    '''Routine to read a GE image, typically from APS Sector 1.
    The image files may be of form .geX (where X is ' ', 1, 2, 3, 4 or 5),
    which is a raw image from the detector. These files may contain more
    than one image and have a rudimentary header.
    Files with extension .sum or .cor are 4 byte integers/pixel, one image/file.
    Files with extension .avg are 2 byte integers/pixel, one image/file.
    '''
    def __init__(self):
        # name the class explicitly rather than using super(self.__class__,...)
        # (the latter recurses forever if this class is ever subclassed)
        super(GE_ReaderClass,self).__init__(
            extensionlist=('.sum','.cor','.cor32','.avg','.ge','.ge1','.ge2','.ge3','.ge4','.ge5'),
            strictExtension=True,
            formatName = 'GE image',
            longFormatName = 'Summed GE image file'
            )
    def ContentsValidator(self, filename):
        '''Test that the file size is consistent with one or more 2048x2048
        2-byte frames following an 8192 byte header.

        Side effect: sets self.nimages to the number of whole frames found
        (not computed for .sum files).
        '''
        if '.sum' not in str(filename):
            try:
                # stat the path directly; the previous code opened the file
                # and re-derived the path by parsing the repr of the handle
                fsize = os.stat(filename).st_size
                # integer count of complete frames after the header
                self.nimages = (fsize-8192)//(2*2048**2)
            except Exception:
                return False #bad file size / unusable path
        return True
    def Reader(self,filename, ParentFrame=None, **kwarg):
        '''Read using GE file reader, :func:`GetGEsumData`
        '''
        #rdbuffer = kwarg.get('buffer')
        imagenum = kwarg.get('blocknum')
        #sum = kwarg.get('sum')
        if imagenum is None: imagenum = 1
        self.Comments,self.Data,self.Npix,self.Image,more = \
            GetGEsumData(self,filename,imagenum=imagenum)
        # an empty header or zero pixel count signals a read failure
        if self.Npix == 0 or not self.Comments:
            return False
        self.LoadImage(ParentFrame,filename,imagenum)
        # record which frame was read and whether more frames remain,
        # so the caller can iterate over multi-frame files
        self.repeatcount = imagenum
        self.repeat = more
        return True
class GEsum_ReaderClass(G2obj.ImportImage):
    '''Routine to read multiple GE images & sum them, typically from APS Sector 1.
    The image files may be of form .geX (where X is ' ', 1, 2, 3, 4 or 5),
    which is a raw image from the detector. These files may contain more
    than one image and have a rudimentary header.
    Files with extension .sum or .cor are 4 byte integers/pixel, one image/file.
    Files with extension .avg are 2 byte integers/pixel, one image/file.
    '''
    def __init__(self):
        # name the class explicitly rather than using super(self.__class__,...)
        # (the latter recurses forever if this class is ever subclassed)
        super(GEsum_ReaderClass,self).__init__(
            extensionlist=('.ge1','.ge2','.ge3','.ge4','.ge5'),
            strictExtension=True,
            formatName = 'sum GE multi-image',
            longFormatName = 'sum of GE multi-image file'
            )
    def ContentsValidator(self, filename):
        '''just a test that the file exists and its size can be read
        '''
        try:
            # stat the path directly; the previous code opened the file,
            # parsed the repr of the handle to recover the path, and
            # computed a frame count that was never used
            os.stat(filename)
        except Exception:
            return False #bad file / unusable path
        return True
    def Reader(self,filename, ParentFrame=None, **kwarg):
        '''Read using GE file reader, :func:`GetGEsumData`
        '''
        #rdbuffer = kwarg.get('buffer')
        imagenum = kwarg.get('blocknum')
        if imagenum is None: imagenum = 1
        # sum=True: all frames in the file are summed into a single image
        self.Comments,self.Data,self.Npix,self.Image,more = GetGEsumData(
            self,filename,imagenum=imagenum,sum=True)
        # an empty header or zero pixel count signals a read failure
        if self.Npix == 0 or not self.Comments:
            return False
        self.LoadImage(ParentFrame,filename,imagenum)
        self.repeatcount = imagenum
        self.repeat = more
        return True
def GetGEsumData(self,filename,imagenum=1,sum=False):
    '''Read G.E. detector images from various files as produced at 1-ID and
    with Detector Pool detector. Also sums multiple image files if desired.

    :param self: reader object; its .sumfile and .formatName are updated
      when sum=True
    :param str filename: name of the image file to be read
    :param int imagenum: 1-based number of the frame to read (raw .geX files
      may hold several 2048x2048 frames after an 8192 byte header)
    :param bool sum: if True, sum all frames and save the result in a
      .G2img file; the summed image is returned
    :returns: head,data,Npix,image,more where ``more`` is True if frames
      remain after imagenum; (0,0,0,0,False) on error
    '''
    import struct as st
    import platform
    # Python 2 compatibility: use cPickle there, the C pickle elsewhere
    if '2' in platform.python_version_tuple()[0]:
        import cPickle
    else:
        import pickle as cPickle
    import time
    more = False
    time0 = time.time()
    File = open(filename,'rb')
    if filename.split('.')[-1] in ['sum','cor32']:
        # single image of 4-byte floats, no header to skip
        head = ['GE detector sum/corrected data from APS 1-ID',]
        sizexy = [2048,2048]
        Npix = sizexy[0]*sizexy[1]
        image = np.array(np.frombuffer(File.read(4*Npix),dtype=np.float32),dtype=np.int32)
    elif filename.split('.')[-1] in ['avg','cor']:
        # single image of 2-byte ints: read the last 2*2048**2 bytes,
        # whatever header precedes them
        File.seek(0,2)
        last = File.tell()
        pos = last-2*(2048**2)
        File.seek(pos)
        head = ['GE detector avg or cor data from APS 1-ID',]
        sizexy = [2048,2048]
        Npix = sizexy[0]*sizexy[1]
        image = np.array(np.frombuffer(File.read(2*Npix),dtype=np.int16),dtype=np.int32)
    else:
        # raw .geX file: rudimentary 8192-byte header, then 2-byte int frames
        head = ['GE detector raw data',]
        File.seek(18)
        size,nframes = st.unpack('<ih',File.read(6))
        # number of frames seems to be 3 for single-image files
        if size != 2048:
            print('Warning GE image size unexpected: '+str(size))
            print('Assumed 2048x2048')
            size = 2048
        # the header frame count is unreliable, so derive it from the file
        # size instead; stat the path directly (previous code parsed the
        # repr of the open file handle to recover the path). Integer
        # division: only whole frames can be read.
        fsize = os.stat(filename).st_size
        nframes = (fsize-8192)//(2*2048**2)
        # return 0,0,0,0,False # probably should quit now
        if imagenum > nframes:
            print('Error: attempt to read image #'+str(imagenum)+
                  ' from file with '+str(nframes)+' images.')
            return 0,0,0,0,False
        elif imagenum < nframes:
            more = True
        sizexy = [2048,2048]
        Npix = sizexy[0]*sizexy[1]
        # seek past the header and any earlier frames
        pos = 8192 + (imagenum-1)*2*Npix
        File.seek(pos)
        image = np.array(np.frombuffer(File.read(2*Npix),dtype=np.int16),dtype=np.int32)
        if len(image) != sizexy[1]*sizexy[0]:
            print('not enough images while reading GE file: '+filename+'image #'+str(imagenum))
            return 0,0,0,0,False
        head += ['file: '+filename+' image #'+str(imagenum),]
        if sum: #will ignore imagenum
            print ('Frames to read %d,'%(nframes),end='')
            while nframes > 1: #OK, this will sum the frames.
                try:
                    image += np.array(np.frombuffer(File.read(2*Npix),dtype=np.int16),dtype=np.int32)
                except ValueError:
                    # short read at end of file: stop summing
                    break
                nframes -= 1
                print ('%d,'%(nframes),end='')
            print ('')
            more = False
            # close the input file before reusing the variable for the
            # output file (the input handle was previously leaked here)
            File.close()
            filename = os.path.splitext(filename)[0]+'.G2img'
            File = open(filename,'wb')
            Data = {'pixelSize':[200.,200.],'wavelength':0.15,'distance':250.0,'center':[204.8,204.8],'size':sizexy}
            image = np.reshape(image,(sizexy[1],sizexy[0]))
            cPickle.dump([head,Data,Npix,image],File,1)
            File.close()
            # record the generated file so the caller can pick it up
            self.sumfile = filename
            self.formatName = 'GSAS-II image'
            sum = False
    image = np.reshape(image,(sizexy[1],sizexy[0]))
    # NOTE(review): default calibration values (200 micron pixels, 0.15 A,
    # 250 mm distance) are placeholders expected to be refined later -- confirm
    data = {'pixelSize':[200.,200.],'wavelength':0.15,'distance':250.0,'center':[204.8,204.8],'size':sizexy}
    File.close()    # no-op if already closed in the sum branch
    if GSASIIpath.GetConfigValue('debug'):
        print ('Image read time %.2fs'%(time.time()-time0))
        print ('Read GE file: '+filename+' image #'+'%04d'%(imagenum))
    return head,data,Npix,image,more
/Euphorie-15.0.2.tar.gz/Euphorie-15.0.2/src/euphorie/client/resources/oira/script/chunks/43377.e8dcf533195e606a1451.min.js | (self.webpackChunk_patternslib_patternslib=self.webpackChunk_patternslib_patternslib||[]).push([[43377],{43377:function(s){s.exports=function(s){return{name:"Intel x86 Assembly",case_insensitive:!0,keywords:{$pattern:"[.%]?"+s.IDENT_RE,keyword:"lock rep repe repz repne repnz xaquire xrelease bnd nobnd aaa aad aam aas adc add and arpl bb0_reset bb1_reset bound bsf bsr bswap bt btc btr bts call cbw cdq cdqe clc cld cli clts cmc cmp cmpsb cmpsd cmpsq cmpsw cmpxchg cmpxchg486 cmpxchg8b cmpxchg16b cpuid cpu_read cpu_write cqo cwd cwde daa das dec div dmint emms enter equ f2xm1 fabs fadd faddp fbld fbstp fchs fclex fcmovb fcmovbe fcmove fcmovnb fcmovnbe fcmovne fcmovnu fcmovu fcom fcomi fcomip fcomp fcompp fcos fdecstp fdisi fdiv fdivp fdivr fdivrp femms feni ffree ffreep fiadd ficom ficomp fidiv fidivr fild fimul fincstp finit fist fistp fisttp fisub fisubr fld fld1 fldcw fldenv fldl2e fldl2t fldlg2 fldln2 fldpi fldz fmul fmulp fnclex fndisi fneni fninit fnop fnsave fnstcw fnstenv fnstsw fpatan fprem fprem1 fptan frndint frstor fsave fscale fsetpm fsin fsincos fsqrt fst fstcw fstenv fstp fstsw fsub fsubp fsubr fsubrp ftst fucom fucomi fucomip fucomp fucompp fxam fxch fxtract fyl2x fyl2xp1 hlt ibts icebp idiv imul in inc incbin insb insd insw int int01 int1 int03 int3 into invd invpcid invlpg invlpga iret iretd iretq iretw jcxz jecxz jrcxz jmp jmpe lahf lar lds lea leave les lfence lfs lgdt lgs lidt lldt lmsw loadall loadall286 lodsb lodsd lodsq lodsw loop loope loopne loopnz loopz lsl lss ltr mfence monitor mov movd movq movsb movsd movsq movsw movsx movsxd movzx mul mwait neg nop not or out outsb outsd outsw packssdw packsswb packuswb paddb paddd paddsb paddsiw paddsw paddusb paddusw paddw pand pandn pause paveb pavgusb pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pdistib pf2id pfacc pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp 
pfrcpit1 pfrcpit2 pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmachriw pmaddwd pmagw pmulhriw pmulhrwa pmulhrwc pmulhw pmullw pmvgezb pmvlzb pmvnzb pmvzb pop popa popad popaw popf popfd popfq popfw por prefetch prefetchw pslld psllq psllw psrad psraw psrld psrlq psrlw psubb psubd psubsb psubsiw psubsw psubusb psubusw psubw punpckhbw punpckhdq punpckhwd punpcklbw punpckldq punpcklwd push pusha pushad pushaw pushf pushfd pushfq pushfw pxor rcl rcr rdshr rdmsr rdpmc rdtsc rdtscp ret retf retn rol ror rdm rsdc rsldt rsm rsts sahf sal salc sar sbb scasb scasd scasq scasw sfence sgdt shl shld shr shrd sidt sldt skinit smi smint smintold smsw stc std sti stosb stosd stosq stosw str sub svdc svldt svts swapgs syscall sysenter sysexit sysret test ud0 ud1 ud2b ud2 ud2a umov verr verw fwait wbinvd wrshr wrmsr xadd xbts xchg xlatb xlat xor cmove cmovz cmovne cmovnz cmova cmovnbe cmovae cmovnb cmovb cmovnae cmovbe cmovna cmovg cmovnle cmovge cmovnl cmovl cmovnge cmovle cmovng cmovc cmovnc cmovo cmovno cmovs cmovns cmovp cmovpe cmovnp cmovpo je jz jne jnz ja jnbe jae jnb jb jnae jbe jna jg jnle jge jnl jl jnge jle jng jc jnc jo jno js jns jpo jnp jpe jp sete setz setne setnz seta setnbe setae setnb setnc setb setnae setcset setbe setna setg setnle setge setnl setl setnge setle setng sets setns seto setno setpe setp setpo setnp addps addss andnps andps cmpeqps cmpeqss cmpleps cmpless cmpltps cmpltss cmpneqps cmpneqss cmpnleps cmpnless cmpnltps cmpnltss cmpordps cmpordss cmpunordps cmpunordss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhps movlhps movlps movhlps movmskps movntps movss movups mulps mulss orps rcpps rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss unpckhps unpcklps xorps fxrstor fxrstor64 fxsave fxsave64 xgetbv xsetbv xsave xsave64 xsaveopt xsaveopt64 xrstor xrstor64 prefetchnta prefetcht0 prefetcht1 prefetcht2 maskmovq movntq pavgb pavgw pextrw pinsrw pmaxsw pmaxub 
pminsw pminub pmovmskb pmulhuw psadbw pshufw pf2iw pfnacc pfpnacc pi2fw pswapd maskmovdqu clflush movntdq movnti movntpd movdqa movdqu movdq2q movq2dq paddq pmuludq pshufd pshufhw pshuflw pslldq psrldq psubq punpckhqdq punpcklqdq addpd addsd andnpd andpd cmpeqpd cmpeqsd cmplepd cmplesd cmpltpd cmpltsd cmpneqpd cmpneqsd cmpnlepd cmpnlesd cmpnltpd cmpnltsd cmpordpd cmpordsd cmpunordpd cmpunordsd cmppd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi cvtpd2ps cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd cvttpd2pi cvttpd2dq cvttps2dq cvttsd2si divpd divsd maxpd maxsd minpd minsd movapd movhpd movlpd movmskpd movupd mulpd mulsd orpd shufpd sqrtpd sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd addsubpd addsubps haddpd haddps hsubpd hsubps lddqu movddup movshdup movsldup clgi stgi vmcall vmclear vmfunc vmlaunch vmload vmmcall vmptrld vmptrst vmread vmresume vmrun vmsave vmwrite vmxoff vmxon invept invvpid pabsb pabsw pabsd palignr phaddw phaddd phaddsw phsubw phsubd phsubsw pmaddubsw pmulhrsw pshufb psignb psignw psignd extrq insertq movntsd movntss lzcnt blendpd blendps blendvpd blendvps dppd dpps extractps insertps movntdqa mpsadbw packusdw pblendvb pblendw pcmpeqq pextrb pextrd pextrq phminposuw pinsrb pinsrd pinsrq pmaxsb pmaxsd pmaxud pmaxuw pminsb pminsd pminud pminuw pmovsxbw pmovsxbd pmovsxbq pmovsxwd pmovsxwq pmovsxdq pmovzxbw pmovzxbd pmovzxbq pmovzxwd pmovzxwq pmovzxdq pmuldq pmulld ptest roundpd roundps roundsd roundss crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq popcnt getsec pfrcpv pfrsqrtv movbe aesenc aesenclast aesdec aesdeclast aesimc aeskeygenassist vaesenc vaesenclast vaesdec vaesdeclast vaesimc vaeskeygenassist vaddpd vaddps vaddsd vaddss vaddsubpd vaddsubps vandpd vandps vandnpd vandnps vblendpd vblendps vblendvpd vblendvps vbroadcastss vbroadcastsd vbroadcastf128 vcmpeq_ospd vcmpeqpd vcmplt_ospd vcmpltpd vcmple_ospd vcmplepd vcmpunord_qpd vcmpunordpd vcmpneq_uqpd vcmpneqpd vcmpnlt_uspd vcmpnltpd vcmpnle_uspd vcmpnlepd 
vcmpord_qpd vcmpordpd vcmpeq_uqpd vcmpnge_uspd vcmpngepd vcmpngt_uspd vcmpngtpd vcmpfalse_oqpd vcmpfalsepd vcmpneq_oqpd vcmpge_ospd vcmpgepd vcmpgt_ospd vcmpgtpd vcmptrue_uqpd vcmptruepd vcmplt_oqpd vcmple_oqpd vcmpunord_spd vcmpneq_uspd vcmpnlt_uqpd vcmpnle_uqpd vcmpord_spd vcmpeq_uspd vcmpnge_uqpd vcmpngt_uqpd vcmpfalse_ospd vcmpneq_ospd vcmpge_oqpd vcmpgt_oqpd vcmptrue_uspd vcmppd vcmpeq_osps vcmpeqps vcmplt_osps vcmpltps vcmple_osps vcmpleps vcmpunord_qps vcmpunordps vcmpneq_uqps vcmpneqps vcmpnlt_usps vcmpnltps vcmpnle_usps vcmpnleps vcmpord_qps vcmpordps vcmpeq_uqps vcmpnge_usps vcmpngeps vcmpngt_usps vcmpngtps vcmpfalse_oqps vcmpfalseps vcmpneq_oqps vcmpge_osps vcmpgeps vcmpgt_osps vcmpgtps vcmptrue_uqps vcmptrueps vcmplt_oqps vcmple_oqps vcmpunord_sps vcmpneq_usps vcmpnlt_uqps vcmpnle_uqps vcmpord_sps vcmpeq_usps vcmpnge_uqps vcmpngt_uqps vcmpfalse_osps vcmpneq_osps vcmpge_oqps vcmpgt_oqps vcmptrue_usps vcmpps vcmpeq_ossd vcmpeqsd vcmplt_ossd vcmpltsd vcmple_ossd vcmplesd vcmpunord_qsd vcmpunordsd vcmpneq_uqsd vcmpneqsd vcmpnlt_ussd vcmpnltsd vcmpnle_ussd vcmpnlesd vcmpord_qsd vcmpordsd vcmpeq_uqsd vcmpnge_ussd vcmpngesd vcmpngt_ussd vcmpngtsd vcmpfalse_oqsd vcmpfalsesd vcmpneq_oqsd vcmpge_ossd vcmpgesd vcmpgt_ossd vcmpgtsd vcmptrue_uqsd vcmptruesd vcmplt_oqsd vcmple_oqsd vcmpunord_ssd vcmpneq_ussd vcmpnlt_uqsd vcmpnle_uqsd vcmpord_ssd vcmpeq_ussd vcmpnge_uqsd vcmpngt_uqsd vcmpfalse_ossd vcmpneq_ossd vcmpge_oqsd vcmpgt_oqsd vcmptrue_ussd vcmpsd vcmpeq_osss vcmpeqss vcmplt_osss vcmpltss vcmple_osss vcmpless vcmpunord_qss vcmpunordss vcmpneq_uqss vcmpneqss vcmpnlt_usss vcmpnltss vcmpnle_usss vcmpnless vcmpord_qss vcmpordss vcmpeq_uqss vcmpnge_usss vcmpngess vcmpngt_usss vcmpngtss vcmpfalse_oqss vcmpfalsess vcmpneq_oqss vcmpge_osss vcmpgess vcmpgt_osss vcmpgtss vcmptrue_uqss vcmptruess vcmplt_oqss vcmple_oqss vcmpunord_sss vcmpneq_usss vcmpnlt_uqss vcmpnle_uqss vcmpord_sss vcmpeq_usss vcmpnge_uqss vcmpngt_uqss vcmpfalse_osss vcmpneq_osss vcmpge_oqss 
vcmpgt_oqss vcmptrue_usss vcmpss vcomisd vcomiss vcvtdq2pd vcvtdq2ps vcvtpd2dq vcvtpd2ps vcvtps2dq vcvtps2pd vcvtsd2si vcvtsd2ss vcvtsi2sd vcvtsi2ss vcvtss2sd vcvtss2si vcvttpd2dq vcvttps2dq vcvttsd2si vcvttss2si vdivpd vdivps vdivsd vdivss vdppd vdpps vextractf128 vextractps vhaddpd vhaddps vhsubpd vhsubps vinsertf128 vinsertps vlddqu vldqqu vldmxcsr vmaskmovdqu vmaskmovps vmaskmovpd vmaxpd vmaxps vmaxsd vmaxss vminpd vminps vminsd vminss vmovapd vmovaps vmovd vmovq vmovddup vmovdqa vmovqqa vmovdqu vmovqqu vmovhlps vmovhpd vmovhps vmovlhps vmovlpd vmovlps vmovmskpd vmovmskps vmovntdq vmovntqq vmovntdqa vmovntpd vmovntps vmovsd vmovshdup vmovsldup vmovss vmovupd vmovups vmpsadbw vmulpd vmulps vmulsd vmulss vorpd vorps vpabsb vpabsw vpabsd vpacksswb vpackssdw vpackuswb vpackusdw vpaddb vpaddw vpaddd vpaddq vpaddsb vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn vpavgb vpavgw vpblendvb vpblendw vpcmpestri vpcmpestrm vpcmpistri vpcmpistrm vpcmpeqb vpcmpeqw vpcmpeqd vpcmpeqq vpcmpgtb vpcmpgtw vpcmpgtd vpcmpgtq vpermilpd vpermilps vperm2f128 vpextrb vpextrw vpextrd vpextrq vphaddw vphaddd vphaddsw vphminposuw vphsubw vphsubd vphsubsw vpinsrb vpinsrw vpinsrd vpinsrq vpmaddwd vpmaddubsw vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb vpminsw vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd vpmovsxbq vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq vpmovzxwd vpmovzxwq vpmovzxdq vpmulhuw vpmulhrsw vpmulhw vpmullw vpmulld vpmuludq vpmuldq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb vpsignw vpsignd vpslldq vpsrldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrlw vpsrld vpsrlq vptest vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw vpunpckhbw vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq vpunpcklqdq vpxor vrcpps vrcpss vrsqrtps vrsqrtss vroundpd vroundps vroundsd vroundss vshufpd vshufps vsqrtpd vsqrtps vsqrtsd vsqrtss vstmxcsr vsubpd vsubps vsubsd vsubss vtestps vtestpd vucomisd vucomiss vunpckhpd vunpckhps 
vunpcklpd vunpcklps vxorpd vxorps vzeroall vzeroupper pclmullqlqdq pclmulhqlqdq pclmullqhqdq pclmulhqhqdq pclmulqdq vpclmullqlqdq vpclmulhqlqdq vpclmullqhqdq vpclmulhqhqdq vpclmulqdq vfmadd132ps vfmadd132pd vfmadd312ps vfmadd312pd vfmadd213ps vfmadd213pd vfmadd123ps vfmadd123pd vfmadd231ps vfmadd231pd vfmadd321ps vfmadd321pd vfmaddsub132ps vfmaddsub132pd vfmaddsub312ps vfmaddsub312pd vfmaddsub213ps vfmaddsub213pd vfmaddsub123ps vfmaddsub123pd vfmaddsub231ps vfmaddsub231pd vfmaddsub321ps vfmaddsub321pd vfmsub132ps vfmsub132pd vfmsub312ps vfmsub312pd vfmsub213ps vfmsub213pd vfmsub123ps vfmsub123pd vfmsub231ps vfmsub231pd vfmsub321ps vfmsub321pd vfmsubadd132ps vfmsubadd132pd vfmsubadd312ps vfmsubadd312pd vfmsubadd213ps vfmsubadd213pd vfmsubadd123ps vfmsubadd123pd vfmsubadd231ps vfmsubadd231pd vfmsubadd321ps vfmsubadd321pd vfnmadd132ps vfnmadd132pd vfnmadd312ps vfnmadd312pd vfnmadd213ps vfnmadd213pd vfnmadd123ps vfnmadd123pd vfnmadd231ps vfnmadd231pd vfnmadd321ps vfnmadd321pd vfnmsub132ps vfnmsub132pd vfnmsub312ps vfnmsub312pd vfnmsub213ps vfnmsub213pd vfnmsub123ps vfnmsub123pd vfnmsub231ps vfnmsub231pd vfnmsub321ps vfnmsub321pd vfmadd132ss vfmadd132sd vfmadd312ss vfmadd312sd vfmadd213ss vfmadd213sd vfmadd123ss vfmadd123sd vfmadd231ss vfmadd231sd vfmadd321ss vfmadd321sd vfmsub132ss vfmsub132sd vfmsub312ss vfmsub312sd vfmsub213ss vfmsub213sd vfmsub123ss vfmsub123sd vfmsub231ss vfmsub231sd vfmsub321ss vfmsub321sd vfnmadd132ss vfnmadd132sd vfnmadd312ss vfnmadd312sd vfnmadd213ss vfnmadd213sd vfnmadd123ss vfnmadd123sd vfnmadd231ss vfnmadd231sd vfnmadd321ss vfnmadd321sd vfnmsub132ss vfnmsub132sd vfnmsub312ss vfnmsub312sd vfnmsub213ss vfnmsub213sd vfnmsub123ss vfnmsub123sd vfnmsub231ss vfnmsub231sd vfnmsub321ss vfnmsub321sd rdfsbase rdgsbase rdrand wrfsbase wrgsbase vcvtph2ps vcvtps2ph adcx adox rdseed clac stac xstore xcryptecb xcryptcbc xcryptctr xcryptcfb xcryptofb montmul xsha1 xsha256 llwpcb slwpcb lwpval lwpins vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps 
vfmsubaddpd vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd vfnmaddss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss vfrczpd vfrczps vfrczsd vfrczss vpcmov vpcomb vpcomd vpcomq vpcomub vpcomud vpcomuq vpcomuw vpcomw vphaddbd vphaddbq vphaddbw vphadddq vphaddubd vphaddubq vphaddubw vphaddudq vphadduwd vphadduwq vphaddwd vphaddwq vphsubbw vphsubdq vphsubwd vpmacsdd vpmacsdqh vpmacsdql vpmacssdd vpmacssdqh vpmacssdql vpmacsswd vpmacssww vpmacswd vpmacsww vpmadcsswd vpmadcswd vpperm vprotb vprotd vprotq vprotw vpshab vpshad vpshaq vpshaw vpshlb vpshld vpshlq vpshlw vbroadcasti128 vpblendd vpbroadcastb vpbroadcastw vpbroadcastd vpbroadcastq vpermd vpermpd vpermps vpermq vperm2i128 vextracti128 vinserti128 vpmaskmovd vpmaskmovq vpsllvd vpsllvq vpsravd vpsrlvd vpsrlvq vgatherdpd vgatherqpd vgatherdps vgatherqps vpgatherdd vpgatherqd vpgatherdq vpgatherqq xabort xbegin xend xtest andn bextr blci blcic blsi blsic blcfill blsfill blcmsk blsmsk blsr blcs bzhi mulx pdep pext rorx sarx shlx shrx tzcnt tzmsk t1mskc valignd valignq vblendmpd vblendmps vbroadcastf32x4 vbroadcastf64x4 vbroadcasti32x4 vbroadcasti64x4 vcompresspd vcompressps vcvtpd2udq vcvtps2udq vcvtsd2usi vcvtss2usi vcvttpd2udq vcvttps2udq vcvttsd2usi vcvttss2usi vcvtudq2pd vcvtudq2ps vcvtusi2sd vcvtusi2ss vexpandpd vexpandps vextractf32x4 vextractf64x4 vextracti32x4 vextracti64x4 vfixupimmpd vfixupimmps vfixupimmsd vfixupimmss vgetexppd vgetexpps vgetexpsd vgetexpss vgetmantpd vgetmantps vgetmantsd vgetmantss vinsertf32x4 vinsertf64x4 vinserti32x4 vinserti64x4 vmovdqa32 vmovdqa64 vmovdqu32 vmovdqu64 vpabsq vpandd vpandnd vpandnq vpandq vpblendmd vpblendmq vpcmpltd vpcmpled vpcmpneqd vpcmpnltd vpcmpnled vpcmpd vpcmpltq vpcmpleq vpcmpneqq vpcmpnltq vpcmpnleq vpcmpq vpcmpequd vpcmpltud vpcmpleud vpcmpnequd vpcmpnltud vpcmpnleud vpcmpud vpcmpequq vpcmpltuq vpcmpleuq vpcmpnequq vpcmpnltuq vpcmpnleuq vpcmpuq vpcompressd vpcompressq vpermi2d vpermi2pd vpermi2ps vpermi2q vpermt2d vpermt2pd vpermt2ps 
vpermt2q vpexpandd vpexpandq vpmaxsq vpmaxuq vpminsq vpminuq vpmovdb vpmovdw vpmovqb vpmovqd vpmovqw vpmovsdb vpmovsdw vpmovsqb vpmovsqd vpmovsqw vpmovusdb vpmovusdw vpmovusqb vpmovusqd vpmovusqw vpord vporq vprold vprolq vprolvd vprolvq vprord vprorq vprorvd vprorvq vpscatterdd vpscatterdq vpscatterqd vpscatterqq vpsraq vpsravq vpternlogd vpternlogq vptestmd vptestmq vptestnmd vptestnmq vpxord vpxorq vrcp14pd vrcp14ps vrcp14sd vrcp14ss vrndscalepd vrndscaleps vrndscalesd vrndscaless vrsqrt14pd vrsqrt14ps vrsqrt14sd vrsqrt14ss vscalefpd vscalefps vscalefsd vscalefss vscatterdpd vscatterdps vscatterqpd vscatterqps vshuff32x4 vshuff64x2 vshufi32x4 vshufi64x2 kandnw kandw kmovw knotw kortestw korw kshiftlw kshiftrw kunpckbw kxnorw kxorw vpbroadcastmb2q vpbroadcastmw2d vpconflictd vpconflictq vplzcntd vplzcntq vexp2pd vexp2ps vrcp28pd vrcp28ps vrcp28sd vrcp28ss vrsqrt28pd vrsqrt28ps vrsqrt28sd vrsqrt28ss vgatherpf0dpd vgatherpf0dps vgatherpf0qpd vgatherpf0qps vgatherpf1dpd vgatherpf1dps vgatherpf1qpd vgatherpf1qps vscatterpf0dpd vscatterpf0dps vscatterpf0qpd vscatterpf0qps vscatterpf1dpd vscatterpf1dps vscatterpf1qpd vscatterpf1qps prefetchwt1 bndmk bndcl bndcu bndcn bndmov bndldx bndstx sha1rnds4 sha1nexte sha1msg1 sha1msg2 sha256rnds2 sha256msg1 sha256msg2 hint_nop0 hint_nop1 hint_nop2 hint_nop3 hint_nop4 hint_nop5 hint_nop6 hint_nop7 hint_nop8 hint_nop9 hint_nop10 hint_nop11 hint_nop12 hint_nop13 hint_nop14 hint_nop15 hint_nop16 hint_nop17 hint_nop18 hint_nop19 hint_nop20 hint_nop21 hint_nop22 hint_nop23 hint_nop24 hint_nop25 hint_nop26 hint_nop27 hint_nop28 hint_nop29 hint_nop30 hint_nop31 hint_nop32 hint_nop33 hint_nop34 hint_nop35 hint_nop36 hint_nop37 hint_nop38 hint_nop39 hint_nop40 hint_nop41 hint_nop42 hint_nop43 hint_nop44 hint_nop45 hint_nop46 hint_nop47 hint_nop48 hint_nop49 hint_nop50 hint_nop51 hint_nop52 hint_nop53 hint_nop54 hint_nop55 hint_nop56 hint_nop57 hint_nop58 hint_nop59 hint_nop60 hint_nop61 hint_nop62 hint_nop63",built_in:"ip eip rip al ah bl 
bh cl ch dl dh sil dil bpl spl r8b r9b r10b r11b r12b r13b r14b r15b ax bx cx dx si di bp sp r8w r9w r10w r11w r12w r13w r14w r15w eax ebx ecx edx esi edi ebp esp eip r8d r9d r10d r11d r12d r13d r14d r15d rax rbx rcx rdx rsi rdi rbp rsp r8 r9 r10 r11 r12 r13 r14 r15 cs ds es fs gs ss st st0 st1 st2 st3 st4 st5 st6 st7 mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 xmm0 xmm1 xmm2 xmm3 xmm4 xmm5 xmm6 xmm7 xmm8 xmm9 xmm10 xmm11 xmm12 xmm13 xmm14 xmm15 xmm16 xmm17 xmm18 xmm19 xmm20 xmm21 xmm22 xmm23 xmm24 xmm25 xmm26 xmm27 xmm28 xmm29 xmm30 xmm31 ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 ymm7 ymm8 ymm9 ymm10 ymm11 ymm12 ymm13 ymm14 ymm15 ymm16 ymm17 ymm18 ymm19 ymm20 ymm21 ymm22 ymm23 ymm24 ymm25 ymm26 ymm27 ymm28 ymm29 ymm30 ymm31 zmm0 zmm1 zmm2 zmm3 zmm4 zmm5 zmm6 zmm7 zmm8 zmm9 zmm10 zmm11 zmm12 zmm13 zmm14 zmm15 zmm16 zmm17 zmm18 zmm19 zmm20 zmm21 zmm22 zmm23 zmm24 zmm25 zmm26 zmm27 zmm28 zmm29 zmm30 zmm31 k0 k1 k2 k3 k4 k5 k6 k7 bnd0 bnd1 bnd2 bnd3 cr0 cr1 cr2 cr3 cr4 cr8 dr0 dr1 dr2 dr3 dr8 tr3 tr4 tr5 tr6 tr7 r0 r1 r2 r3 r4 r5 r6 r7 r0b r1b r2b r3b r4b r5b r6b r7b r0w r1w r2w r3w r4w r5w r6w r7w r0d r1d r2d r3d r4d r5d r6d r7d r0h r1h r2h r3h r0l r1l r2l r3l r4l r5l r6l r7l r8l r9l r10l r11l r12l r13l r14l r15l db dw dd dq dt ddq do dy dz resb resw resd resq rest resdq reso resy resz incbin equ times byte word dword qword nosplit rel abs seg wrt strict near far a32 ptr",meta:"%define %xdefine %+ %undef %defstr %deftok %assign %strcat %strlen %substr %rotate %elif %else %endif %if %ifmacro %ifctx %ifidn %ifidni %ifid %ifnum %ifstr %iftoken %ifempty %ifenv %error %warning %fatal %rep %endrep %include %push %pop %repl %pathsearch %depend %use %arg %stacksize %local %line %comment %endcomment .nolist __FILE__ __LINE__ __SECT__ __BITS__ __OUTPUT_FORMAT__ __DATE__ __TIME__ __DATE_NUM__ __TIME_NUM__ __UTC_DATE__ __UTC_TIME__ __UTC_DATE_NUM__ __UTC_TIME_NUM__ __PASS__ struc endstruc istruc at iend align alignb sectalign daz nodaz up down zero default option assume public bits use16 use32 
use64 default section segment absolute extern global common cpu float __utf16__ __utf16le__ __utf16be__ __utf32__ __utf32le__ __utf32be__ __float8__ __float16__ __float32__ __float64__ __float80m__ __float80e__ __float128l__ __float128h__ __Infinity__ __QNaN__ __SNaN__ Inf NaN QNaN SNaN float8 float16 float32 float64 float80m float80e float128l float128h __FLOAT_DAZ__ __FLOAT_ROUND__ __FLOAT__"},contains:[s.COMMENT(";","$",{relevance:0}),{className:"number",variants:[{begin:"\\b(?:([0-9][0-9_]*)?\\.[0-9_]*(?:[eE][+-]?[0-9_]+)?|(0[Xx])?[0-9][0-9_]*(\\.[0-9_]*)?(?:[pP](?:[+-]?[0-9_]+)?)?)\\b",relevance:0},{begin:"\\$[0-9][0-9A-Fa-f]*",relevance:0},{begin:"\\b(?:[0-9A-Fa-f][0-9A-Fa-f_]*[Hh]|[0-9][0-9_]*[DdTt]?|[0-7][0-7_]*[QqOo]|[0-1][0-1_]*[BbYy])\\b"},{begin:"\\b(?:0[Xx][0-9A-Fa-f_]+|0[DdTt][0-9_]+|0[QqOo][0-7_]+|0[BbYy][0-1_]+)\\b"}]},s.QUOTE_STRING_MODE,{className:"string",variants:[{begin:"'",end:"[^\\\\]'"},{begin:"`",end:"[^\\\\]`"}],relevance:0},{className:"symbol",variants:[{begin:"^\\s*[A-Za-z._?][A-Za-z0-9_$#@~.?]*(:|\\s+label)"},{begin:"^\\s*%%[A-Za-z0-9_$#@~.?]*:"}],relevance:0},{className:"subst",begin:"%[0-9]+",relevance:0},{className:"subst",begin:"%!S+",relevance:0},{className:"meta",begin:/^\s*\.[\w_-]+/}]}}}}]);
//# sourceMappingURL=43377.e8dcf533195e606a1451.min.js.map | PypiClean |
/OctoBot-Trading-2.4.23.tar.gz/OctoBot-Trading-2.4.23/octobot_trading/personal_data/positions/types/linear_position.py | import decimal
import octobot_trading.constants as constants
import octobot_trading.enums as enums
import octobot_trading.personal_data.positions.position as position_class
class LinearPosition(position_class.Position):
    # Linear contract position: the contract is quoted in the settlement
    # currency, so notional value and PNL scale directly with price.
    # NOTE(review): the formulas below appear to assume self.size is signed
    # (negative for shorts) -- e.g. the negation in get_unrealized_pnl --
    # confirm against the Position base class.
    def update_value(self):
        """
        Update self.value from the current size and mark price.

        Notional value = CONTRACT_QUANTITY * MARK_PRICE
        """
        self.value = self.size * self.mark_price
    def get_unrealized_pnl(self, price):
        """
        LONG_PNL = CONTRACT_QUANTITY x [MARK_PRICE - ENTRY_PRICE]
        SHORT_PNL = CONTRACT_QUANTITY x [ENTRY_PRICE - MARK_PRICE]
        :param price: the pnl calculation price
        :return: the unrealized pnl
        """
        # ensure update validity: a non-positive price or entry price means
        # the position is not fully initialized yet
        if price <= constants.ZERO or self.entry_price <= constants.ZERO:
            return constants.ZERO
        if self.is_long():
            return self.size * (price - self.entry_price)
        if self.is_short():
            # NOTE(review): -self.size yields a positive contract quantity
            # only when size is stored negative for shorts -- TODO confirm
            return -self.size * (self.entry_price - price)
        # neither long nor short (e.g. idle position): no pnl
        return constants.ZERO
    def get_margin_from_size(self, size):
        """
        Calculates margin from size : margin = (Position quantity x entry price) / leverage
        """
        return (size * self.entry_price) / self.symbol_contract.current_leverage
    def get_size_from_margin(self, margin):
        """
        Calculates size from margin : size = (margin x leverage) / entry price
        """
        return (margin * self.symbol_contract.current_leverage) / self.entry_price
    def calculate_maintenance_margin(self):
        """
        :return: Maintenance margin = Position quantity x entry price x Maintenance margin rate
        """
        return self.size * self.entry_price * self.symbol_contract.maintenance_margin_rate
    def update_isolated_liquidation_price(self):
        """
        Updates isolated position liquidation price
        LONG LIQUIDATION PRICE = ENTRY_PRICE * (1 - Initial Margin Rate + MAINTENANCE_MARGIN_RATE)
        SHORT LIQUIDATION PRICE = ENTRY_PRICE * (1 + Initial Margin Rate - MAINTENANCE_MARGIN_RATE)
        - Long : - Extra Margin Added/ Contract Size
        - Short : + Extra Margin Added/ Contract Size
        """
        try:
            if self.is_long():
                self.liquidation_price = self.entry_price * (
                    constants.ONE - self.get_initial_margin_rate() + self.symbol_contract.maintenance_margin_rate)
            elif self.is_short():
                self.liquidation_price = self.entry_price * (
                    constants.ONE + self.get_initial_margin_rate() - self.symbol_contract.maintenance_margin_rate)
            else:
                # idle position: no liquidation price
                self.liquidation_price = constants.ZERO
            # closing fee depends on the (possibly changed) position state
            self.update_fee_to_close()
        except (decimal.DivisionByZero, decimal.InvalidOperation):
            # zero leverage / uninitialized contract values: treat as no
            # liquidation price rather than propagating the decimal error
            self.liquidation_price = constants.ZERO
    def get_bankruptcy_price(self, price, side, with_mark_price=False):
        """
        :param price: the price to compute bankruptcy from
        :param side: the side of the position
        :param with_mark_price: if True, return the current mark price
            instead of the computed bankruptcy price
        :return: Bankruptcy Price
        Long position = Entry Price x (1 - Initial Margin Rate)
        Short position = Entry Price × (1 + Initial Margin Rate)
        """
        if side is enums.PositionSide.LONG:
            return self.mark_price if with_mark_price else \
                price * (constants.ONE - self.get_initial_margin_rate())
        elif side is enums.PositionSide.SHORT:
            return self.mark_price if with_mark_price else \
                price * (constants.ONE + self.get_initial_margin_rate())
        # unknown side: no bankruptcy price
        return constants.ZERO
    def get_fee_to_open(self, quantity, price, symbol):
        """
        :return: Fee to open = (Quantity * Mark Price) x Taker fee
        """
        return quantity * price * self.get_taker_fee(symbol)
    def get_fee_to_close(self, quantity, price, side, symbol, with_mark_price=False):
        """
        :return: Fee to close = (Quantity * Bankruptcy Price) x Taker fee
        """
        return quantity * self.get_bankruptcy_price(price, side, with_mark_price=with_mark_price) * \
               self.get_taker_fee(symbol)
    def get_order_cost(self):
        """
        :return: Order Cost = Initial Margin + Two-Way Taker Fee
        """
        return self.initial_margin + self.get_two_way_taker_fee()
    def update_fee_to_close(self):
        """
        Update self.fee_to_close.

        Fee to close = (Quantity * Bankruptcy Price derived from mark price) x Taker fee
        """
        self.fee_to_close = self.get_fee_to_close(self.size, self.entry_price, self.side, self.symbol,
                                                  with_mark_price=True)
    def update_average_entry_price(self, update_size, update_price):
        """
        Average entry price = total contract value in market / total quantity of contracts
        Total contract value in market = [(Current position quantity * Current position entry price)
        + (Update quantity * Update price)]
        """
        total_contract_value = self.size + update_size
        # guard the denominator: when the update fully flips/closes the
        # position the total can be zero, so divide by ONE instead
        self.entry_price = ((self.size * self.entry_price + update_size * update_price) /
                            (total_contract_value if total_contract_value != constants.ZERO else constants.ONE))
        # clamp: a signed-size update can push the average below zero,
        # which is meaningless for a price
        if self.entry_price < constants.ZERO:
            self.entry_price = constants.ZERO
    def update_average_exit_price(self, update_size, update_price):
        """
        Average exit price = total contract value in market / total quantity of contracts
        Total contract value in market = [(Current position quantity * Current position exit price)
        + (Update quantity * Update price)]
        """
        if self.exit_price == constants.ZERO:
            # first (partial) close: exit price is simply the update price
            self.exit_price = update_price
        else:
            total_contract_value = self.already_reduced_size + update_size
            # same zero-denominator guard as update_average_entry_price
            self.exit_price = ((self.already_reduced_size * self.exit_price + update_size * update_price) /
                               (total_contract_value if total_contract_value != constants.ZERO else constants.ONE))
        # clamp negative averages to zero (prices cannot be negative)
        if self.exit_price < constants.ZERO:
            self.exit_price = constants.ZERO
    @staticmethod
    def is_inverse():
        # linear contracts are by definition not inverse contracts
        return False
/Netzob-2.0.0.tar.gz/Netzob-2.0.0/src/netzob/Simulator/Channels/DebugChannel.py |
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011-2017 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : contact@netzob.org |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#| ANSSI, https://www.ssi.gouv.fr |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| File contributors : |
#| - Frédéric Guihéry <frederic.guihery (a) amossys.fr> |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Standard library imports |
#+---------------------------------------------------------------------------+
import io
import sys
try:
from typing import Callable, Union
except ImportError:
pass
#+---------------------------------------------------------------------------+
#| Related third party imports |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Local application imports |
#+---------------------------------------------------------------------------+
from netzob.Common.Utils.Decorators import typeCheck, NetzobLogger, public_api
from netzob.Simulator.AbstractChannel import AbstractChannel
from netzob.Simulator.ChannelBuilder import ChannelBuilder
@NetzobLogger
class DebugChannel(AbstractChannel):
    """A DebugChannel is a file-like channel that handles writing of output
    data.

    The DebugChannel constructor expects some parameters:

    :param stream: The output stream: the name of a standard stream
                   (``"stdout"``/``"stderr"``), a file path, or an
                   already-opened file-like object.
    :param timeout: The default timeout of the channel for global
                    connection. Default value is blocking (None).
    :type stream: :class:`str` or a file-like object, required
    :type timeout: :class:`float`, optional

    The following code shows the use of a DebugChannel channel:

    >>> from netzob.all import *
    >>> client = DebugChannel("/dev/null")
    >>> symbol = Symbol([Field("Hello everyone!")])
    >>> with client:
    ...     client.write(next(symbol.specialize()))
    18
    """

    # Maps symbolic names to the corresponding process-wide standard streams.
    STREAM_MAP = {
        "stdout": sys.stdout,
        "stderr": sys.stderr
    }

    @public_api
    @typeCheck((str, io.IOBase))
    def __init__(self,
                 stream,  # type: Union[str, io.IOBase]
                 timeout=AbstractChannel.DEFAULT_TIMEOUT
                 ):
        # type: (...) -> None
        super(DebugChannel, self).__init__(timeout=timeout)
        # Resolve "stdout"/"stderr" to the real stream; any other value is
        # kept as-is (a path string is only opened later, in open()).
        self._stream = self.STREAM_MAP.get(stream, stream)

    @staticmethod
    def getBuilder():
        """Return the builder class used to construct this channel type."""
        return DebugChannelBuilder

    @public_api
    def open(self, timeout=5.):
        """Open the communication channel. If the channel is a client, it
        starts to connect to the specified server.

        :param timeout: The default timeout of the channel for opening
                        connection and waiting for a message. Default value
                        is 5.0 seconds. To specify no timeout, None value is
                        expected.
        :type timeout: :class:`float`, optional
        :raise: RuntimeError if the channel is already opened
        """
        super().open(timeout=timeout)
        # A string that was not a known standard-stream name is treated as
        # a file path and opened for (truncating) text writing here.
        if isinstance(self._stream, str):
            self._stream = open(self._stream, 'w')
        self.isOpen = True

    @public_api
    def close(self):
        """Close the communication channel.

        NOTE(review): when the channel was built on "stdout"/"stderr",
        this closes the process-wide standard stream as well -- confirm
        this is intended.
        """
        if self.isOpen:
            self._stream.close()
        self.isOpen = False

    @public_api
    def read(self):
        """Do nothing; a debug channel is write-only, so reading always
        yields None.
        """

    def writePacket(self, data):
        """Write on stream

        :param data: the data to write on the channel
        :type data: :class:`bytes`
        :return: whatever the underlying stream's ``write`` returns
                 (the number of characters written for text streams)
        """
        # repr() keeps raw bytes printable on a text stream.
        return self._stream.write(repr(data))

    @public_api
    def sendReceive(self, data):
        """Write on the communication channel the specified data and returns
        the corresponding response (always None here, since read() is a
        no-op).

        :param data: the data to write on the channel
        :type data: :class:`bytes`
        """
        self.write(data)
        return self.read()

    @public_api
    def checkReceived(self,
                      predicate,  # type: Callable[..., bool]
                      *args, **kwargs):
        # type: (...) -> bool
        """
        Method used to simulate the validation of an input data that could not
        be retrieved. Always succeeds, because nothing can actually be read
        from a debug channel.

        :param predicate: the function used to validate the received data
        :type predicate: Callable[[bytes], bool]
        """
        return True

    def updateSocketTimeout(self):
        """Do nothing; there is no underlying socket to configure.
        """

    @public_api
    def set_rate(self, rate):
        """This method sets the given transmission rate on the channel.
        Used in testing network under high load.

        :parameter rate: This specifies the bandwidth in bytes per second to
                         respect during traffic emission. Default value is
                         ``None``, which means that the bandwidth is only
                         limited by the underlying physical layer.
        :type rate: :class:`int`, optional
        """
        if rate is not None:
            self._logger.info("Network rate limited to {:.2f} kBps".format(rate/1000))
        self._rate = rate

    @public_api
    def unset_rate(self):
        """This method clears the transmission rate.
        """
        if self._rate is not None:
            self._rate = None
            self._logger.info("Network rate limitation removed")
class DebugChannelBuilder(ChannelBuilder):
    """
    This builder is used to create an
    :class:`~netzob.Simulator.Channels.DebugChannel.DebugChannel` instance

    >>> from netzob.Simulator.Channels.NetInfo import NetInfo
    >>> builder = DebugChannelBuilder().set("stream", "stderr")
    >>> chan = builder.build()
    >>> type(chan)
    <class 'netzob.Simulator.Channels.DebugChannel.DebugChannel'>
    """

    @public_api
    def __init__(self):
        # Register DebugChannel as the class this builder instantiates.
        super().__init__(DebugChannel)

    def set_stream(self, stream):
        """Record the target stream among the constructor keyword arguments.

        :param stream: value later forwarded to ``DebugChannel.__init__``
        """
        self.attrs['stream'] = stream
/Anemone-0.0.1.tar.gz/Anemone-0.0.1/anemone/reporter.py | import zmq
import threading
from Queue import Queue, Empty
class Reporter(object):
    # NOTE: this module is Python 2 code (Queue import, print statements
    # elsewhere in the file).
    def __init__(self, program_name, analysis_name):
        """
        The only anemone class to use for the data generating program.

        :param program_name: name of the data-generating program
        :param analysis_name: The analysis name should be the name of the
            input file or some other easily recognizable name such that a
            user of the GUI inspector program understands that he or she
            has connected to the right analysis.
        """
        self._program_name = program_name
        self._analysis_name = analysis_name
        # Thread-safe hand-off of report items to the background Server.
        self._queue = Queue()

    def start(self, address):
        """Start serving reports to GUIs on a background daemon thread.

        :param address: ZeroMQ endpoint string the server binds to
        """
        self.server = Server(self._program_name, self._analysis_name, self._queue)
        self.thread = threading.Thread(target=self.server.serve, args=(address,))
        # Daemon thread: must not keep the analysis process alive on exit.
        self.thread.daemon = True
        self.thread.start()

    def report_2dplot(self, report_name, x, y):
        """Queue one (x, y) sample for the named 2D-plot report."""
        rep = (report_name, TYPE_2D_PLOT, (x, y))
        self._queue.put(rep)
TYPE_2D_PLOT = '2dplot'
class Server(object):
    def __init__(self, program_name, analysis_name, queue):
        """
        Internal class to handle communication with the listening GUIs
        """
        self.queue = queue
        self.program_name = program_name
        self.analysis_name = analysis_name
        # Accumulated report data, keyed by report name:
        #   name -> (type, data); for TYPE_2D_PLOT, data is ([xs], [ys]).
        self.reports = {}

    def serve(self, address):
        """Event loop: drain the report queue and answer GUI requests.

        Runs forever; intended to be executed on a daemon thread started
        by Reporter.start().

        :param address: ZeroMQ endpoint the REP socket binds to
        """
        self.zmq_context = zmq.Context()
        self.zmq_socket = self.zmq_context.socket(zmq.REP)
        self.zmq_socket.bind(address)
        while True:
            try:
                # Block briefly so the loop also gets to poll the socket.
                item = self.queue.get(block=True, timeout=0.1)
                self.handle_queue_item(item)
            except Empty:
                # No items waiting, do nothing
                pass
            try:
                request = self.zmq_socket.recv_pyobj(flags=zmq.NOBLOCK)
                self.handle_zmq_request(request)
            except zmq.Again:
                # No requests waiting, do nothing
                pass

    def handle_queue_item(self, item):
        """
        Get new report data from the analysis thread through the queue and
        append it to the reports we currently hold
        """
        name, type, data = item
        # Lazily create the report entry on first sight of this name.
        if name not in self.reports:
            if type == TYPE_2D_PLOT:
                self.reports[name] = (type, ([], []))
        if type == TYPE_2D_PLOT:
            # data is an (x, y) pair; append to the parallel coordinate lists.
            self.reports[name][1][0].append(data[0])
            self.reports[name][1][1].append(data[1])

    def handle_zmq_request(self, request):
        """
        Handle a request for information from the remote GUI.

        Requests are tuples whose first element is a command name; a
        malformed or unknown request gets a string error reply, keeping the
        REP socket in a consistent send/recv state.
        """
        print 'request:', request
        # The request must be a tuple
        if not isinstance(request, tuple):
            self.zmq_socket.send_pyobj('ERROR: unknown command')
            return
        # The tuple must have at least one item
        if len(request) < 1:
            self.zmq_socket.send_pyobj('ERROR: unknown command')
            return
        cmd = request[0]
        if cmd == 'get_analysis_info':
            # Return tuple containing (program_name, analysis_name, num_reports)
            response = (self.program_name, self.analysis_name, len(self.reports))
        elif cmd == 'get_reports':
            # Return list of (name, type) tuples
            response = [(name, self.reports[name][0]) for name in self.reports]
        elif cmd == 'get_report' and len(request) == 3:
            # Return the data for the selected report
            name, start_index = request[1:]
            # Check that the requested report exists
            if not name in self.reports:
                self.zmq_socket.send_pyobj('ERROR: unknown report')
                return
            # Check that the start_index is an integer >= 0
            if not isinstance(start_index, int) or start_index < 0:
                self.zmq_socket.send_pyobj('ERROR: malformed start index')
                return
            type, data = self.reports[name]
            if type == TYPE_2D_PLOT:
                # Only send points the client has not seen yet.
                if len(data[0]) > start_index:
                    response = (data[0][start_index:], data[1][start_index:])
                else:
                    response = ([], [])
        else:
            self.zmq_socket.send_pyobj('ERROR: unknown command')
            return
        self.zmq_socket.send_pyobj(response)
/LTEpy-1.0.4.tar.gz/LTEpy-1.0.4/docs/notebooks/boltzmann_factor_demo.ipynb | # Boltzmann Factor Demo
```
import numpy as np
import matplotlib.cm as cm
import sys
from LTEpy import lte, atom, plot
from LTEpy.constants import EVOLT
```
### Make a hydrogen atom
```
hydrogen = atom.Hydrogen()
print(f"{hydrogen.levels=}")
print(f"{hydrogen.energy/EVOLT=}eV\n{hydrogen.gdegen=}")
```
### Calculate Boltzmann Factor of all energy levels
```
hbf = lte.Boltzmann_Factor(temp=10**3, #Kelvin
atom=hydrogen,)
xx = hbf.bfact
fig, hh = hbf.plot_bfact()
fig.axes[0].legend(handles=[hh,])
```
### Plot for many temperatures
```
handles = []
labels = []
fig, ax = plot.figax(
xlabel=plot.LABEL_LEVEL, ylabel=plot.LABEL_BFACT,
xscale='linear')
temps = np.logspace(4,6,5)
colors = cm.rainbow_r(np.linspace(0,1,len(temps)))
for ii, temp in enumerate(temps):
hbf = lte.Boltzmann_Factor(temp=temp, atom=hydrogen,)
bfact = hbf.bfact
hh = hbf.draw_bfact(ax, color=colors[ii])
handles.append(hh)
labels.append(f"{temp:.2e}")
print(labels)
ax.legend(handles=handles, labels=labels, title=plot.LABEL_TEMP)
```
| PypiClean |
/Djblets-3.3.tar.gz/Djblets-3.3/docs/releasenotes/0.6.10.rst | ============================
Djblets 0.6.10 Release Notes
============================
**Release date**: August 20, 2011
djblets.datagrid
================
* Log failed attempts at finding cell templates, in order to aid
debugging.
djblets.feedview
================
* Don't fail with an uncaught exception if loading an RSS feed fails.
(:bug:`2189`)
If loading the RSS feed fails (if it's behind a proxy, for example),
feedview would raise an error. Now it provides the error data for
the caller to render.
djblets.log
===========
* Fixed a date range calculation bug in the log viewer. (:bug:`2218`)
The "This Month" view in the log viewer was broken due to it
attempting to use the beginning of the date range calculated, which
was 0, instead of 01. Since we can assume the start of the month
will continue to be 1 for the foreseeable future, we can just
hardcode this and be safe.
Patch by Lee Loucks.
djblets.util
============
* Added a :py:func:`make_cache_key` function for generating usable cache keys.
The :py:func:`make_cache_key` function generates a cache key guaranteed to
be able to fit inside the memcached key length restrictions. It will
generate a MD5 hash of the key if it exceeds the length.
This is the same logic that cache_memoize uses to build the keys.
It can be used to perform other operations on that key.
Patch by Vlad Filippov.
* Allow JSONField to store up to 4GB of data on MySQL. (:bug:`1481`)
JSONField on MySQL was hard-coding the "TEXT" column type,
allowing only 64KB of data. Now it uses the default for Django's
TextField, which is 4GB.
This won't break compatibility with existing fields, but those
fields won't gain the benefits. To update an existing field on
MySQL, you will need to perform the following::
ALTER TABLE table_name MODIFY field_name LONGTEXT;
* Fixed a storage problem with large cache data. (:bug:`1660`)
Large cache data was encoded incorrectly, causing anything stored to
become unloadable, meaning it'd have to be recomputed. This was due
to a string being stored for all but the last chunk, instead of a
string wrapped in a list. This affects any data that, after being
picked and zlib-compressed, is over 1MB in size.
djblets.webapi
==============
* Make it easier to debug invalid data types in ``@webapi_request_fields``.
If using an invalid data type in the ``@webapi_request_fields`` listing,
you'd see a cryptic error being raised. Now the field is ignored
and an error is logged.
Contributors
============
* Christian Hammond
* David Trowbridge
* Lee Loucks
* Vlad Filippov
| PypiClean |
/CsuPTMD-1.0.12.tar.gz/CsuPTMD-1.0.12/PTMD/maskrcnn_benchmark/data/samplers/grouped_batch_sampler.py | import itertools
import torch
from torch.utils.data.sampler import BatchSampler
from torch.utils.data.sampler import Sampler
class GroupedBatchSampler(BatchSampler):
    """
    Wraps another sampler to yield a mini-batch of indices.
    It enforces that elements from the same group should appear in groups of batch_size.
    It also tries to provide mini-batches which follows an ordering which is
    as close as possible to the ordering from the original sampler.
    Arguments:
        sampler (Sampler): Base sampler.
        group_ids (sequence[int]): Group id of each dataset element; only
            elements sharing an id are batched together.
        batch_size (int): Size of mini-batch.
        drop_uneven (bool): If ``True``, the sampler will drop the batches whose
            size is less than ``batch_size``
    """

    def __init__(self, sampler, group_ids, batch_size, drop_uneven=False):
        if not isinstance(sampler, Sampler):
            raise ValueError(
                "sampler should be an instance of "
                "torch.utils.data.Sampler, but got sampler={}".format(sampler)
            )
        self.sampler = sampler
        self.group_ids = torch.as_tensor(group_ids)
        assert self.group_ids.dim() == 1
        self.batch_size = batch_size
        self.drop_uneven = drop_uneven
        # Sorted unique group ids present in the dataset.
        self.groups = torch.unique(self.group_ids).sort(0)[0]
        # Set by __len__ so that the following __iter__ reuses its batches.
        self._can_reuse_batches = False

    def _prepare_batches(self):
        """Materialize the list of batches (each a list of dataset indices),
        grouped by group id and ordered to approximate the base sampler."""
        dataset_size = len(self.group_ids)
        # get the sampled indices from the sampler
        sampled_ids = torch.as_tensor(list(self.sampler))
        # potentially not all elements of the dataset were sampled
        # by the sampler (e.g., DistributedSampler).
        # construct a tensor which contains -1 if the element was
        # not sampled, and a non-negative number indicating the
        # order where the element was sampled.
        # for example. if sampled_ids = [3, 1] and dataset_size = 5,
        # the order is [-1, 1, -1, 0, -1]
        order = torch.full((dataset_size,), -1, dtype=torch.int64)
        order[sampled_ids] = torch.arange(len(sampled_ids))
        # get a mask with the elements that were sampled
        mask = order >= 0
        # find the elements that belong to each individual cluster
        clusters = [(self.group_ids == i) & mask for i in self.groups]
        # get relative order of the elements inside each cluster
        # that follows the order from the sampler
        relative_order = [order[cluster] for cluster in clusters]
        # with the relative order, find the absolute order in the
        # sampled space
        permutation_ids = [s[s.sort()[1]] for s in relative_order]
        # permute each cluster so that they follow the order from
        # the sampler
        permuted_clusters = [sampled_ids[idx] for idx in permutation_ids]
        # splits each cluster in batch_size, and merge as a list of tensors
        splits = [c.split(self.batch_size) for c in permuted_clusters]
        merged = tuple(itertools.chain.from_iterable(splits))
        # now each batch internally has the right order, but
        # they are grouped by clusters. Find the permutation between
        # different batches that brings them as close as possible to
        # the order that we have in the sampler. For that, we will consider the
        # ordering as coming from the first element of each batch, and sort
        # correspondingly
        first_element_of_batch = [t[0].item() for t in merged]
        # get and inverse mapping from sampled indices and the position where
        # they occur (as returned by the sampler)
        inv_sampled_ids_map = {v: k for k, v in enumerate(sampled_ids.tolist())}
        # from the first element in each batch, get a relative ordering
        first_index_of_batch = torch.as_tensor(
            [inv_sampled_ids_map[s] for s in first_element_of_batch]
        )
        # permute the batches so that they approximately follow the order
        # from the sampler
        permutation_order = first_index_of_batch.sort(0)[1].tolist()
        # finally, permute the batches
        batches = [merged[i].tolist() for i in permutation_order]
        if self.drop_uneven:
            kept = []
            for batch in batches:
                if len(batch) == self.batch_size:
                    kept.append(batch)
            batches = kept
        return batches

    def __iter__(self):
        # Reuse the batches a preceding __len__ call computed, exactly once,
        # so that len(self) and the subsequent iteration agree.
        if self._can_reuse_batches:
            batches = self._batches
            self._can_reuse_batches = False
        else:
            batches = self._prepare_batches()
        self._batches = batches
        return iter(batches)

    def __len__(self):
        # The number of batches is only known after batching; cache the
        # result so the next __iter__ does not recompute it.
        if not hasattr(self, "_batches"):
            self._batches = self._prepare_batches()
            self._can_reuse_batches = True
        return len(self._batches)
/Loaderio-1.0.2.tar.gz/Loaderio-1.0.2/README.md | Loaderio
===========================================
Python wrapper for loader.io api v2
## Installation
```pip install loaderio```
## How to use
Go to [Loaderio][] for more details on API resources.
## Resources
### Applications
```
from loaderio.Loaderio import Loaderio
loader = Loaderio('API_KEY')
loader.apps.list()
loader.apps.create('www.example.com')
loader.apps.get('app_id')
loader.apps.verify('app_id', method = 'http')
loader.apps.delete('app_id')
```
### Tests
```
#Get tests list
loader.tests.list()
#Create test
#Note: You can add more url options as per api docs
loader.tests.name = 'Gonna crush yah!'
loader.tests.test_type = 'Non-Cycling'
loader.tests.total = 400
loader.tests.duration = 30
loader.tests.urls = [
{'url': 'http://gonacrushyaurl.com', 'request_params' : {"name": "Steve"}}]
loader.tests.create()
#Others
loader.tests.get(test_id)
loader.tests.run(test_id)
loader.tests.stop(test_id)
```
### Results
```
loader.results.list(test_id)
loader.results.get(test_id,results_id)
```
### Servers
```
loader.servers.list(test_id)
```
## License
The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
[Loaderio]: http://docs.loader.io/api/intro.html "Loader.io" | PypiClean |
/NeodroidVision-0.3.0-py36-none-any.whl/neodroidvision/regression/vae/architectures/vanilla_vae.py |
__author__ = "Christian Heider Nielsen"
__doc__ = """ description """
import torch
import torch.utils.data
from draugr.torch_utilities import ReductionMethodEnum
from torch import nn
from torch.nn.functional import binary_cross_entropy
from warg import Number
from neodroidvision.regression.vae.architectures.vae import VAE
__all__ = ["VanillaVAE"]
class Encoder(nn.Module):
    """MLP encoder mapping a flattened input to Gaussian latent parameters.

    Produces the mean and log-variance (named ``log_std`` to match the
    original code) of the approximate posterior q(z|x).
    """

    def __init__(self, input_size=784, output_size=20):
        """
        :param input_size: flattened input dimensionality (e.g. 28*28 = 784)
        :param output_size: latent dimensionality
        """
        super().__init__()
        # Bug fix: this attribute was never assigned, so encode() raised
        # AttributeError on self._input_size. Store it so encode() can
        # flatten inputs with arbitrary leading dimensions.
        self._input_size = input_size
        self.fcs = nn.Sequential(
            nn.Linear(input_size, 400), nn.ReLU(), nn.Linear(400, 200), nn.ReLU()
        )
        self.mean = nn.Linear(200, output_size)
        self.log_std = nn.Linear(200, output_size)

    def encode(self, x):
        """Compute the latent Gaussian parameters for *x*.

        :param x: tensor whose elements flatten to multiples of input_size
        :return: tuple (mean, log_var), each of shape (batch, output_size)
        """
        # Bug fix: reshape() returns a new tensor; the original discarded
        # the result, leaving x unflattened.
        x = x.reshape(-1, self._input_size)
        h1 = self.fcs(x)
        return self.mean(h1), self.log_std(h1)

    def forward(self, x):
        """Alias for :meth:`encode`, making the module directly callable."""
        return self.encode(x)
class Decoder(nn.Module):
    """MLP decoder mapping a latent code back to a flattened image.

    The final Sigmoid constrains every output value to [0, 1].
    """

    def __init__(self, input_size=20, output_size=784):
        """
        :param input_size: latent dimensionality
        :param output_size: flattened output dimensionality
        """
        super().__init__()
        layers = [
            nn.Linear(input_size, 200),
            nn.ReLU(),
            nn.Linear(200, 400),
            nn.ReLU(),
            nn.Linear(400, output_size),
            nn.Sigmoid(),
        ]
        self.fcs = nn.Sequential(*layers)

    def decode(self, z):
        """Map latent codes *z* to flat reconstructions in [0, 1].

        :param z: latent tensor of shape (batch, input_size)
        :return: tensor of shape (batch, output_size)
        """
        return self.fcs(z)

    def forward(self, x):
        """Decode *x* and reshape the result to image form.

        NOTE(review): the 28x28 target shape is hard-coded (MNIST
        assumption) and only matches output_size == 784 per image --
        confirm before using other sizes.
        """
        flat = self.decode(x)
        return flat.view(-1, 28, 28)
class VanillaVAE(VAE):
    """Fully-connected variational autoencoder.

    Wires the module-level Encoder/Decoder pair together and provides the
    standard summed BCE + KL divergence training loss.
    """

    def encode(self, *x: torch.Tensor) -> torch.Tensor:
        """Run the encoder.

        :param x: input tensor(s), forwarded verbatim to the encoder
        :return: the encoder output -- a (mean, log_var) pair"""
        return self._encoder(*x)

    def decode(self, *x: torch.Tensor) -> torch.Tensor:
        """Run the decoder.

        :param x: latent tensor(s), forwarded verbatim to the decoder
        :return: the decoder output (reshaped to image form by the decoder)"""
        return self._decoder(*x)

    def __init__(self, input_size=784, latent_size=2):
        """
        :param input_size: flattened input dimensionality
        :param latent_size: latent dimensionality
        """
        super().__init__(latent_size)
        self._input_size = input_size
        self._encoder = Encoder(input_size=input_size, output_size=latent_size)
        self._decoder = Decoder(input_size=latent_size, output_size=input_size)

    def forward(self, x):
        """Encode, reparameterise, decode.

        :param x: input batch
        :return: tuple (reconstruction, mean, log_var)
        """
        mean, log_var = self.encode(x)
        # Reparameterisation trick -- provided by the VAE base class.
        z = self.reparameterise(mean, log_var)
        return self.decode(z), mean, log_var

    # Reconstruction + KL divergence losses summed over all elements and batch
    def loss_function(self, recon_x, x, mu, log_var):
        """Summed binary cross-entropy plus KL divergence to N(0, I).

        :param recon_x: reconstruction with values in [0, 1]
        :param x: original input (flattened to input_size internally)
        :param mu: posterior mean
        :param log_var: posterior log-variance
        :return: scalar loss tensor
        """
        BCE = binary_cross_entropy(
            recon_x,
            x.view(-1, self._input_size),
            reduction=ReductionMethodEnum.sum.value,
        )
        # see Appendix B from VAE paper:
        # Kingma and Welling. Auto-Encoding Variational Bayes. ICLR, 2014
        # https://arxiv.org/abs/1312.6114
        # 0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
        KLD = -0.5 * torch.sum(1 + log_var - mu.pow(2) - log_var.exp())
        return BCE + KLD
/NSoL-0.1.14.tar.gz/NSoL-0.1.14/nsol/linear_operators.py |
# Import libraries
import numpy as np
import scipy.ndimage
from abc import ABCMeta, abstractmethod
import nsol.kernels as Kernels
class LinearOperators(object):
    """Factory for linear operators (convolution, Gaussian blurring, finite
    differences, gradient) and their adjoints on regular 1D/2D/3D grids.

    NOTE(review): ``__metaclass__`` is Python-2 syntax; under Python 3 it
    has no effect, so the class is not actually abstract there -- confirm
    the intended Python version.
    """
    __metaclass__ = ABCMeta

    ##
    # { constructor_description }
    # \date 2017-07-23 16:42:57+0100
    #
    # \param self The object
    # \param dimension Dimension of space, integer
    # \param spacing Spacing in each spatial direction in x- (y-, z-)
    # direction as numpy array or scalar (only 1D)
    #
    def __init__(self, dimension, spacing):
        self._dimension = dimension
        self._spacing = spacing
        # Kernel factory matching the spatial dimension.
        if dimension == 1:
            self._kernels = Kernels.Kernels1D(spacing=spacing)
        elif dimension == 2:
            self._kernels = Kernels.Kernels2D(spacing=spacing)
        elif dimension == 3:
            self._kernels = Kernels.Kernels3D(spacing=spacing)

    def get_spacing(self):
        """Return the grid spacing given at construction."""
        return self._spacing

    def get_dimension(self):
        """Return the spatial dimension (1, 2 or 3)."""
        return self._dimension

    ##
    # Gets the convolution and adjoint convolution operators given the provided
    # kernel
    # \date 2017-07-19 16:14:44+0100
    #
    # \param self The object
    # \param kernel Kernel specifying the linear operation as numpy array
    # \param mode Mode specifying the boundary conditions for the
    # convolution
    #
    # \return The convolution and adjoint convolution operators.
    #
    def get_convolution_and_adjoint_convolution_operators(
            self, kernel, mode="wrap"):
        # NOTE(review): the adjoint reuses the kernel unmirrored, which is
        # only the true adjoint for symmetric kernels (e.g. Gaussians) --
        # confirm for asymmetric kernels.
        kernel_adj = kernel
        A = lambda x: scipy.ndimage.convolve(x, kernel, mode=mode)
        A_adj = lambda x: scipy.ndimage.convolve(x, kernel_adj, mode=mode)
        return A, A_adj

    ##
    # Gets the Gaussian blurring operator and its adjoint associated to a
    # covariance matrix.
    # \date 2017-07-19 16:16:18+0100
    #
    # \param self The object
    # \param cov Variance covariance matrix as numpy array
    # \param alpha_cut Cut-off distance in integer, i.e. 3 means cutting
    # off a 3 sigma in each direction
    #
    # \return The gaussian blurring operators.
    #
    def get_gaussian_blurring_operators(self, cov, alpha_cut=3):
        kernel = self._kernels.get_gaussian(cov=cov, alpha_cut=alpha_cut)
        return self.get_convolution_and_adjoint_convolution_operators(kernel)

    ##
    # Gets the differential operator in x-direction and its adjoint
    # \date 2017-07-19 16:31:55+0100
    #
    # \param self The object
    # \param mode Mode specifying the boundary conditions for the
    # convolution
    #
    # \return The x-differential operators.
    #
    def get_dx_operators(self, mode="constant"):
        kernel = self._kernels.get_dx_forward_difference()
        # Adjoint of the forward difference is the negated backward difference.
        kernel_adj = -self._kernels.get_dx_backward_difference()
        D = lambda x: scipy.ndimage.convolve(x, kernel, mode=mode)
        D_adj = lambda x: scipy.ndimage.convolve(x, kernel_adj, mode=mode)
        return D, D_adj

    ##
    # Gets the gradient operator and its adjoint for both 2D and 3D.
    #
    # Operator \p grad applied on (m x n) numpy array returns an (dim*m x n)
    # numpy array, i.e. stacking the differentials on top of each other.
    # Operator \p grad_adj maps from (dim*m x n) to (m x n)
    # \date 2017-07-19 17:14:26+0100
    #
    # \param self The object
    # \param mode The mode
    #
    # \return The gradient operators.
    #
    def get_gradient_operators(self, mode="constant"):
        Dx, Dx_adj = self.get_dx_operators(mode=mode)
        if self._dimension == 1:
            grad = Dx
            grad_adj = Dx_adj
        # NOTE(review): the next branch is 'if', not 'elif', unlike the
        # 3D branch below -- harmless for dimensions 1-3 but inconsistent.
        if self._dimension == 2:
            Dy, Dy_adj = self.get_dy_operators(mode=mode)
            grad = lambda x: np.concatenate((Dx(x), Dy(x)))
            grad_adj = lambda x: self._get_adjoint_gradient_operator(
                x, [Dx_adj, Dy_adj])
        elif self._dimension == 3:
            Dy, Dy_adj = self.get_dy_operators(mode=mode)
            Dz, Dz_adj = self.get_dz_operators(mode=mode)
            grad = lambda x: np.concatenate((Dx(x), Dy(x), Dz(x)))
            grad_adj = lambda x: self._get_adjoint_gradient_operator(
                x, [Dx_adj, Dy_adj, Dz_adj])
        return grad, grad_adj

    ##
    # Gets the adjoint gradient operator.
    #
    # Apply Dx_adj(x[0:m,...]) + Dy_adj(x[m:2m,...]) (+ Dz_adj(x[2m:,...]))
    # \date 2017-07-19 17:19:06+0100
    #
    # \param self The object
    # \param x numpy array of shape (dim*m x n x p)
    # \param D_adj_list The d adj list
    #
    # \return The adjoint gradient operator.
    #
    def _get_adjoint_gradient_operator(self, x, D_adj_list):
        # Split the stacked differentials back into per-axis components.
        x_split = np.array_split(x, self._dimension)
        D_adj_x_list = [D_adj_list[i](x_split[i])
                        for i in range(0, self._dimension)]
        # Sum the per-axis adjoint contributions.
        D_adj_x = D_adj_x_list[0]
        for i in range(1, self._dimension):
            D_adj_x += D_adj_x_list[i]
        return D_adj_x
class LinearOperators1D(LinearOperators):
    """One-dimensional specialization of :class:`LinearOperators`."""

    def __init__(self, spacing=1):
        """
        :param spacing: grid spacing (scalar for 1D)
        """
        # Bug fix: super(self.__class__, ...) recurses infinitely if this
        # class is ever subclassed; name the class explicitly instead.
        super(LinearOperators1D, self).__init__(dimension=1, spacing=spacing)
class LinearOperators2D(LinearOperators):
    """Two-dimensional specialization of :class:`LinearOperators`."""

    def __init__(self, spacing=None):
        """
        :param spacing: grid spacing in (x, y) as a length-2 numpy array;
            defaults to unit spacing
        """
        # Bug fix: avoid the shared mutable default np.ones(2) and the
        # super(self.__class__, ...) anti-pattern (infinite recursion when
        # subclassed).
        if spacing is None:
            spacing = np.ones(2)
        super(LinearOperators2D, self).__init__(dimension=2, spacing=spacing)

    ##
    # Gets the differential operator in y-direction and its adjoint
    # \date 2017-07-19 16:31:55+0100
    #
    # \param self The object
    # \param mode Mode specifying the boundary conditions for the
    # convolution
    #
    # \return The y-differential operators.
    #
    def get_dy_operators(self, mode="constant"):
        kernel = self._kernels.get_dy_forward_difference()
        # Adjoint of the forward difference is the negated backward difference.
        kernel_adj = -self._kernels.get_dy_backward_difference()
        D = lambda x: scipy.ndimage.convolve(x, kernel, mode=mode)
        D_adj = lambda x: scipy.ndimage.convolve(x, kernel_adj, mode=mode)
        return D, D_adj
class LinearOperators3D(LinearOperators):
    """Three-dimensional specialization of :class:`LinearOperators`."""

    def __init__(self, spacing=None):
        """
        :param spacing: grid spacing in (x, y, z) as a length-3 numpy array;
            defaults to unit spacing
        """
        # Bug fix: avoid the shared mutable default np.ones(3) and the
        # super(self.__class__, ...) anti-pattern (infinite recursion when
        # subclassed).
        if spacing is None:
            spacing = np.ones(3)
        super(LinearOperators3D, self).__init__(dimension=3, spacing=spacing)

    ##
    # Gets the differential operator in y-direction and its adjoint
    # \date 2017-07-19 16:31:55+0100
    #
    # \param self The object
    # \param mode Mode specifying the boundary conditions for the
    # convolution
    #
    # \return The y-differential operators.
    #
    def get_dy_operators(self, mode="constant"):
        kernel = self._kernels.get_dy_forward_difference()
        # Adjoint of the forward difference is the negated backward difference.
        kernel_adj = -self._kernels.get_dy_backward_difference()
        D = lambda x: scipy.ndimage.convolve(x, kernel, mode=mode)
        D_adj = lambda x: scipy.ndimage.convolve(x, kernel_adj, mode=mode)
        return D, D_adj

    ##
    # Gets the differential operator in z-direction and its adjoint for 3D
    # \date 2017-07-19 16:31:55+0100
    #
    # \param self The object
    # \param mode Mode specifying the boundary conditions for the
    # convolution
    #
    # \return The z-differential operators.
    #
    def get_dz_operators(self, mode="constant"):
        kernel = self._kernels.get_dz_forward_difference()
        # Adjoint of the forward difference is the negated backward difference.
        kernel_adj = -self._kernels.get_dz_backward_difference()
        D = lambda x: scipy.ndimage.convolve(x, kernel, mode=mode)
        D_adj = lambda x: scipy.ndimage.convolve(x, kernel_adj, mode=mode)
        return D, D_adj
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_bs-cyrl-ba.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
  // Number of digits after the decimal point in the string form of n
  // (0 when n is an integer).
  n = n + '';
  var i = n.indexOf('.');
  return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
  // CLDR plural-rule operands: v = count of visible fraction digits
  // (capped at 3 unless opt_precision is given), f = those fraction
  // digits as an integer.
  var v = opt_precision;
  if (undefined === v) {
    v = Math.min(getDecimals(n), 3);
  }
  var base = Math.pow(10, v);
  var f = ((n * base) | 0) % base;
  return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"\u043f\u0440\u0435 \u043f\u043e\u0434\u043d\u0435",
"\u043f\u043e\u043f\u043e\u0434\u043d\u0435"
],
"DAY": [
"\u043d\u0435\u0434\u0435\u0459\u0430",
"\u043f\u043e\u043d\u0435\u0434\u0435\u0459\u0430\u043a",
"\u0443\u0442\u043e\u0440\u0430\u043a",
"\u0441\u0440\u0438\u0458\u0435\u0434\u0430",
"\u0447\u0435\u0442\u0432\u0440\u0442\u0430\u043a",
"\u043f\u0435\u0442\u0430\u043a",
"\u0441\u0443\u0431\u043e\u0442\u0430"
],
"MONTH": [
"\u0458\u0430\u043d\u0443\u0430\u0440",
"\u0444\u0435\u0431\u0440\u0443\u0430\u0440",
"\u043c\u0430\u0440\u0442",
"\u0430\u043f\u0440\u0438\u043b",
"\u043c\u0430\u0458",
"\u0458\u0443\u043d\u0438",
"\u0458\u0443\u043b\u0438",
"\u0430\u0432\u0433\u0443\u0441\u0442",
"\u0441\u0435\u043f\u0442\u0435\u043c\u0431\u0430\u0440",
"\u043e\u043a\u0442\u043e\u0431\u0430\u0440",
"\u043d\u043e\u0432\u0435\u043c\u0431\u0430\u0440",
"\u0434\u0435\u0446\u0435\u043c\u0431\u0430\u0440"
],
"SHORTDAY": [
"\u043d\u0435\u0434",
"\u043f\u043e\u043d",
"\u0443\u0442\u043e",
"\u0441\u0440\u0438",
"\u0447\u0435\u0442",
"\u043f\u0435\u0442",
"\u0441\u0443\u0431"
],
"SHORTMONTH": [
"\u0458\u0430\u043d",
"\u0444\u0435\u0431",
"\u043c\u0430\u0440",
"\u0430\u043f\u0440",
"\u043c\u0430\u0458",
"\u0458\u0443\u043d",
"\u0458\u0443\u043b",
"\u0430\u0432\u0433",
"\u0441\u0435\u043f",
"\u043e\u043a\u0442",
"\u043d\u043e\u0432",
"\u0434\u0435\u0446"
],
"fullDate": "EEEE, dd. MMMM y.",
"longDate": "dd. MMMM y.",
"medium": "dd.MM.y. HH:mm:ss",
"mediumDate": "dd.MM.y.",
"mediumTime": "HH:mm:ss",
"short": "d.M.yy. HH:mm",
"shortDate": "d.M.yy.",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "KM",
"DECIMAL_SEP": ",",
"GROUP_SEP": ".",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a0\u00a4",
"posPre": "",
"posSuf": "\u00a0\u00a4"
}
]
},
"id": "bs-cyrl-ba",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/BALISTICA-1.0.0.tar.gz/BALISTICA-1.0.0/balistica/GUI/AnalyticV.py | import numpy as np
import matplotlib
import tkinter as tk
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
matplotlib.use("TkAgg")
from matplotlib import pyplot as plt
plt.rcParams.update({'figure.max_open_warning': 0})
from matplotlib.figure import Figure
from balistica.PhysicsEngine.AnalyticVPhysicsHandler import AnalyticVPhysicsHandler
from tkinter import filedialog
class AnalyticVGUI(tk.Frame):
    """Tkinter front-end for the analytic ballistics model with drag
    proportional to velocity.

    The window is split into a left control panel (launch parameters,
    initial/final coordinates and action buttons) and a right panel that
    hosts the matplotlib canvas plus a summary of the computed statistics.
    """

    def __init__(self, master=None):
        """Build the widget tree and draw an empty trajectory plot.

        :param master: optional parent widget; ``None`` creates a Tk root.
        """
        # Physics back-end; real parameters are filled in by compute().
        self.physicshandler = AnalyticVPhysicsHandler(0, 0, 0)
        tk.Frame.__init__(self, master)
        self.grid()
        # Top level panel structure
        self.panels = tk.Frame(self)
        self.panels.pack(fill=tk.BOTH, expand=1)
        # Left and right panels
        self.leftpanel = tk.Frame(self.panels, relief=tk.GROOVE)
        self.leftpanel.pack(side=tk.LEFT)
        self.rightpanel = tk.Frame(self.panels)
        self.rightpanel.pack(side=tk.RIGHT)
        # Controls grid for upper left panel
        self.ulpanel = tk.LabelFrame(self.leftpanel, text='Parameters')
        self.ulpanel.pack(side=tk.TOP)
        # Control for angle
        self.anglelable = tk.Label(self.ulpanel, text='Initial angle (degrees)')
        self.anglelable.grid(row=0, column=0)
        self.angleinput = tk.Scale(self.ulpanel, from_=0, to=90, resolution=1, length=170, orient=tk.HORIZONTAL)
        self.angleinput.grid(row=0, column=1)
        # Control for drag
        self.draglable = tk.Label(self.ulpanel, text='Drag coefficient (s^-1)')
        self.draglable.grid(row=1, column=0)
        self.draginput = tk.Scale(self.ulpanel, from_=0.01, to=2, resolution=0.01, length=170, orient=tk.HORIZONTAL)
        self.draginput.grid(row=1, column=1)
        # Control for velocity
        self.velocitylabel = tk.Label(self.ulpanel, text='Initial velocity (m/s)')
        self.velocitylabel.grid(row=2, column=0)
        self.velocityinput = tk.Entry(self.ulpanel, justify=tk.RIGHT, width=10)
        self.velocityinput.grid(row=2, column=1)
        self.velocityinput.insert(0, '125')
        # Initial position entries (latitude / longitude / height, metres)
        self.latIlabel = tk.Label(self.ulpanel, text='I. Lat (m)')
        self.latIlabel.grid(row=3, column=0)
        self.lonIlabel = tk.Label(self.ulpanel, text='I. Lon (m)')
        self.lonIlabel.grid(row=3, column=1)
        self.heightIlabel = tk.Label(self.ulpanel, text='I. Height (m)')
        self.heightIlabel.grid(row=3, column=2)
        self.latIinput = tk.Entry(self.ulpanel, justify=tk.RIGHT, width=10)
        self.latIinput.grid(row=4, column=0)
        self.lonIinput = tk.Entry(self.ulpanel, justify=tk.RIGHT, width=10)
        self.lonIinput.grid(row=4, column=1)
        self.heightIinput = tk.Entry(self.ulpanel, justify=tk.RIGHT, width=10)
        self.heightIinput.grid(row=4, column=2)
        self.latIinput.insert(0, '0')
        self.lonIinput.insert(0, '0')
        self.heightIinput.insert(0, '0')
        self.pblanklabel = tk.Label(self.ulpanel, text='')
        self.pblanklabel.grid(row=5, column=0, columnspan=2)
        # Final (target) position entries
        self.latFlabel = tk.Label(self.ulpanel, text='F. Lat (m)')
        self.latFlabel.grid(row=6, column=0)
        self.lonFlabel = tk.Label(self.ulpanel, text='F. Lon (m)')
        self.lonFlabel.grid(row=6, column=1)
        self.heightFlabel = tk.Label(self.ulpanel, text='F. Height (m)')
        self.heightFlabel.grid(row=6, column=2)
        self.latFinput = tk.Entry(self.ulpanel, justify=tk.RIGHT, width=10)
        self.latFinput.grid(row=7, column=0)
        self.lonFinput = tk.Entry(self.ulpanel, justify=tk.RIGHT, width=10)
        self.lonFinput.grid(row=7, column=1)
        self.heightFinput = tk.Entry(self.ulpanel, justify=tk.RIGHT, width=10)
        self.heightFinput.grid(row=7, column=2)
        self.latFinput.insert(0, '100')
        self.lonFinput.insert(0, '100')
        self.heightFinput.insert(0, '0')
        self.barrierset = tk.BooleanVar()
        self.barriercheck = tk.Checkbutton(self.ulpanel, justify=tk.RIGHT, variable=self.barrierset, onvalue=True,
                                           offvalue=False, text='Show barrier')
        self.barriercheck.grid(row=8, column=0)
        # Controls grid for lower left panel
        self.blpanel = tk.Frame(self.leftpanel)
        self.blpanel.pack(side=tk.BOTTOM)
        # Buttons for various functions
        self.blanklabel = tk.Label(self.blpanel, text="")
        self.blanklabel.grid(row=0, column=0, columnspan=2)
        self.computebutton = tk.Button(self.blpanel, text="Compute", width=20, command=self.compute, default=tk.NORMAL)
        self.computebutton.grid(row=1, column=0, columnspan=3)
        self.computebutton = tk.Button(self.blpanel, text="x(t) vs. t", width=10, command=self.txGraph, default=tk.NORMAL)
        self.computebutton.grid(row=2, column=0)
        self.computebutton = tk.Button(self.blpanel, text="z(t) vs. t", width=10, command=self.tyGraph, default=tk.NORMAL)
        self.computebutton.grid(row=2, column=1)
        self.computebutton = tk.Button(self.blpanel, text="v(t) vs. t", width=10, command=self.tvGraph, default=tk.NORMAL)
        self.computebutton.grid(row=2, column=2)
        self.computebutton = tk.Button(self.blpanel, text="z(t) vs. x(t)", width=10, command=self.xyGraph, default=tk.NORMAL)
        self.computebutton.grid(row=3, column=0)
        self.computebutton = tk.Button(self.blpanel, text="v(t)vs. x(t)", width=10, command=self.xvGraph, default=tk.NORMAL)
        self.computebutton.grid(row=3, column=1)
        self.computebutton = tk.Button(self.blpanel, text="v(t) vs. z(t)", width=10, command=self.yvGraph, default=tk.NORMAL)
        self.computebutton.grid(row=3, column=2)
        # Red label used to report input/validation errors to the user.
        self.userlabel = tk.Label(self.blpanel, text="", fg="red")
        self.userlabel.grid(row=4, column=0, columnspan=3)
        self.csvbutton = tk.Button(self.blpanel, text="Save to CSV", command=self.saveCSV, default=tk.NORMAL)
        self.csvbutton.grid(row=5, column=0)
        self.pngbutton = tk.Button(self.blpanel, text="Save to PNG", command=self.savePNG, default=tk.NORMAL)
        self.pngbutton.grid(row=5, column=1)
        self.quitbutton = tk.Button(self.blpanel, text="Quit", command=self.bye, default=tk.NORMAL)
        self.quitbutton.grid(row=5, column=2)
        # Neutral parameters until the user presses Compute.
        self.physicshandler.v0 = 0
        self.physicshandler.theta = 0
        self.physicshandler.b = 1
        # Initial empty plot on the right panel.
        fig, axs = plt.subplots(1, 1, figsize=(7, 6), dpi=80)
        axs.set_xlabel('Distance (m)')
        axs.set_ylabel('Height (m)')
        axs.set_xlim(0, 100)
        axs.set_ylim(0, 100)
        axs.set_title('Ballistics with drag (b) proportional to v')
        self.mostrecentfig: Figure = fig
        self._render(fig)

    def _read_float(self, entry, errmsg):
        """Parse *entry* as a float; on failure show *errmsg* and return 0.0."""
        try:
            return float(entry.get())
        except ValueError:
            self.userlabel['text'] = errmsg
            return 0.0

    def geography(self):
        """Read the six coordinate entry fields.

        :return: ``(distance, height)`` where *distance* is the horizontal
            separation between the initial and final points and *height* is
            the final elevation minus the initial one.  A malformed field
            reports an error in the user label and contributes 0.0.
        """
        latI = self._read_float(self.latIinput, "Initial latitude format incorrect")
        latF = self._read_float(self.latFinput, "Final latitude format incorrect")
        lonI = self._read_float(self.lonIinput, "Initial longitude format incorrect")
        lonF = self._read_float(self.lonFinput, "Final longitude format incorrect")
        # BUG FIX: these two previously reported "Initial latitude format
        # incorrect" for malformed height fields.
        heightI = self._read_float(self.heightIinput, "Initial height format incorrect")
        heightF = self._read_float(self.heightFinput, "Final height format incorrect")
        distance = np.sqrt(np.power((latF - latI), 2) + np.power((lonF - lonI), 2))
        height = heightF - heightI
        return (distance, height)

    def compute(self):
        """Read all inputs, run the physics model and plot the trajectory."""
        self.userlabel['text'] = ""
        try:
            vel0 = float(self.velocityinput.get())
        except ValueError:
            self.userlabel['text'] = "Velocity format incorrect"
            return
        theta = np.deg2rad(float(self.angleinput.get()))
        b = float(self.draginput.get())
        self.physicshandler.v0 = vel0
        self.physicshandler.theta = theta
        self.physicshandler.b = b
        distance, height = self.geography()
        self.physicshandler.distance = distance
        if self.barrierset.get():
            self.physicshandler.height = height
            self.physicshandler.barrier = True
        else:
            # -1 marks "no barrier" for the physics handler.
            self.physicshandler.height = -1
            self.physicshandler.barrier = False
        self.physicshandler.compute()
        self.xyGraph()

    def _clear_plot_area(self):
        """Remove every widget currently gridded on the right panel."""
        for slave in self.rightpanel.grid_slaves():
            slave.destroy()

    def _render(self, fig):
        """Mount *fig* on the right panel, refresh the statistics box and
        remember the figure so that savePNG() can export it."""
        canvas = FigureCanvasTkAgg(fig, master=self.rightpanel)
        canvas.draw()
        canvas.get_tk_widget().grid(row=0, column=0)
        self.addStatistics()
        self.mostrecentfig = fig

    def txGraph(self):
        """Plot horizontal distance x(t) against time."""
        self._clear_plot_area()
        fig, axs = plt.subplots(1, 1, figsize=(7, 6), dpi=80)
        # Only show samples up to the time of flight.
        selected = self.physicshandler.data[self.physicshandler.data['t'] <= self.physicshandler.totalT()]
        axs.plot(selected['t'], selected['x'], '-', linewidth=2, color='b')
        axs.set_xlabel('Time (s)')
        axs.set_ylabel('Distance (m)')
        axs.set_title('Ballistics with drag (b) proportional to v')
        self._render(fig)

    def tyGraph(self):
        """Plot height z(t) against time."""
        self._clear_plot_area()
        fig, axs = plt.subplots(1, 1, figsize=(7, 6), dpi=80)
        selected = self.physicshandler.data[self.physicshandler.data['t'] <= self.physicshandler.totalT()]
        axs.plot(selected['t'], selected['z'], '-', linewidth=2, color='b')
        axs.set_xlabel('Time (s)')
        axs.set_ylabel('Height (m)')
        axs.set_title('Ballistics with drag (b) proportional to v')
        self._render(fig)

    def tvGraph(self):
        """Plot speed v(t) against time."""
        self._clear_plot_area()
        fig, axs = plt.subplots(1, 1, figsize=(7, 6), dpi=80)
        selected = self.physicshandler.data[self.physicshandler.data['t'] <= self.physicshandler.totalT()]
        axs.plot(selected['t'], selected['v'], '-', linewidth=2, color='b')
        axs.set_xlabel('Time (s)')
        axs.set_ylabel('Velocity (m/s)')
        axs.set_title('Ballistics with constant drag (b) proportional to v')
        self._render(fig)

    def xyGraph(self):
        """Plot the trajectory: height z(t) against distance x(t), with the
        optional target barrier overlaid."""
        self._clear_plot_area()
        distance, height = self.geography()
        fig, axs = plt.subplots(1, 1, figsize=(7, 6), dpi=80)
        axs.plot(self.physicshandler.data['x'], self.physicshandler.data['z'], '-', linewidth=2, color='b')
        axs.set_xlabel('Distance (m)')
        axs.set_ylabel('Height (m)')
        # Pick axis limits that keep the full trajectory (and the barrier,
        # when shown) in view with a small margin.
        if self.barrierset.get():
            maxax = np.max([self.physicshandler.totalR() + 10, self.physicshandler.maxH() + 10, distance + 20])
            minay = np.min([0, self.physicshandler.height - 10])
        else:
            maxax = np.max([self.physicshandler.totalR() + 10, self.physicshandler.maxH() + 10])
            minay = 0
        axs.set_xlim(np.min([0, self.physicshandler.totalR()]), maxax)
        axs.set_ylim(minay, maxax)
        axs.set_title('Ballistics with drag (b) proportional to v')
        if self.barrierset.get():
            axs.axvline(x=distance, color='red', linestyle='--')
            axs.plot([distance], [height], marker='P', color='green')
        self._render(fig)

    def xvGraph(self):
        """Plot speed v(t) against distance x(t)."""
        self._clear_plot_area()
        fig, axs = plt.subplots(1, 1, figsize=(7, 6), dpi=80)
        selected = self.physicshandler.data[self.physicshandler.data['x'] <= self.physicshandler.totalR()]
        axs.plot(selected['x'], selected['v'], '-', linewidth=2, color='b')
        axs.set_xlabel('Distance (m)')
        axs.set_ylabel('Velocity (m/s)')
        axs.set_title('Ballistics with drag (b) proportional to v')
        self._render(fig)

    def yvGraph(self):
        """Plot speed v(t) against height z(t)."""
        self._clear_plot_area()
        fig, axs = plt.subplots(1, 1, figsize=(7, 6), dpi=80)
        selected = self.physicshandler.data[self.physicshandler.data['z'] >= self.physicshandler.height]
        axs.plot(selected['z'], selected['v'], '-', linewidth=2, color='b')
        axs.set_xlabel('Height (m)')
        axs.set_ylabel('Velocity (m/s)')
        axs.set_title('Ballistics with drag (b) proportional to v')
        # Height decreases on the way down; reverse the axis so the plot
        # reads left-to-right in flight order.
        axs.invert_xaxis()
        self._render(fig)

    def addStatistics(self):
        """Populate the 'Results' box under the plot with the summary values
        exposed by the physics handler."""
        stats = tk.LabelFrame(self.rightpanel, text='Results')
        stats.grid(row=1, column=0)
        rangeLabel = tk.Label(stats, text=f'Range: {self.physicshandler.totalR():.1f} m')
        rangeLabel.grid(row=0, column=0)
        rangeLabel = tk.Label(stats, text=f'Max height: {self.physicshandler.maxH():.1f} m')
        rangeLabel.grid(row=1, column=0)
        mheightLabel = tk.Label(stats, text=f'Time to max height: {self.physicshandler.maxT():.1f} s')
        mheightLabel.grid(row=2, column=0)
        mheightLabel = tk.Label(stats, text=f'Time of flight: {self.physicshandler.totalT():.1f} s')
        mheightLabel.grid(row=3, column=0)
        mheightLabel = tk.Label(stats, text=f'Velocity of impact: {self.physicshandler.finalV():.1f} m/s')
        mheightLabel.grid(row=4, column=0)
        mheightLabel = tk.Label(stats, text=f'Angle of impact: {self.physicshandler.finalTheta():.1f} degrees')
        mheightLabel.grid(row=5, column=0)

    def saveCSV(self):
        """Export the computed trajectory data to a CSV file."""
        if self.physicshandler.data is None:
            self.userlabel['text'] = "No computed data exists"
        else:
            fname = filedialog.asksaveasfilename(initialdir=".", title="Select file",
                                                 filetypes=(("CSV files", "*.csv"), ("all files", "*.*")))
            if not fname:
                return  # dialog cancelled
            self.physicshandler.save_csv(fname + ".csv")
            self.userlabel['text'] = "File saved"

    def savePNG(self):
        """Save the most recently drawn figure as a PNG image."""
        if self.physicshandler.data is None:
            self.userlabel['text'] = "No computed data exists"
        else:
            fname = filedialog.asksaveasfilename(initialdir=".", title="Select file",
                                                 filetypes=(("PNG files", "*.png"), ("all files", "*.*")))
            if not fname:
                return  # dialog cancelled
            # BUG FIX: this previously called physicshandler.save_csv(), which
            # wrote CSV data under a .png name.  Export the figure instead.
            self.mostrecentfig.savefig(fname + ".png")
            self.userlabel['text'] = "File saved"

    def bye(self):
        """Leave the mainloop and destroy the widget tree."""
        self.quit()
        self.destroy()
if __name__ == "__main__":
    # Run the analytic-velocity GUI as a standalone application; creating
    # the frame with no master implicitly builds the Tk root window.
    gui = AnalyticVGUI()
    gui.mainloop()
/MADAP-1.1.0.tar.gz/MADAP-1.1.0/README.rst | .. image:: logo.png
:align: center
MADAP
~~~~~
Modular and Autonomous Data Analysis Platform (MADAP) is a
well-documented Python package which can be used for electrochemical
data analysis.
This package consists of 3 main classes for analysis:
- Voltammetry
- Impedance spectroscopy
- Arrhenius
This package allows the user to upload data in any common file format and
then select the data of interest. The user can produce scientific plots
and obtain the corresponding analysis from each procedure (e.g. by calling
"eis_analysis", the Nyquist and Bode plots as well as the fitted equivalent
circuit and its parameters will be drawn). This package can be installed
via pip/conda and can be utilized with a GUI, command line or just
directly importing the module in a python script.
Documentation
~~~~~~~~~~~~~
A documentation for the implementation and use of MADAP can be found
`here <https://fuzhanrahmanian.github.io/MADAP/>`__
Installation
~~~~~~~~~~~~
MADAP can be installed via pip:
.. code:: bash
pip install MADAP
Usage
~~~~~
A brief tutorial video covering the basics of MADAP usage can be found `here <https://youtu.be/nL-eJpb1AxI>`_.
MADAP can be used in a python script as follows:
.. code:: python
from madap.echem.arrhenius import arrhenius
from madap.echem.e_impedance import e_impedance
from madap.data_acquisition import data_acquisition as da
# Load the data
data = da.acquire_data('data.csv')
# Define the desired plots for Arrhenius analysis
plots_arr = ["arrhenius", "arrhenius_fit"]
# Define the desired plots for impedance analysis
plots_eis = ["nyquist", "nyquist_fit", "bode", "residual"]
    # Define a save location
save_dir = "/results"
### Arrhenius
# Instantiate the Arrhenius class for analysis (column names do not have to match exactly, this is just an example)
    Arr = arrhenius.Arrhenius(da.format_data(data["temperature"]), da.format_data(data["conductivity"]))
# Perform analysis and plotting
Arr.perform_all_actions(save_dir, plots = plots_arr)
### Impedance
# Initialize the Impedance class for analysis (column names do not have to match exactly, this is just an example)
Im = e_impedance.EImpedance(da.format_data(data["freq"]), da.format_data(data["real"]), da.format_data(data["img"]))
    # Initialize the EIS procedure. The initial_value is the initial guess for the equivalent circuit parameters (can also be left empty)
Eis = e_impedance.EIS(Im, suggested_circuit = "R0-p(R1,CPE1)",initial_value =[860, 3e+5, 1e-09, 0.90])
# Analyze the data
Eis.perform_all_actions(save_dir, plots = plots_eis)
# More usages and options can be found in the documentation.
MADAP can also be used via command line:
.. code:: bash
python -m madap_cli --file <path_to_file> --procedure <procedure> --results <path_to_results> --header_list <header_list> --plot <list_of_plots>
MADAP can also be used via a GUI:
.. code:: bash
python -m madap_gui
License
~~~~~~~
MADAP is licensed under the MIT license. See the LICENSE file for more
details.
Citation
~~~~~~~~
If you use MADAP in your research, please cite this GitHub repository https://github.com/fuzhanrahmanian/MADAP.
.. image:: https://zenodo.org/badge/494354435.svg
:target: https://zenodo.org/badge/latestdoi/494354435
References
~~~~~~~~~~
This package is based relies on the following packages and papers:
- Impedance GitHub repository by Matthew D. Murbach and Brian Gerwe and Neal Dawson-Elli and Lok-kun Tsui: `link <https://github.com/ECSHackWeek/impedance.py>`__
- A Method for Improving the Robustness of linear Kramers-Kronig Validity Tests DOI: https://doi.org/10.1016/j.electacta.2014.01.034
Acknowledgement
~~~~~~~~~~~~~~~
This project has received funding from the European Union's `Horizon 2020 research and innovation programme <https://ec.europa.eu/programmes/horizon2020/en>`_ under grant agreement `No 957189 <https://cordis.europa.eu/project/id/957189>`_. The project is part of BATTERY 2030+, the large-scale European research initiative for inventing the sustainable batteries of the future.
| PypiClean |
/ImgAnn-0.8.1-py3-none-any.whl/imgann/operators/imgdata.py |
import os
import sys
import random
import logging
import pandas as pd
# set the logger
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
""":cvar
(self.dataset) image_df attributes:
- image_id : int
- name : str
- folder : str
- path : str (separated by / )
- width : int
- height : int
- format : class [(default) RGB, GBR, SHA ]
"""
class ImgData:
    """Image-file metadata extracted from an image dataset directory.

    ``self.dataset`` is a pandas DataFrame with one row per image file and
    the columns ``name`` (file name), ``folder`` (parent folder name) and
    ``path`` (absolute file path).
    """

    def __init__(self, root: str, dataset):
        """
        :param root: directory of the dataset
        :param dataset: DataFrame with columns ``name``, ``folder``, ``path``
        """
        self.dataset = dataset
        self.root = root

    @classmethod
    def extract(cls, dataset_path: str):
        """Walk *dataset_path* and collect every image file into a DataFrame.

        :param dataset_path: directory of the dataset
        :return: ImgData instance
        """
        folders = ImgData.ext_folders(dataset_path)
        # ext_folders returns a plain string when the path points at a file,
        # which is not a valid dataset root.
        if isinstance(folders, str):
            logger.error("\n ERROR : Enter a Folder directory.")
            sys.exit(1)
        data_list = []
        if len(folders) == 1:
            # Flat dataset: images live directly under the root directory.
            files = ImgData.ext_files(os.path.abspath(dataset_path))
            img_files = ImgData.__filterImg(files)
            if files:
                data_list.extend(ImgData.list_creator(os.path.abspath(dataset_path), folders[0], img_files))
            else:
                logger.error("\n ERROR : there are no files in given directory!")
                sys.exit(1)
        else:
            # One sub-folder per class/category; folders without files are
            # silently skipped.
            for folder in folders:
                files = ImgData.ext_files(os.path.abspath(dataset_path) + os.sep + folder)
                img_files = ImgData.__filterImg(files)
                if files:
                    data_list.extend(
                        ImgData.list_creator(os.path.abspath(dataset_path + os.sep + folder), folder, img_files))
                else:
                    continue
        if data_list:
            data_df = pd.DataFrame.from_records(data_list, columns=['name', 'folder', 'path'])
        else:
            logger.warning("\nthere was some error, record tuples are empty.")
            sys.exit(1)
        return cls(root=dataset_path, dataset=data_df)

    @staticmethod
    def list_creator(root: str, folder: str, files: list):
        """Build (name, folder, absolute-path) record tuples.

        :param root: absolute path of the folder containing *files*
        :param folder: parent folder name recorded with each file
        :param files: file names inside *root*
        :return: list of ``(name, folder, path)`` tuples
        """
        tol_list = []
        for file in files:
            tol_list.append((file, folder, root + os.sep + file))
        return tol_list

    @staticmethod
    def ext_folders(path):
        """Return the folder names found at *path*.

        :param path: absolute or relative path
        :return: list of sub-folder names; the folder's own name (in a
            one-element list) when it has no sub-folders; or the parent
            folder name as a plain ``str`` when *path* is a file
        """
        folders = []
        try:
            assert os.path.exists(path), "path does not exists"
            folders = [x[1] for x in os.walk(path) if x[1] != []]
            if not folders:
                if not [x for x in os.walk(path)]:  # case : given dir is a file
                    parent_path, file_name = os.path.split(path)
                    folders = os.path.basename(parent_path)
                else:  # case : there are no folders under the dir.
                    folders = [os.path.basename(path)]
            else:  # case : there are sub folders in the folder.
                folders = folders[0]
        except Exception as error:
            logger.exception(error)
            sys.exit(1)
        return folders

    @staticmethod
    def ext_files(path):
        """Return the file names found at *path*.

        :param path: absolute or relative path
        :return: list of file names; the file's own name when *path* is a
            file; or ``None`` when the directory holds no files
        """
        files = []
        try:
            assert os.path.exists(path), "path does not exists"
            files = [x[2] for x in os.walk(path) if x[2] != []]
            if not files:
                if not [x for x in os.walk(path)]:  # case : given path is a file
                    parent_path, file_name = os.path.split(path)
                    files = file_name
                else:  # case : directory exists but holds no files
                    files = None
            else:
                if len(files) == 1:
                    files = files[0]
        except Exception as error:
            # BUG FIX: previously logged a fixed "no files in the given
            # directory" message that hid the real cause of the failure.
            logger.exception(error)
            sys.exit(1)
        return files

    def sample_dataset(self, numOfSamples: int):
        """Randomly sample records from the dataset.

        :param numOfSamples: number of sample images required
        :return: DataFrame with *numOfSamples* rows drawn without
            replacement, in ascending row order
        """
        numOfrecords, _ = self.dataset.shape
        rnd_numbers = sorted(random.sample(range(0, numOfrecords), numOfSamples))
        sample_df = self.dataset.iloc[rnd_numbers, :]
        return sample_df

    @staticmethod
    def __filterImg(file_list: list):
        """Keep only image files (png/jpg/jpeg, case-insensitive extension).

        :param file_list: list of file names
        :return: list of image file names
        """
        files_type = ["png", "jpg", "jpeg"]
        img_list = []
        for file in file_list:
            ext = file.split(".")[-1].lower()
            if ext in files_type:
                img_list.append(file)
        return img_list

    def describe(self):
        """Summarize the dataset folders.

        :return: dict with keys ``"number of images"`` (int),
            ``"number of folders"`` (int) and ``"folder image counts"``
            (mapping folder name -> image count)
        """
        desc_dict = {}
        num_of_imgs = self.dataset.shape[0]
        desc_dict["number of images"] = num_of_imgs
        folders = self.dataset.loc[:, "folder"].unique().tolist()
        desc_dict["number of folders"] = len(folders)
        fld_img_cnt_df = self.dataset.loc[:, ["name", "folder"]].groupby("folder").count()
        desc_dict["folder image counts"] = fld_img_cnt_df.to_dict()["name"]
        return desc_dict
/HTSQL-2.3.3.tar.gz/HTSQL-2.3.3/doc/html/searchindex.js | Search.setIndex({objects:{"htsql.core.syn.grammar.SyntaxGrammar":{add_rule:[10,1,1,""]},"htsql.core.util.Clonable":{clone_to:[10,1,1,""],clone:[10,1,1,""]},"htsql.HTSQL":{"__call__":[15,1,1,""],produce:[15,1,1,""]},"htsql.core.syn.parse":{parse:[10,3,1,""],prepare_parse:[10,3,1,""]},"htsql.core.util":{omapof:[10,2,1,""],DB:[10,2,1,""],to_literal:[10,3,1,""],toposort:[10,3,1,""],listof:[10,2,1,""],Printable:[10,2,1,""],filelike:[10,2,1,""],Hashable:[10,2,1,""],oneof:[10,2,1,""],trim_doc:[10,3,1,""],aresubclasses:[10,3,1,""],frozenomap:[10,2,1,""],to_name:[10,3,1,""],tupleof:[10,2,1,""],dictof:[10,2,1,""],Clonable:[10,2,1,""],TextBuffer:[10,2,1,""],autoimport:[10,3,1,""],maybe:[10,2,1,""],cachedproperty:[10,5,1,""],setof:[10,2,1,""],urlquote:[10,3,1,""],omap:[10,2,1,""],subclassof:[10,2,1,""],similar:[10,3,1,""],isfinite:[10,3,1,""]},"htsql.core.error":{ForbiddenError:[10,4,1,""],ConflictError:[10,4,1,""],EngineError:[10,4,1,""],NotFoundError:[10,4,1,""],Mark:[10,2,1,""],BadRequestError:[10,4,1,""],PermissionError:[10,4,1,""],InternalServerError:[10,4,1,""],NotImplementedError:[10,4,1,""],HTTPError:[10,4,1,""]},"htsql.core.error.Mark":{union:[10,6,1,""],excerpt:[10,1,1,""]},"htsql.core":{util:[10,0,1,""],domain:[10,0,1,""],syn:[10,0,1,""],error:[10,0,1,""]},"htsql.core.syn.syntax":{IntegerSyntax:[10,2,1,""],PrefixSyntax:[10,2,1,""],StringSyntax:[10,2,1,""],FunctionSyntax:[10,2,1,""],ComposeSyntax:[10,2,1,""],LinkSyntax:[10,2,1,""],SkipSyntax:[10,2,1,""],LiteralSyntax:[10,2,1,""],AssignSyntax:[10,2,1,""],PipeSyntax:[10,2,1,""],VoidSyntax:[10,2,1,""],ApplySyntax:[10,2,1,""],UnpackSyntax:[10,2,1,""],ReferenceSyntax:[10,2,1,""],DirectSyntax:[10,2,1,""],DecimalSyntax:[10,2,1,""],RecordSyntax:[10,2,1,""],ProjectSyntax:[10,2,1,""],NumberSyntax:[10,2,1,""],LabelSyntax:[10,2,1,""],CollectSyntax:[10,2,1,""],FilterSyntax:[10,2,1,""],OperatorSyntax:[10,2,1,""],LiftSyntax:[10,2,1,""],SelectSyntax:[10,2,1,""],LocateSyntax:[1
0,2,1,""],DetachSyntax:[10,2,1,""],ListSyntax:[10,2,1,""],FloatSyntax:[10,2,1,""],Syntax:[10,2,1,""],IdentitySyntax:[10,2,1,""],GroupSyntax:[10,2,1,""],IdentifierSyntax:[10,2,1,""],SpecifySyntax:[10,2,1,""],AttachSyntax:[10,2,1,""]},htsql:{HTSQL:[15,2,1,""]},"htsql.core.error.HTTPError":{status:[10,5,1,""]},"htsql.core.domain.ID":{make:[10,6,1,""]},"htsql.core.syn.scan":{prepare_scan:[10,3,1,""],scan:[10,3,1,""]},"htsql.core.syn":{decode:[10,0,1,""],grammar:[10,0,1,""],scan:[10,0,1,""],syntax:[10,0,1,""],parse:[10,0,1,""],token:[10,0,1,""]},"htsql.core.domain":{FloatDomain:[10,2,1,""],NumberDomain:[10,2,1,""],ContainerDomain:[10,2,1,""],EnumDomain:[10,2,1,""],Profile:[10,2,1,""],Product:[10,2,1,""],NullDomain:[10,2,1,""],IdentityDomain:[10,2,1,""],TextDomain:[10,2,1,""],DateTimeDomain:[10,2,1,""],ListDomain:[10,2,1,""],EntityDomain:[10,2,1,""],Value:[10,2,1,""],VoidDomain:[10,2,1,""],OpaqueDomain:[10,2,1,""],DecimalDomain:[10,2,1,""],BooleanDomain:[10,2,1,""],RecordDomain:[10,2,1,""],Record:[10,2,1,""],ID:[10,2,1,""],Domain:[10,2,1,""],IntegerDomain:[10,2,1,""],DateDomain:[10,2,1,""],TimeDomain:[10,2,1,""],UntypedDomain:[10,2,1,""]},"htsql.core.util.TextBuffer":{fail:[10,1,1,""],peek:[10,1,1,""],reset:[10,1,1,""],pull:[10,1,1,""]},"htsql.core.syn.grammar.LexicalGrammar":{add_signal:[10,1,1,""],add_rule:[10,1,1,""]},"htsql.core.syn.grammar.LexicalRule":{add_token:[10,1,1,""]},"htsql.core.domain.Record":{make:[10,6,1,""]},"htsql.core.util.DB":{parse:[10,6,1,""]},"htsql.core.syn.grammar":{SyntaxGrammar:[10,2,1,""],LexicalRule:[10,2,1,""],ParseStream:[10,2,1,""],LexicalGrammar:[10,2,1,""]},"htsql.core.domain.Domain":{parse:[10,7,1,""],dump:[10,7,1,""]},"htsql.core.syn.decode":{decode:[10,3,1,""]},"htsql.core.syn.token":{Token:[10,2,1,""]},"htsql.core.syn.grammar.ParseStream":{reset:[10,1,1,""],pull:[10,1,1,""],peek:[10,1,1,""]}},terms:{umth:15,four:[13,2,16,18],database_url:9,prefix:[13,2,15,9,16],is_open:10,whose:18,educ:[2,14,4,5,11,13,15,16,18],pmth:15,concret:[13,10
],under:[14,19],digit:[2,10,13,16,17,18],everi:[14,4,5,11,19,13,16,17,18],"void":17,affect:[4,18],school:[2,14,4,5,11,19,13,15,16,17,18],rmejia:2,gnu:11,vector:[10,12],"10k":19,num_dept:[2,14,18,5],miller:14,naiv:18,"10g":19,consequ:2,second:[2,14,10,11,19,16,17,18],aggreg:[0,14,6,4,5,18],follow:[2,14,5,9,10,11,12,13,15,16,17,18,19],is_singular:[11,19],even:[11,18,13,16,17,19],supervis:[2,16,18],hide:19,identitysyntax:10,neg:14,introspect:[11,19],bipolar:18,"new":[16,10,14,7,18],metadata:[0,10,11,19],abov:[13,5,16,18],num_std:15,never:17,chang:[4,10,19,18],here:[9,11,12,13,16,17,18,19],studio:[4,2,13,18,5],path:[13,10,14,12,19],pleasur:5,enrol:[13,11,19,18],interpret:[4,13,14],precis:[10,17],credit:[2,14,4,5,19,13,16,18],permit:[14,5,10,11,19,17,18],studi:[2,14,5,13,15,16,18],oodbm:13,substr:[14,18],course_pk:13,txt:[9,14,18],unit:[13,2,16,18],highli:13,bookkeep:[13,14,18],describ:[2,14,6,3,7,9,10,11,19,13,16,17,18],would:[5,11,19,18],init:9,call:[2,14,4,5,10,19,13,16,17,18],asset:[2,16,18],recommend:12,type:[0,14,2,6,4,5,10,11,19,13,15,16,17,18],until:[10,17],relat:[0,6,5,7,10,13,16,17,18],start_dat:[14,19],warn:19,exce:2,unpack:10,addon:[9,15,11,19],join:[5,14,18,11],gecon:18,setup:12,telescop:18,timedomain:10,conceptu:[2,16,18],coalesc:5,root:[2,14,4,11,12,15,19],overrid:[2,10,11,19,18],course_title_uk:13,give:12,gedu:[14,18],indic:[0,14,2,5,10,19,13,16,17,18],opaquedomain:10,to_nam:10,want:[9,5,19,18],end:[13,5,10,14,18],hoc:[2,14],quot:[2,14,10,18,17,19],identitydomain:10,classifi:[2,16],how:[3,5,9,13,16,17,19],rewind:10,answer:5,verifi:[10,11],config:[9,11,19],updat:19,binary_float:17,dialect:5,recogn:[13,2,10],after:[11,18],lab:[14,18],diagram:[4,13,16],befor:[2,16,18],modest:5,gmth:15,demonstr:[16,11,19],third:[16,19],classmethod:10,opaqu:17,credenti:[9,15,19],think:5,greek:18,maintain:[5,18],environ:[9,15,3],enter:[9,10],aloud:5,greec:18,order:[2,14,4,5,10,19,13,17,18],oper:[0,14,2,6,4,5,10,19,13,16,17,18],possib:11,composit:[16,2,10,18,5],over:[2,14,4,5,10
,11,19,16,17,18],becaus:[13,5,16,17],smallint:17,cli:9,fix:[5,10,17],better:18,offic:[11,14,18],persist:19,comprehens:[2,11,7,5],hidden:19,descend:[2,14,18],them:[17,11,19,18],thei:[14,4,5,10,11,19,13,16,18],fragment:[13,5,10,18],uacct:18,safe:18,stdart:[4,5,18],trail:[2,10,14,19],htsql_demo:[5,15,11,19],choic:16,subqueri:5,grammar:[10,0,6,2],arrow:[13,16,6],each:[2,14,4,5,10,11,19,13,16,17,18],debug:[2,14,19,18,5],side:[9,2,10,5],mean:[14,5,19,10,12,13,18],mediev:18,colleagu:5,doub:17,extract:[4,14],network:13,semiconductor:18,rewrit:18,adapt:[5,11,19,18],reader:5,got:[17,16,11],integersyntax:10,unrecogn:17,navig:[6,4,5,7,11,13,18],parenthesi:18,sademo:19,infix:[2,16,18],filter:[2,14,6,4,5,10,18],unabl:[16,10],nulldomain:10,regress:15,onto:5,rang:[2,17,14,18,5],class_typ:10,grade:[19,18],independ:[5,11],restrict:[13,12,19],instruct:[9,12,3],alreadi:[9,10,14,12],wrapper:5,primari:[13,14,19,18],hood:14,rewritten:16,top:[2,1,4,5,10,19,13,15,16,18],reverse_nam:19,specifysyntax:10,fiction:[13,18],underlin:10,master:[9,5,15,18],too:[5,10],similarli:[14,19],labelsyntax:10,toolset:18,john:14,school_campus_ck:13,consol:12,namespac:19,signup:9,took:5,direct:[2,14,4,5,10,13,18],decimaldomain:10,technic:18,target:[14,4,5,13,17,19],keyword:[15,10],provid:[1,5,7,19,10,11,12,9,14,15,18],expr:14,tree:10,project:[0,14,2,6,4,5,19,10,12,9,16,18],matter:[5,18],minut:[14,19],untypeddomain:10,raf:14,spectrum:18,spent:18,raw:[10,14],manner:[13,5,19],increment:[5,14],seem:11,seek:16,seminar:5,wsgiscriptalia:12,is_nul:[14,18],program_part_of_fk:13,latter:[2,18],rome:18,transmit:2,simplifi:18,part_of:13,though:[5,18,13,16,17,19],usernam:[15,10,11,19],object:[2,14,5,10,16,19],lexic:[2,10,6],regular:[2,14,6,5,10,13,16,17,18],letter:[13,2,16],tradit:13,don:[9,11,19,18],dob:[2,14],doc:10,flow:[0,14,2,6,4,5,10],doe:[2,14,4,19,10,12,13,16,17,18],bracket:[13,2,10],wildcard:[16,2,10,18],floa:17,unchang:[10,14],section:[2,14,4,5,7,12,13,16,17,18],dot:2,uphi:15,radix:10,opposit:13,syntax:[2,14,6,5,10
,19,15,18],pia_cours:14,involv:18,acquir:13,menu:11,explain:13,configur:[0,3,11,12,13,19],apach:12,siev:[4,2,16,14,5],sugar:18,varieri:5,busi:[2,14,4,5,7,11,13,15,16,18],rich:18,"__call__":15,department_cod:[2,14,5,11,19,13,16,18],report:[5,7,10,13,16,18],datetimedomain:10,emb:5,voidsyntax:10,"public":[14,4,5,19,15,16,18],twice:[19,18],bad:10,administ:[13,18],serve_forev:[9,15],nul:[2,10],mandatori:[5,10,19,18],result:[2,14,4,5,10,15,16,18],respons:10,fail:10,best:[13,11],literalsyntax:10,said:16,heterogen:10,databas:[2,1,5,7,9,10,11,12,13,15,16,17,18,19],wikipedia:19,simplest:4,approach:5,attribut:[2,14,4,5,10,19,13,16,18],accord:[4,13],extend:[2,15],extens:[0,3,5,11,19,17,18],"__unicode__":10,protect:[12,19],lexicalrul:10,expos:[13,17],hashabl:10,howev:[2,17,18],against:[2,12,18,19],logic:[0,14,2,6,4,17,18],browser:[5,11,19,18],com:19,nchar:17,kwd:10,dougla:2,guid:[11,12],assum:[11,14,12,18,19],summar:[5,16],duplic:[5,17,18],liabil:[2,16,18],union:10,three:[2,10,19,13,16,17,18],pbusad:18,much:10,interest:11,basic:[9,2,14,18],quickli:18,ani:[2,14,5,9,10,11,12,13,17,18,19],lift:[5,10,18],ident:[16,2,10,5],"__basis__":10,servic:[2,5,11,18,9,19],properti:[13,10,18],dept:[5,16,18],calcul:[0,14,2,5,10,19,16,18],dashboard:5,publicli:18,printabl:10,have:[2,5,7,9,10,11,19,13,15,16,17,18],tabl:[0,1,2,4,5,9,11,12,13,14,15,16,17,18,19],toolkit:19,tediou:[5,18],sever:[13,5,19,18],datetim:[17,15,10,14],perform:[10,19],suggest:10,make:[2,4,5,10,19,15,18],format:[14,5,10,11,19,17,18],complex:[5,7,18,19],split:13,complet:[15,16,11,19,18],pick:5,rail:9,hand:[10,18],"_via_":13,rais:[10,14],refin:18,aka:17,techniqu:[2,18,5],thu:[13,16,19,18],qualit:5,client:[5,14],thi:[2,14,6,4,5,3,9,10,11,12,13,15,16,17,18,19],programm:[5,7,18],everyth:11,campu:[2,14,4,5,11,19,13,16,18],left:[2,14,5,10,16,17],identifi:[2,14,6,10,13,16,19],birth:14,just:[13,5,16,18],gtch:18,human:[2,14,4,5,13,15,18],yet:[5,10],languag:[4,5,7,18,6],easi:[5,15,14,18],help:[19,11,7,18],wright:14,save:[10,12],applic:[0,2
,4,5,7,19,10,12,9,15,18],mayb:10,preserv:18,"223607e":14,background:[5,19],shadow:16,liftsyntax:10,specif:[14,5,7,10,11,19,13,15,17,18],gift:5,arbitrari:[2,14,5,18,17,19],manual:[0,8,7,11,9,15],numbersyntax:10,recordsyntax:10,underli:[13,5,14,18],right:[16,2,10,17,5],old:[2,14,4,5,11,13,18],negat:[2,14,18],interv:17,excerpt:10,intern:[2,10],flatten:18,item_typ:10,pain:5,transmiss:10,total:[13,10,18],cooper:14,subclass:10,multipli:[4,14],overcom:18,condit:[4,2,14,18,5],localhost:[15,11,19],core:[10,19,18],statistician:18,program_degree_ck:13,colleg:[2,14,4,5,11,13,15,16,18],repositori:9,peer:14,post:[13,5,14,18],plug:5,postgresql:[1,5,11,9,15,17,19],avg_credit:[2,16,14,18,5],literatur:5,commit:9,produc:[2,14,4,5,10,19,15,16,17,18],binary_doubl:17,civil:18,"float":[2,15,10,14,17],encod:[2,14,6,5,10,18],bound:18,down:[14,18],pair:[2,18],wrap:[13,5,10,17],storag:[13,5,14,19],east:18,accordingli:[12,18],git:9,fabric:18,wai:[2,1,4,5,11,19,18],support:[2,14,5,19,10,12,13,16,17,18],listsyntax:10,transform:[4,5,10],happi:11,avail:[1,10,11,19,15,16,18],width:10,reli:[13,9,19],editor:19,fraction:10,music:[2,14,4,5,15,16,18],constantli:11,lowest:2,head:[10,14],medium:14,form:[2,14,4,5,19,10,11,12,13,15,16,17,18],offer:[13,5,14,18],sqlalchemi:[5,11,19],"true":[2,14,4,5,10,19,13,16,17,18],analyst:[5,7,18],reset:[10,14],attr:19,maximum:[4,10,18],tell:18,mth:18,absenc:13,fundament:[5,18],unrel:18,emit:[15,10,14],trim:[14,18],classif:[14,19],featur:[5,19],"abstract":[5,10],exist:[14,5,11,12,13,16,18],ship:19,check:[13,17,10,14],assembl:5,encrypt:18,"6e1":14,when:[2,14,4,5,9,10,19,13,15,16,17,18],role:[2,12],test:[2,5,11,12,9,18],presum:11,node:[13,16,10,18],intend:[9,14],insensit:[2,14,19,18],intent:18,consid:[2,10,13,16,17,18],sql:[0,1,2,5,19,10,12,13,14,15,17,18],"15k":11,frozenomap:10,omega:14,bitbucket:7,"_id":19,furthermor:9,anywher:18,class_seq:[2,19],referencesyntax:10,tinyint:17,ignor:[10,14],time:[14,6,10,19,13,15,17,18],push:[9,10],concept:[4,18],chain:[10,18],skip:[10,18]
,global:[16,19],focus:5,functionsyntax:10,signific:10,customari:14,row:[2,14,5,11,19,15,18],value_typ:10,middl:18,depend:[2,1,4,10,11,14,16,17,18],graph:[4,13,16,18,6],decim:[2,14,10,15,16,17],readabl:[5,18],unpacksyntax:10,mathemat:[5,15,18],sociolog:5,must:[2,14,4,5,9,10,19,13,15,16,17,18],isinst:10,sourc:[15,1,19,3],string:[2,14,6,10,13,15,17,18],broadli:[5,18],uml:[2,5],brows:18,selectsyntax:10,administr:[13,0,5,18,3],level:[5,15,14,19,18],detachsyntax:10,gui:[2,5],iter:[15,10],magnet:18,item:[11,14],unsupport:10,larm:10,quick:[0,15,12],round:[2,14,18],prevent:19,work:[2,1,5,11,19,13,18],peek:10,sign:[2,10,18],groupsyntax:10,uastro:15,recorddomain:10,port:[10,11,12,9,15,19],procfil:9,appear:[2,16,12,18],current:[2,14,5,10,11,19,15,16,18],"271828e":[2,14,17],deriv:[13,16,10,19],gener:[2,14,4,5,7,10,11,19,13,15,16,17,18],satisfi:[2,14,4,5,10,13,18],address:[15,10,12,19],coincid:[2,14,10,13,16,17,19],teacher:5,box:[5,1],sqlite_gw:19,mysit:19,prepare_scan:10,prometheu:7,behav:10,semant:[5,10,14,6],regardless:18,extra:[5,10,14],tweak:[9,5,15,11,19],modul:[15,10,19],prefer:13,instal:[0,1,3,19,11,12,9,15,18],mobil:18,market:[4,2,18,5],httpd:15,memori:[11,19],univers:[13,5,18],visit:[5,14,7,19],todai:[2,14],mssql:[17,15,1,19],connector:18,live:19,criteria:[5,18],key_typ:10,scope:[0,14,2,6,4,5,16,18],edpol:5,reorder:[4,5,10,14],claus:5,finit:[10,17],visual:[13,11,19,18],templat:[9,14,18],examin:[17,18],effort:5,easiest:1,behalf:18,religi:18,dburi:[9,19],prepar:13,uniqu:[2,14,4,10,19,13,16,18],descriptor:10,can:[2,14,5,9,10,11,19,13,15,16,18],inadequ:[5,12],purpos:[13,5],nearest:14,encapsul:5,stream:10,agent:[11,18],topic:[13,5],critic:[13,5,18],occur:14,alwai:[13,2,10,19,18],differenti:13,multipl:[2,18,5],write:[14,5,7,10,19,13,18],urlquot:10,parameter:18,map:[5,10,18,16,17,19],product:[14,5,7,10,12,18],max:[4,5,14,18],clone:10,usabl:[5,18],mai:[2,14,4,5,19,10,12,13,16,18],underscor:[13,2,16,10],data:[0,14,2,6,4,5,7,19,10,12,13,16,17,18],course_by_credit:16,classroom:[5,
14],clonabl:10,practic:[13,5,14,12,18],eng:[2,14,4,5,11,13,18],explicit:[5,16,11,19],predic:[5,14,18],inform:[14,5,7,9,11,12,13,18,19],"switch":[14,18],cannot:[2,14,10,12,13,15,17,18],combin:[4,5,17,18],basket:14,smalldatetim:17,graduat:[5,18],still:[2,10,19],ieee:[10,17],dynam:16,entiti:[2,4,5,19,10,12,13,16,17,18],ethic:[2,16,18],conjunct:18,group:[2,7,18,5],concis:18,polici:[5,18],platform:[9,1],requir:[2,5,9,10,19,13,15,17,18],curli:2,embedd:5,non:[2,14,5,10,19,13,18],synopsi:[8,7],profession:5,initi:[14,4,5,9,10,13,17],gatewai:[5,12,19],aesthet:5,now:[9,14,18],discuss:[4,6],nor:2,introduct:[13,5,14,18],term:13,name:[0,14,2,6,4,5,10,11,19,13,15,16,17,18],revers:[13,19],separ:[2,14,5,10,19,13,17,18],full_nam:19,projectsyntax:10,stockhold:[2,16,18],domain:[14,5,10,19,13,15,16,18],replac:[2,14,5,9,10,11,13,19],individu:[13,2,15,18],continu:[14,5,13,15,16,18],year:[2,14,19],operand:[16,17,10,14,18],happen:18,shown:18,hexdecim:2,space:[11,14,19,18],profil:[2,10,5],internet:18,formula:17,correct:5,migrat:9,runtimeerror:10,ajax:19,orm:13,theori:[13,5,18],org:[1,5,7,11,19,14,18],care:[5,18],is_symbol:10,thing:5,place:[14,12,19],nonprofit:[2,16,18],principl:[2,18,5],imposs:12,frequent:5,first:[2,14,5,9,10,12,13,16,18,19],origin:[14,5,10,13,16,19],sanchez:14,reimplement:10,directli:[5,10,13,15,17,18],carri:5,onc:[9,10,18],submit:[7,19],trunc_month:19,oppos:[4,13,16,17],school_pk:13,open:[9,19,18],predefin:[16,10,17],size:10,given:[14,4,5,10,19,13,18],convent:[10,14,19,18],engineerror:10,heroku:[9,3],add_token:10,is_hard:10,averag:[5,16,14,18],handbook:[0,11],conveni:[16,14],especi:18,season:2,copi:11,specifi:[2,14,5,9,10,11,19,13,15,17,18],enclos:[13,2,10,17,19],mostli:16,holder:19,than:[2,14,4,5,10,19,13,18],serv:[5,7,10,11,13,16,19],mysql:[1,5,11,15,17,19],sophis:5,course_with_credit:16,balanc:18,were:10,posit:[16,2,15,10,18],seri:19,pre:7,fork:[5,14,18,11],sai:[13,9],argument:[2,14,4,7,10,19,15,16,17,18],dash:2,notimplementederror:10,exclud:[19,18],unadorn:13,engin:[2,
14,4,5,19,10,11,12,13,15,16,18],squar:[10,14],note:[14,11,19,13,16,17,18],take:[14,4,5,10,11,19,16,17,18],channel:2,begin:[13,5,10,14,19],sure:[19,18],nvarchar2:17,normal:[13,10,14,19],buffer:10,paid:18,exclus:14,school_name_uk:13,freshman:18,beth:14,renam:[13,19],adopt:13,drive:11,runtim:10,senior:18,show:[13,5,16,18],avg_over_3:5,curlei:18,permiss:[10,12],threshold:19,remote_gw:19,contemporari:5,ground:5,xml:[5,14,18],onli:[2,14,4,5,19,10,12,13,16,17,18],explicitli:[17,19,18],transact:[19,18],activ:18,behind:12,robl:2,dict:10,thompson:14,variou:[1,3,4,5,11,13,18],get:[0,2,5,10,11,19,15,18],sss:17,ssl:12,ssn:19,ssh:9,gem:9,truli:5,borrow:13,intrins:17,where:[2,14,5,9,10,19,13,16,18],wiki:19,kernel:[4,16,14],containerdomain:10,reserv:[10,18],calendar:17,infinit:10,review:[5,14,18],enumer:[17,10,14],label:[5,10,11,19],enough:5,between:[2,14,5,19,12,13,16,18],"import":[9,5,15,10,12],islam:18,across:[5,16,18],parent:[5,18],marvin:2,screen:5,come:[16,11],audienc:5,tutori:[0,11,18,5],psycopg2:[9,15],improv:13,among:[13,2,14,5],aresubclass:10,overview:[0,14,11,5],inspir:18,period:[13,2,18,5],dispatch:9,cancel:11,student_id:19,mark:[13,10,19,18],workshop:[14,18],valueerror:10,program_school_fk:13,emphas:13,resolut:[13,16,6],rubi:9,add_rul:10,repres:[2,5,10,13,17,18],former:[2,18],those:[2,4,5,9,10,13,16,18],"case":[2,14,4,5,10,19,13,16,17,18],plugin:[9,15,11,19],trick:19,cast:[17,14,18],invok:19,ctrl:11,henc:[11,14,18],program_via_part_of:[13,19],"__init__":10,develop:[9,2,14,18,5],doctor:15,obfusc:5,alphabet:[2,18],medic:18,null_if:14,econom:[4,2,13,18,5],same:[2,14,5,10,11,19,13,16,17,18],binari:[2,1,3,10,13,16,17],html:[5,14,18,11],document:[0,6,14,12,3],upolisci:18,finish:10,webserv:[9,11],nest:[5,16,19,18],assist:11,driver:[15,10,11],driven:18,mani:[5,19,18],program_cod:14,appropri:[10,19],choos:18,macro:5,htsq:14,without:[5,10,11,18,17,19],model:[0,6,5,10,11,13,16],clob:17,excel:5,rest:[2,10],kill:11,aspect:[2,16,19,18,5],mosfet:18,death:14,hint:18,except:[17,15,10,1
4,19],instrument:14,versa:17,real:17,read:[5,19,10,12,9,18],virginia:2,conflicterror:10,pop:10,amp:5,clone_typ:10,whitespac:[2,10,17],integ:[2,14,10,13,15,17,19],server:[1,5,7,9,10,11,12,13,15,17,19],either:[14,18,10,12,15,19],django_settings_modul:19,output:[2,14,4,5,19,15,17,18],manag:[2,1,4,5,9,15,18],yyyi:[17,10,14],verbal:5,ascend:[4,2,14,18],anomal:18,intact:19,slice:[4,2,14,5],definit:[5,19,18],honorarium:[14,4,5,15,16,18],evolv:5,exit:[10,11,19],inject:10,refer:[0,14,2,6,5,3,10,11,19,13,15,16,18],timeout:[11,19],power:18,internship:[2,16,18,5],immut:10,tupleof:10,acc:[2,14,4,5,11,13,16,18],comparison:[2,10,14,18],central:4,target_nam:19,degre:[2,5,18,13,16,19],srv:9,processor:[2,4,18,16,17,19],routin:[13,5,11,7,19],elementari:5,astro_cours:14,strip:[13,10,14],gart:[13,15,18],your:[9,5,11,18],log:9,aren:[5,11,18],if_nul:14,start:[0,14,2,6,4,5,7,9,10,11,12,13,15,16,18,19],interfac:[5,7,19],lot:11,laboratori:[14,18],tupl:[15,10],num_school:19,categor:13,pull:10,possibl:[2,5,18,12,17,19],"default":[2,14,9,10,11,19,13,17,18],electromagnet:18,start_respons:15,embed:5,connect:[13,15,10,11,19],scanner:[2,10],creat:[14,5,9,10,11,12,13,15,18,19],certain:2,deep:5,file:[9,10,11,12,19],fill:13,incorrect:14,denot:[13,2,16,14],prepend:2,field:[15,10,14,19,18],valid:[4,2,10,14,17],you:[1,5,19,11,12,9,15,16,18],architectur:5,poor:5,sequenc:[2,4,5,10,13,16,17,19],symbol:[16,2,10,14,19],docstr:10,pool:19,reduc:18,directori:[9,12,19],invest:5,descript:[2,14,5,7,13,8,16,17,18],linksyntax:10,potenti:[10,12],escap:10,represent:10,all:[2,1,4,5,10,11,19,13,14,15,16,17,18],dist:11,illustr:18,forbidden:[10,19],lack:[5,10,14,12,18],dollar:[2,18],month:[14,19],scalar:[6,4,5,18,13,19],correl:[5,18],abil:2,semest:11,children:5,content:[2,1,4,5,9,11,12,13,14,15,16,17,18,19],codasyl:13,tail:14,program:[2,14,5,11,19,13,15,16,18],introduc:[5,18],liter:[2,14,6,5,7,10,15,17,18],straightforward:[13,18],fals:[2,14,10,11,19,13,16,17,18],htraf:14,subcommand:19,util:[13,2,10],mechan:[3,5,18,12,16,19
],veri:[13,9],is_act:14,site_request_forgeri:19,list:[2,14,5,7,10,11,19,15,16,17,18],emul:17,adjust:19,small:5,past:9,sophomor:18,zero:[17,10,14],filtersyntax:10,design:[2,14,4,5,7,11,12,13,15,16,18],pass:[5,11,12,15,16,19],further:[5,18],arthi:[2,14,4,11,13,18],what:[0,14,5,13,17,18],sub:[4,19],sum:[5,14,18],abl:[5,17],overload:[12,18],delet:[10,19],version:[2,11,14,7,5],is_mandatori:19,uchem:15,alphons:14,method:[5,15,10],unlabel:19,full:13,themselv:[13,5],students_by_year:19,sophist:18,is_junk:10,shouldn:5,capacitor:18,standard:[2,18],modifi:[16,18],dictof:10,valu:[2,14,4,5,10,11,19,13,15,16,17,18],trunc:[14,18],search:[0,16,14,18],prior:18,amount:19,social:5,introductori:18,quotient:16,via:[2,19,18,5],depart:[2,14,4,5,11,19,13,15,16,18],curat:5,deprec:10,famili:17,decrement:14,coercion:17,select:[2,14,4,5,10,12,13,16,18],hexadecim:18,distinct:[4,5,13,14,18],liber:5,regist:[9,19],two:[2,14,4,5,10,19,13,16,17,18],psci:18,taken:[5,19,18],more:[2,4,5,7,19,11,12,13,16,17,18],desir:[10,18],particular:[5,17,12],known:10,cach:[9,10,12,19],skipsyntax:10,none:[17,15,10,14],hour:[5,14,17],outlin:9,histori:[2,14,5,11,13,16,18],remain:[13,9,19],learn:5,identifiersyntax:10,def:10,prompt:[9,11,19],scan:[2,10],challeng:5,share:[5,7,9,19,13,16,18],accept:[2,14,4,10,11,17,19],tabular:[4,14],explor:[11,18],parsabl:5,cours:[2,14,4,5,11,19,13,16,18],goal:14,secur:[12,18,3],rather:[4,5,10],anoth:[19,10,11,12,16,17,18],spreadsheet:18,divis:[2,14],csv:[2,14,5,11,19,16,18],simpl:[5,10,18],css:19,isn:[5,14,18,11],prefixsyntax:10,resourc:[5,12,18,19],referenc:[11,18],algebra:[5,18],variant:[14,18],reflect:[5,14],catalog:[11,19],inquiri:[5,7],unlink:[5,18],associ:[2,14,4,5,19,10,11,12,13,16,17,18],circumst:10,"short":[14,19,18],footer:14,postfix:2,confus:[2,18],author:5,notfounderror:10,django:[5,11,19],caus:[5,10,19],cx_oracl:15,trade:5,scientist:5,through:[11,18],hierarchi:18,paramet:[2,14,10,11,19,15,16,18],department_name_uk:13,exact:[11,10,14,18,17],formost:5,late:18,bypass:5,might:[5
,11],alter:16,wouldn:18,good:[2,5],"return":[14,5,10,11,19,15,17,18],timestamp:17,framework:19,detach:[16,2,10],authent:[19,15,10,12,18],token:[2,10,19],found:[5,10,11,19,9,16,18],unicod:[16,2,15,10],truncat:[4,5,19,18],interleav:14,hard:[5,14],procedur:[2,16,18],heavi:[5,18],expect:[16,17,10,14,18],orient:[2,18,5],is_flow:10,unquot:[2,10,17],admiss:19,pipesyntax:10,research:[5,18],lexicalgrammar:10,print:[5,15,14],occurr:[2,16],confidenti:19,proxi:12,advanc:5,differ:[2,14,4,5,10,11,19,13,16,17,18],asc:[5,14],cooki:19,reason:10,base:[5,10,11,18,16,19],put:[19,12,18],teach:[13,5,18],basi:[5,10],pyyaml:9,assignsyntax:10,integerdomain:10,omit:[13,10,17,19],perhap:[11,19,18],assign:[2,14,10,19,13,15,16,17,18],omap:10,feed:19,major:9,obviou:[5,18],upper:[2,14],exchang:18,misc:11,number:[2,14,4,5,10,11,19,13,16,17,18],sometim:18,done:[9,5,16,19,18],construct:[13,16,14,19,18],blank:[10,18],stabl:5,bioengin:[5,14,18,11],miss:14,guess:[19,18],exponenti:[2,10,17],interact:19,least:[13,2,14],statement:11,store:[13,10],schema:[5,9,11,18,13,19],adher:5,xmln:[5,14],option:[2,14,5,7,10,11,19,15,17,18],relationship:[13,11,18],selector:[2,14,19,18,5],getter:10,pars:[2,10],consult:16,kind:4,remov:[4,10,19],reus:19,str:10,consumpt:18,toward:14,danc:[2,14,4,5,15,16,18],bouchard:14,comput:[13,2,16,18,5],packag:[1,3,10,18,15,19],"null":[2,14,5,10,19,13,16,18],remote_db:19,equival:[2,12,18],self:[13,5,10,14,19],lit:18,also:[2,14,3,5,7,10,11,19,13,8,15,16,17,18],build:[5,15,1,18],brace:18,tool:[14,5,7,11,9,18],run_htsql:9,distribut:[9,19],previou:[4,18],inexact:17,most:[13,17,10,14,18],plai:[2,5],plan:12,myisam:11,alpha:14,autolimit:[15,11,19],cover:[10,18],setof:10,part:[14,5,10,19,13,16,17,18],microsoft:[5,15,1,19],wsgi:[15,10,12],alphanumer:[13,2,16,10,19],find:[4,16,19],access:[2,5,19,10,11,12,13,15,18],coerc:14,execut:[2,14,5,18,15,19],equiti:[2,16,18],solut:[5,18],quel:14,factor:5,unus:10,express:[0,14,2,6,4,5,10,19,13,15,16,17,18],composesyntax:10,nativ:[17,10,14,18],is_tot:10,synth
es:13,antiqu:18,common:5,certif:[5,18],set:[2,14,4,5,19,10,11,12,13,15,17,18],art:[2,14,4,5,11,13,15,16,18],dump:10,mutabl:10,see:[2,14,7,19,11,12,8,18],arg:10,close:13,unqualifi:14,arm:10,umath:18,expert:[5,18],bridg:[13,11],httperror:10,"15th":18,altern:[2,15,10,18],latin:[2,16],syntact:[2,5],xslt:[5,14],numer:[2,14,6,10,17,18],complement:[2,16,14,18],javascript:[14,19],lowercas:10,pymssql:15,popul:13,prepare_pars:10,both:[14,19,11,12,13,16,18],subtitl:7,department_pk:13,foreign:[5,9,11,18,13,19],roman:18,undetermin:10,context:[2,5,10,11,13,16,17],whole:14,load:[9,11,19],childhood:5,point:[9,15,10,17,19],arbitrarili:5,header:[11,14,19],linux:[11,1],along:[9,18],backend:[2,1,19,14,17],becom:[13,10,19],floatsyntax:10,due:11,empti:[2,14,4,10,9,16,17,18],implicit:[5,17],secret:[11,19],allow_cs_writ:19,clariti:[13,18],convert:[2,14,4,5,10,15,17,18],gap:13,floatdomain:10,understand:[5,14,18],demand:17,talent:5,repetit:5,instructor:18,look:[9,5],formatt:[11,14,18,6],"while":[5,11,18,16,17,19],behavior:14,pacc:18,everyon:11,loop:10,table_nam:[11,19],readi:[9,10,18],itself:[2,5,10,19,13,16,18],costli:18,centuri:18,decor:[2,14,6,5,10,18],listof:10,grant:18,belong:[13,18],shorten:18,tched:[5,18],hester:2,decod:[2,10],sqlite3:15,mod_wsgi:12,conflict:[13,10],higher:2,development:5,is_exact:10,optim:5,program_title_uk:13,syn:10,ctl:[0,8,2,7,11,19],temporari:17,user:[2,4,5,19,10,11,12,13,15,17,18],enumdomain:10,tsql:14,stack:[9,16],built:[2,14,19,11,12,15,16,18],subpackag:10,travers:[4,14],older:19,entri:[15,10,19],chemistri:15,parenthes:[2,10,19],person:12,textdomain:10,expens:[11,19],academ:18,lowel:14,stringsyntax:10,shape:13,glite:18,operatorsyntax:10,cut:14,shortcut:[14,18],forbid:[2,19],input:[2,14,4,10,16,17],subsequ:[16,10,14],varchar:[13,17],transpar:[5,18],intuit:[5,18],item_domain:10,nginx:9,bit:[10,17,18],formal:[2,10,18],semi:18,docutil:7,signal:10,resolv:[13,16],elaps:11,collect:[13,16,10,19],api:[2,10],avg_grad:19,often:[13,5,16,18],spring:[2,14],some:[2,14,4,5,9,
10,11,19,13,16,17,18],back:[15,16],sampl:10,"15t20":14,scale:10,school_cod:[2,14,4,5,11,19,13,15,18],per:18,substitut:18,internalservererror:10,larg:[2,19],slash:18,datebas:19,anna:14,machin:[5,19],run:[0,1,3,4,7,11,9,14,15,16,19],simple_serv:[9,15],step:[9,10],prerequisit:9,wget:11,subtract:[2,14],impos:13,constraint:[13,5,9,19,6],block:[10,19,18],emphasi:18,listdomain:10,within:18,ensur:19,nclob:17,inclus:5,span:7,fictiti:5,fledg:13,stylesheet:14,"long":[15,10,19],custom:[9,5,19],adjac:5,arithmet:[2,14,18,17],includ:[2,14,5,10,11,19,13,18],forward:18,xpath:5,properli:11,link:[2,14,4,5,9,10,11,19,13,16,18],translat:[5,10,11,19,13,16,17,18],atom:[2,17],line:[2,14,7,19,10,11,12,18],junior:18,concaten:14,utf:[2,14,18,5],consist:[4,2,16,18,5],planet:14,tightli:18,postgr:9,highlight:[5,19],similar:[10,19],curv:5,constant:[16,18],alumni:18,parser:10,doesn:[13,5,11,18],lectur:[14,18],"char":[13,17],titl:[2,14,5,19,13,15,16,18],invalid:[17,19],meaning:[2,17,5],forbiddenerror:10,lang:18,vice:17,stuctur:10,virtualhost:12,notion:13,reimagin:5,directsyntax:10,leak:[12,19],far:18,hello:[9,5,14],clone_to:10,pluggabl:18,code:[2,14,4,5,10,11,19,13,15,16,18],partial:[13,10],queri:[2,14,6,4,5,7,9,10,11,19,13,15,16,17,18],collectsyntax:10,legal:[2,16,18],edu:[2,14,4,5,11,13,18],privat:18,sensit:[5,14,18,11],lower:14,htsql:[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19],wave:18,implicitli:[14,18],tri:18,gender:14,"try":11,radio:18,pleas:[5,11,7],malici:19,visualis:13,natur:[2,14,4,5,13,15,16,18],focu:13,download:[11,15,1,12],click:18,compat:14,index:[0,10,14],compar:[13,16,18],cell:18,deduc:16,len:10,bodi:18,bachelor:[13,5,15,18],let:[13,11,18],extrema:14,sinc:[14,5,9,10,19,13,17,18],great:11,locatesyntax:10,survei:18,convers:[17,10,14,18,6],autoimport:10,technolog:[13,5],uhist:[13,15,14,18],earli:5,typic:[9,5,16,17,18],weakvaluedictionari:10,egypt:18,appli:[2,14,4,10,19,13,17,18],app:9,disciplin:13,inequ:18,"boolean":[14,6,10,13,15,17,18],booleandomain:10,cloud:[9,0,3],baccalaur:
[13,5,18],from:[2,1,3,4,5,9,10,11,12,13,14,15,16,17,18,19],commun:[5,11],over_3:5,doubl:[2,17,18],part_of_cod:[13,19,18],next:[2,4,5,9,10,12,13,16,18],websit:19,few:11,jonathan:14,sort:[2,14,4,5,10,19,18],econ:[4,2,18,5],is_vari:10,badrequesterror:10,train:18,starter:18,account:[2,14,4,5,9,11,19,13,16,18],retriev:[13,5],augment:[16,18],alia:14,datedomain:10,drill:18,meet:5,mesopotamia:18,control:[5,14,11],sqlite:[1,5,10,11,15,17,19],tap:9,quickstart:9,process:[14,5,10,11,9,18],carrol:14,high:[5,18],tag:[14,19],tab:[11,14],serial:[2,10,14],prece:10,nvarchar:17,varchar2:17,textbuff:10,instead:[13,5,18],ltrim:14,overridden:18,singular:[4,5,13,11,18],attent:18,philosophi:[0,5],physic:[13,15,18],instructor_cod:2,bind:18,correspond:[14,6,5,10,19,13,16,17,18],element:[2,14,4,10,13,17],issu:[11,7],bursar:[11,14,18],allow:[2,14,5,10,13,15,16,19],american:18,solv:[5,7],move:10,comma:[2,10,14,19,18],filelik:10,perfect:14,outer:[5,10,14],reilli:[2,17],underpin:18,tsv:14,therefor:[13,2,17],tsq:[2,14],junction:18,greater:[2,14,18],handl:[5,14,19,18],entitydomain:10,innermost:17,handi:11,literaci:18,mention:18,front:10,preschool:5,auto:11,edit:19,ustudio:[13,15,14,18],polit:[5,18],persia:18,to_liter:10,subset:18,societi:5,meta:[5,10,14,19,11],"static":[10,14,19,18],our:[0,14,5,11,18,19],special:[2,14,6,5,10,16,17,18],out:[5,1,19],variabl:10,course_dept_fk:13,pgsql:[1,5,10,11,9,15,17,19],suitabl:18,rel:[5,18],isfinit:10,plural:[14,4,5,11,13,18],program_pk:13,math:18,statist:[5,18],clarifi:18,workflow:5,dictionari:[15,10,19],likelihood:18,indent:10,financi:[13,2,16,18,5],could:[2,14,5,9,10,11,19,13,15,16,17,18],ask:[5,18],num_prog_by_degre:16,keep:[5,11,19],counterpart:17,length:[2,10,14],enforc:13,outsid:14,organiz:5,south:[2,14,4,5,11,13,16,18],softwar:[2,5],omapof:10,isbl:14,qualiti:18,date:[14,6,10,19,13,15,16,17,18],facil:19,toposort:10,forgeri:19,ancient:18,unknown:[16,10,18],system:[14,5,9,11,19,13,15,18],messag:[10,19],attach:[16,2,10,11],attack:[12,19],assort:10,"final":[17
,18],prone:5,shell:[9,11,14,7,19],methodolog:5,transistor:18,rtrim:14,exactli:[13,18],count_cours:5,structur:[2,14,6,10,19,13,18],charact:[2,14,10,19,13,16,17,18],collaps:18,psycholog:5,clearli:5,coher:18,cohes:5,need:[1,5,7,9,11,19,13,15,18],min:[14,18],preced:[16,2,10,18],which:[2,14,4,5,19,10,11,12,13,15,16,18],conclud:[14,6],linda:14,singl:[2,14,4,5,9,10,19,13,17,18],analysi:[13,5,10,18],deploy:[9,12,3],who:[5,7,18],oracl:[5,15,1,19,17],discov:19,rigor:5,deploi:[13,0,12,3],segment:[2,15,10,5],why:[0,5],homogen:[4,13,10,17],url:[9,5,12,18,19],request:[2,5,10,19,9,15,18],uri:[15,10,11,19,18],pipe:10,determin:[2,5,9,13,15,16,17,19],occasion:18,"_cach":10,fact:[13,10],dba:18,text:[14,10,11,19,13,17,18],verbos:5,add_sign:10,bring:11,ssssss:10,portfolio:14,trivial:[13,18],locat:[2,10,19,5],applysyntax:10,should:[2,14,5,10,11,12,17],smallest:14,suppos:18,local:[9,15,16,11,19],hope:18,meant:14,notat:[2,14,10,19,13,16,17,18],familiar:11,csrf:19,increas:[4,18],enabl:[5,11,19],organ:[13,2,16,18],whether:[4,13,10],stuff:5,integr:[2,16,12,18,19],contain:[2,14,3,5,10,11,19,13,16,17,18],parsestream:10,temporarili:10,statu:10,error:[14,5,10,11,19,16,17,18],pattern:[2,10,19,5],boundari:[19,18],make_serv:[9,15],written:[2,14,18,9,17,19],progress:19,neither:2,induc:[13,16],kei:[5,9,10,11,19,13,15,18],gilmor:14,otherwis:[14,5,10,11,13,16,19],addit:[2,1,18,13,15,19],weav:14,career:[14,18],equal:[2,10,14,18,17],wsgiref:[9,15],etc:[4,5,13,17,18],admit:[2,14],instanc:[14,6,5,10,19,13,15,17,18],subclassof:10,department_school_fk:13,etl:19,comment:10,walk:11,distinguish:[16,18],c14n:19,respect:[2,14,4,19,10,12,13,16,17,18],admin:15,quit:[11,19],divid:14,yaml:[9,11,19],addition:18,compos:[2,19,18,5],compon:[13,5,14,19,17],json:[5,14,19,18,11],besid:[5,11],treat:[2,16,14,18,17],electr:18,deliber:18,untyp:[17,15,14],cor:[11,19],togeth:[16,19,18],last:[13,14,18],numberdomain:10,present:[13,17,14,18],plain:[16,14,18],defin:[2,14,5,9,10,19,13,16,17,18],intranet:18,cachedproperti:10,observ:[13,
10,18],layer:[13,12,18],customiz:14,almost:[5,19],demo:[9,5,11,19,18],site:[12,19],bigint:17,avg:[2,14,5,18,16,19],scienc:[2,14,4,5,13,15,18],denial:19,parti:19,referenti:[13,16,14,19],cross:[11,19,18],python:[0,2,5,10,9,15,19],largest:14,decimalsyntax:10,infer:[13,2,15,17,5],difficult:5,oneof:10,http:[2,1,5,7,19,10,11,12,14,15,18],course_no:2,again:18,upon:[9,5,11,18],effect:[16,19],dai:[17,14,18],phy:18,student:[2,14,11,19,13,15,18],collat:18,codemirror:19,expand:[19,18],audit:[2,16,18,5],well:[14,19,18],thought:18,exampl:[2,14,4,5,11,19,13,15,16,17,18],command:[2,14,7,11,19,9,8,18],english:18,usual:[2,19,18],unari:[16,10],less:[17,14,18],obtain:18,filedb:19,web:[2,5,7,19,11,12,9,18],jinja:5,nullabl:11,hall:2,script:[12,19],add:[2,14,5,10,11,12,9,16,19],lookup:[5,16],match:[9,16,10,19,18],allow_cs_read:19,num_prog:[2,16,14],punctuat:[2,10],realiz:[5,19],cedar:9,know:[5,14,18],press:11,password:[15,10,11,19],insert:[10,19],like:[14,5,10,11,9,18],lost:5,tinytext:17,page:[0,14,19,8,11],signular:11,voiddomain:10,"export":9,superclass:10,proper:[17,12],librari:[2,15,5],leaf:10,lead:[10,14,19],broad:18,avoid:[5,18],octet:[2,10,18],outgo:16,leav:[4,10],mode:[2,18],trim_doc:10,accident:[5,7,18,19],"enum":[17,6],usag:[3,7,19,10,12,9,18],host:[10,11,12,9,15,19],nutshel:[0,5],obei:[2,14,17],although:[17,11,18],offset:18,unprint:18,about:[11,14,19,18],actual:[13,17,10,14],world:[9,5,18],column:[2,14,5,11,19,13,15,18],rarm:10,lifecycl:14,constructor:[15,10],discard:9,comp:[2,18,5],conflat:5,artefact:10,own:18,devic:18,easy_instal:15,automat:[14,5,11,18,15,19],merg:19,syntaxgrammar:10,"var":18,attachsyntax:10,"function":[0,14,2,6,4,5,10,11,19,15,16,17,18],north:[2,14,4,5,13,16,18],astro:[2,14,4,11,13,18],prehistor:18,overflow:14,highest:2,bug:[8,7],count:[2,14,4,5,11,19,15,16,17,18],succe:10,sex_cod:14,wish:[5,12,18],displai:[4,5,7,10,19,17,18],record:[2,14,4,5,10,19,15,17,18],below:[2,12,18],limit:[14,4,5,11,19,9,15,18],"27htsql":2,problem:[5,14,18],subordin:5,evalu:[2,14,4,5,
10,16],"int":[9,15,10,17],pia:14,implement:[5,10,11,19],pip:[15,1],percent:[2,18],detail:[2,16,14,19,11],virtual:18,other:[2,1,5,7,10,11,19,13,14,16,17,18],bool:[15,10,17],futur:[2,19],branch:14,varieti:[5,17],stat:18,repeat:[5,18],star:14,"class":[2,4,7,10,13,15,16,19],astronomi:[2,14,5,11,13,15,16,18],permissionerror:10,sheri:14,stai:5,appoint:[11,19],eof:10,scientif:14,rule:[14,6,5,10,13,16,17,18],portion:9},objtypes:{"0":"py:module","1":"py:method","2":"py:class","3":"py:function","4":"py:exception","5":"py:attribute","6":"py:classmethod","7":"py:staticmethod"},titles:["HTSQL Documentation","Installing HTSQL","HTSQL Grammar","HTSQL Administration","Data Flows","Overview of HTSQL","HTSQL Reference","<tt class=\"docutils literal\"><span class=\"pre\">htsql-ctl</span></tt>","Manual Pages","HTSQL in the Cloud","Internal API","HTSQL Handbook","Deploying HTSQL","Data Model","Functions and Operators","HTSQL in Python Applications","Naming Scopes","Data Types","HTSQL Tutorial","Using and Configuring HTSQL"],objnames:{"0":["py","module","Python module"],"1":["py","method","Python method"],"2":["py","class","Python class"],"3":["py","function","Python function"],"4":["py","exception","Python exception"],"5":["py","attribute","Python attribute"],"6":["py","classmethod","Python class method"],"7":["py","staticmethod","Python static method"]},filenames:["index","admin/install","ref/syntax","admin/index","ref/flows","overview","ref/index","man/htsql-ctl.1","man/index","admin/cloud","api","handbook","admin/deploy","ref/model","ref/functions","embed","ref/scopes","ref/types","tutorial","admin/usage"]}) | PypiClean |
/Lmgeo-1.1.0.tar.gz/Lmgeo-1.1.0/lmgeo/formats/asciigrid.py | from .const import Const, constants as const
import os.path
import array
import pycrs
from .raster import Raster
from .gridenvelope2d import GridEnvelope2D;
from warnings import warn
__author__ = "Steven B. Hoek"
class AsciiGrid(Raster, GridEnvelope2D):
    """A raster represented by an ASCII file, with extension 'asc'.

    Implements the Esri ASCII grid format: a six-line text header
    (ncols, nrows, xllcorner, yllcorner, cellsize, nodata_value) followed
    by one line of whitespace-separated cell values per raster row.
    Rows are processed sequentially: read with next(), written with
    writenext(). Behaviour also depends on the Raster and GridEnvelope2D
    base classes defined elsewhere in this package.
    """
    # Data attributes - assign some dummy values for the mean time
    # (all of these are overwritten in __init__ / open / readheader)
    _const = None
    name = ""
    folder = os.getcwd();          # NOTE: class attribute evaluated once at import time
    nodatavalue = -9999.0;
    datatype = const.FLOAT;
    dataformat='f'                 # array.array type code: 'f' float, 'i' integer
    datafile = None;
    currow = 0;                    # last row read (1-based after first next() call)

    # Private attributes (name-mangled to _AsciiGrid__digitspercell)
    __digitspercell = 7;

    def __init__(self, filepath='', *datatype):
        """Initialise the grid from *filepath*.

        filepath -- full path of the .asc file; name and folder are derived from it
        datatype -- optional: pass const.INTEGER for integer cell values;
                    anything else (or nothing) selects const.FLOAT
        """
        # Check input
        if filepath == '':
            # NOTE(review): only prints a warning; the instance is still constructed
            print('File path cannot be an empty string (method __init__).')

        # Module wide constants
        self._const = Const()
        self._const.FILEXT = "asc";
        self._const.MAXDIGITSPERCELL = 8 # TODO this is hardcoded - change this
        self.name = "dummy." + self._const.FILEXT;

        # Initialise further - dummy envelope is replaced once a real file is opened
        Raster.__init__(self, filepath)
        GridEnvelope2D.__init__(self, 1, 1, 0.0, 0.0, 0.1, 0.1)

        # Retrieve the name from the filepath and assign - incl. extension
        self.name = os.path.basename(filepath);
        # Also derive the folder
        self.folder = os.path.dirname(filepath);
        # Finally set the datatype
        if len(datatype) > 0:
            if (datatype[0] == const.INTEGER):
                self.datatype = const.INTEGER;
                self.dataformat = 'i'
            else:
                self.datatype = const.FLOAT;

    def open(self, mode, ncols=1, nrows=1, xll=0.0, yll=0.0, cellsize=100.0, nodatavalue=-9999.0):
        """Open the underlying .asc file.

        mode -- 'r' to read (header is parsed, grid attributes are taken from
                the file) or 'w' to write (grid attributes are taken from the
                keyword arguments and a header is written).
        Returns True on success, False when reading a file that does not exist.
        """
        # Initialise - let the base class record/validate the mode first
        super(AsciiGrid, self).open(mode);

        # If file does not exist and mode[0] = 'w', create it!
        self._mode = mode[0];
        if (mode[0] == 'w') and (not self.file_exists):
            self.datafile = open(os.path.join(self.folder, self.name), 'w');
            GridEnvelope2D.__init__(self, ncols, nrows, xll, yll, cellsize, cellsize);
            self.cellsize = cellsize;
            self.nodatavalue = nodatavalue;
            self.writeheader();
            self.write_crs();
            return True;
        else:
            # Open the file
            if self.file_exists:
                self.datafile = open(os.path.join(self.folder, self.name), mode[0]);
                if (mode[0] == 'w'):
                    # Assign the data attributes
                    # NOTE(review): unlike the create branch above, this overwrite
                    # branch never calls write_crs() - confirm that is intended
                    self.ncols = ncols;
                    self.nrows = nrows;
                    self.xll = xll;
                    self.yll = yll;
                    self.cellsize = cellsize;
                    self.nodatavalue = nodatavalue;
                    self.writeheader();
                else:
                    # File is open - retrieve the data attributes from the header of the file
                    self.readheader();
                    self.read_crs()
                    # Also find out how many digits per cell were used - assume it's constant
                    pos = self.datafile.tell();
                    line = self.datafile.readline();
                    # NOTE(review): '/' is true division under Python 3, so this
                    # yields a float; presumably integer division ('//') was intended
                    self.__digitspercell = ((1 + len(line)) / self.ncols) - 1;
                    self.datafile.seek(pos); # return to first line with data
                GridEnvelope2D.__init__(self, self.ncols, self.nrows, self.xll, self.yll, self.cellsize, self.cellsize);
                return True;
            else: return False;

    def readheader(self):
        """Parse the six-line .asc header and assign the grid attributes.

        Assumes the file is open and positioned at the start.
        Raises IOError when no file object has been assigned.
        """
        # Assume that the file is open; read header of the file and assign all attributes
        if (self.datafile != None):
            # TODO: make this case-insensitive!
            if (not self.datafile.closed):
                hl = self.datafile.readline();
                self.ncols = int(hl.replace(const.NCOLS, ''));
                hl = self.datafile.readline();
                self.nrows = int(hl.replace(const.NROWS, ''));
                hl = self.datafile.readline();
                self.xll = float(hl.replace(const.XLLCORNER.lower(), ''));
                hl = self.datafile.readline();
                self.yll = float(hl.replace(const.YLLCORNER.lower(), ''));
                hl = self.datafile.readline();
                pixsize = float(hl.replace(const.CELLSIZE, ''));
                self.cellsize = pixsize;
                # The format supports only square cells, so dx == dy == cellsize
                self.dx = self.dy = pixsize;
                hl = self.datafile.readline();
                # No-data value is kept in the same type as the cell data
                if (self.datatype == const.INTEGER):
                    self.nodatavalue = int(hl.replace(const.NODATA_VALUE, ''));
                else:
                    self.nodatavalue = float(hl.replace(const.NODATA_VALUE, ''));
        else:
            msg = "File " + self.name + " not found in folder " + self.folder;
            raise IOError(msg);

    def read_crs(self):
        """Load the coordinate reference system from a sidecar projection file.

        Looks for a file with the same stem and the projection extension
        (const.PROJFILEXT) next to the grid; silently does nothing if absent.
        """
        if (self.datafile != None):
            stem = os.path.splitext(self.name)[0]
            fn = os.path.join(self.folder, stem + "." + const.PROJFILEXT)
            if os.path.exists(fn):
                self._crs = pycrs.load.from_file(fn)

    def next(self, parseLine=True):
        """Read the next row and return it as an array.array ('l' or 'f').

        parseLine -- when False the line is consumed but not parsed, so the
                     returned array still holds nodatavalue placeholders
                     (used by get_value to skip rows cheaply).
        Raises StopIteration past the last row or when the file is closed or
        unassigned; any other failure is re-raised as a generic Exception.
        """
        # Read the next row if possible, otherwise generate StopIteration
        # Assume that the header lines have been read and are correct wrt. ncols and nrows
        result = None;
        try:
            if (self.datafile != None):
                if (not self.datafile.closed):
                    self.currow += 1;
                    if (self.currow > self.nrows):
                        raise StopIteration("Attempt to move beyond last row.");

                    # Allocate a new array with ncols of the right type
                    if (self.datatype == const.INTEGER):
                        result = array.array('l', self.ncols * [self.nodatavalue]);
                    else:
                        result = array.array('f', self.ncols * [self.nodatavalue]);

                    # Now fill the array - first translate whitespace into space
                    rawline = self.datafile.readline();
                    if parseLine:
                        i = 0;
                        # Extra tokens beyond ncols on the line are ignored
                        for x in rawline.split():
                            if (i < self.ncols):
                                if (self.datatype == const.INTEGER):
                                    result[i] = int(x);
                                else:
                                    result[i] = float(x);
                                i = i + 1;
                    return result;
                else: raise StopIteration("Attempt to read raster data from a closed file.");
            else: raise StopIteration("Attempt to read raster data from an unassigned file.")
        except StopIteration:
            raise StopIteration;
        except Exception as e:
            raise Exception(e);

    @staticmethod
    def getFileExt(self):
        """Return the data file extension via the Raster base class.

        NOTE(review): declared @staticmethod yet takes a 'self' parameter,
        so callers must pass an (unused) argument explicitly - confirm
        whether the parameter should be removed.
        """
        return Raster.getDataFileExt()

    def writeheader(self):
        """Write the six-line .asc header from the current grid attributes.

        Assumes the file is open for writing; raises IOError on failure.
        """
        # Assume that the file is open; write header of the file with all attributes
        if (self.datafile != None):
            if (not self.datafile.closed):
                try:
                    maxdigits = self._const.MAXDIGITSPERCELL + 1
                    self.datafile.write(const.NCOLS + " " + str(self.ncols).rjust(maxdigits) + "\n");
                    self.datafile.write(const.NROWS + " " + str(self.nrows).rjust(maxdigits) + "\n");
                    self.datafile.write(const.XLLCORNER.lower() + " " + str(self.xll).rjust(maxdigits) + "\n");
                    self.datafile.write(const.YLLCORNER.lower() + " " + str(self.yll).rjust(maxdigits) + "\n");
                    self.datafile.write(const.CELLSIZE + " " + str(self.cellsize).rjust(maxdigits) + "\n");
                    self.datafile.write(const.NODATA_VALUE + " " + str(self.nodatavalue).rjust(maxdigits) + "\n");
                except Exception as e:
                    print(e);
                    msg = "Header lines could not be written to file " + self.name + " in folder " + self.folder;
                    raise IOError(msg);

    def write_crs(self):
        """Write the grid's CRS as ESRI WKT to a sidecar projection file.

        The file shares the grid's stem and uses const.PROJFILEXT as extension.
        Assumes self.crs is set (provided by a base class - not visible here).
        """
        if (self.datafile != None):
            stem = os.path.splitext(self.name)[0]
            fn = os.path.join(self.folder, stem + "." + const.PROJFILEXT)
            with open(fn, "w") as writer:
                writer.write(self.crs.to_esri_wkt())

    def writenext(self, sequence_with_data):
        """Write exactly one row of cell values, right-justified per cell.

        sequence_with_data -- indexable sequence of at least ncols values.
        NOTE(review): errors are printed and re-raised as StopIteration,
        which is unusual for a write path - confirm callers rely on this.
        """
        # Write the next line if possible, otherwise generate StopIteration
        # We assume that exactly 1 row is included.
        try:
            if (self.datatype == const.INTEGER):
                # TODO deal with numpy arrays if necessary
                for k in range(0, self.ncols):
                    s = str(sequence_with_data[k]).rjust(self._const.MAXDIGITSPERCELL + 1);
                    self.datafile.write(s);
            else:
                totalwidth = self._const.MAXDIGITSPERCELL - 1
                fmtstr = "{:" + str(totalwidth) + ".3f}" # TODO format is hardcoded - change this!
                for k in range(0, self.ncols):
                    s = fmtstr.format(sequence_with_data[k]).rjust(self._const.MAXDIGITSPERCELL + 1);
                    self.datafile.write(s);
            return self.datafile.write("\n");
        except Exception as e:
            print(e);
            raise StopIteration

    def flush(self):
        """Flush pending writes to disk."""
        self.datafile.flush();

    def reset(self):
        """Rewind to the first data row (re-reads the header in read mode)."""
        self.datafile.seek(0);
        if (self._mode[0] == 'r'):
            self.readheader();
        super(AsciiGrid, self).reset()

    def get_value(self, i, k):
        """Return the value at row *i*, column *k* (both 0-based).

        NOTE: rewinds and re-scans the file from the top on every call,
        so this is O(i) per lookup - fine for occasional access only.
        """
        # Return the wanted value
        for _ in range(0, i): self.next(False)
        line = self.next()
        self.reset()
        return line[int(k)]

    def get_type(self):
        """Return the Python type (int or float) matching the cell data."""
        if self.dataformat == 'i':
            return int
        else:
            return float

    @GridEnvelope2D.dx.setter
    def dx(self, dx):
        """Set the horizontal pixel size; warn if it deviates from cellsize."""
        # We assume that the cellsize was already set
        if abs(dx - self._cellsize) > const.epsilon:
            warn("Given the *.asc file format, class Asciigrid must have 1 pixel size for the horizontal and the vertical!")
        GridEnvelope2D.dx.fset(self, dx)

    @GridEnvelope2D.dy.setter
    def dy(self, dy):
        """Set the vertical pixel size; warn if it deviates from cellsize."""
        # We assume that the cellsize was already set
        if abs(dy - self._cellsize) > const.epsilon:
            warn("Given the *.asc file format, class Asciigrid must have 1 pixel size for the horizontal and the vertical!")
        # NOTE(review): this invokes the dx setter with the dy value - looks
        # like a copy-paste slip; presumably GridEnvelope2D.dy.fset was intended
        GridEnvelope2D.dx.fset(self, dy)
/Misago-0.36.1.tar.gz/Misago-0.36.1/misago/threads/serializers/thread.py | from math import ceil
from django.urls import reverse
from rest_framework import serializers
from ...categories.serializers import CategorySerializer
from ...core.serializers import MutableFields
from ...notifications.threads import ThreadNotifications
from ..models import Thread
from .poll import PollSerializer
from .threadparticipant import ThreadParticipantSerializer
__all__ = ["ThreadSerializer", "PrivateThreadSerializer", "ThreadsListSerializer"]
# Trimmed-down category serializer used for the thread's own category and for
# the breadcrumb path; keeps only the fields the thread views actually render.
BasicCategorySerializer = CategorySerializer.subset_fields(
    "id",
    "parent",
    "name",
    "short_name",
    "color",
    "description",
    "is_closed",
    "css_class",
    "level",
    "lft",
    "rght",
    "is_read",
    "url",
)
class ThreadSerializer(serializers.ModelSerializer, MutableFields):
    """Serializes a Thread for the API.

    Several fields (acl, is_new, is_read) read attributes that are annotated on
    the thread object per-request; they degrade gracefully when the annotation
    is absent. Context keys used: "settings", "watched_thread"/"watched_threads".
    """

    category = BasicCategorySerializer(many=False, read_only=True)
    acl = serializers.SerializerMethodField()
    has_unapproved_posts = serializers.SerializerMethodField()
    is_new = serializers.SerializerMethodField()
    is_read = serializers.SerializerMethodField()
    path = BasicCategorySerializer(many=True, read_only=True)
    poll = PollSerializer(many=False, read_only=True)
    pages = serializers.SerializerMethodField()
    best_answer = serializers.PrimaryKeyRelatedField(read_only=True)
    best_answer_marked_by = serializers.PrimaryKeyRelatedField(read_only=True)
    notifications = serializers.SerializerMethodField()
    starter = serializers.SerializerMethodField()
    last_poster = serializers.SerializerMethodField()
    api = serializers.SerializerMethodField()
    url = serializers.SerializerMethodField()
    class Meta:
        model = Thread
        fields = [
            "id",
            "category",
            "title",
            "replies",
            "has_unapproved_posts",
            "started_on",
            "starter_name",
            "last_post_on",
            "last_post_is_event",
            "last_post",
            "last_poster_name",
            "is_unapproved",
            "is_hidden",
            "is_closed",
            "weight",
            "best_answer",
            "best_answer_is_protected",
            "best_answer_marked_on",
            "best_answer_marked_by",
            "best_answer_marked_by_name",
            "best_answer_marked_by_slug",
            "acl",
            "is_new",
            "is_read",
            "path",
            "poll",
            "notifications",
            "starter",
            "last_poster",
            "pages",
            "api",
            "url",
        ]
    def get_acl(self, obj):
        """Return the ACL annotated on the thread, or an empty dict when absent."""
        try:
            return obj.acl
        except AttributeError:
            return {}
    def get_has_unapproved_posts(self, obj):
        """True only when the viewer can approve posts AND the thread has any."""
        try:
            acl = obj.acl
        except AttributeError:
            return False
        return acl.get("can_approve") and obj.has_unapproved_posts
    def get_is_new(self, obj):
        """Per-request "new thread" flag; None when the annotation is absent."""
        try:
            return obj.is_new
        except AttributeError:
            return None
    def get_is_read(self, obj):
        """Per-request read-state flag; None when the annotation is absent."""
        try:
            return obj.is_read
        except AttributeError:
            return None
    def get_participants(self, obj):
        """Serialize the participants list (used by PrivateThreadSerializer)."""
        return ThreadParticipantSerializer(obj.participants_list, many=True).data
    def get_notifications(self, obj):
        """Return the viewer's notification level for this thread, or None.

        Reads either a single "watched_thread" object or a "watched_threads"
        id-to-level mapping from the serializer context.
        """
        if self.context:
            watched_thread = self.context.get("watched_thread")
            if watched_thread:
                if watched_thread.send_emails:
                    return ThreadNotifications.SITE_AND_EMAIL
                return ThreadNotifications.SITE_ONLY
            watched_threads = self.context.get("watched_threads")
            if watched_threads:
                return watched_threads.get(obj.id)
        return None
    def get_starter(self, obj):
        """Return basic data for the thread starter, or None (implicit) for guests."""
        if obj.starter_id:
            return {
                "id": obj.starter_id,
                "username": obj.starter.username,
                "real_name": obj.starter.get_real_name(),
                "avatars": obj.starter.avatars,
            }
    def get_last_poster(self, obj):
        """Return basic data for the last poster, or None (implicit) for guests."""
        if obj.last_poster_id:
            return {
                "id": obj.last_poster_id,
                "username": obj.last_poster.username,
                "real_name": obj.last_poster.get_real_name(),
                "avatars": obj.last_poster.avatars,
            }
    def get_pages(self, obj):
        """Return the number of pages the thread spans.

        posts_per_page is reduced by one — presumably because the original post
        occupies a slot on every page; TODO confirm against the posts paginator.
        An orphans setting of N is widened to N+1 so a page is only split off
        when it would hold more than the orphan allowance.
        """
        settings = self.context["settings"]
        posts_per_page = settings.posts_per_page - 1
        posts_per_page_orphans = settings.posts_per_page_orphans
        if posts_per_page_orphans:
            posts_per_page_orphans += 1
        # replies excludes the original post, hence the +1.
        total_posts = obj.replies + 1
        if total_posts <= posts_per_page + posts_per_page_orphans:
            return 1
        hits = total_posts - posts_per_page_orphans
        return ceil(hits / posts_per_page)
    def get_api(self, obj):
        """Map of the thread's API endpoints consumed by the frontend."""
        return {
            "index": obj.get_api_url(),
            "editor": obj.get_editor_api_url(),
            "merge": obj.get_merge_api_url(),
            "poll": obj.get_poll_api_url(),
            "watch": obj.get_watch_api_url(),
            "posts": {
                "index": obj.get_posts_api_url(),
                "merge": obj.get_post_merge_api_url(),
                "move": obj.get_post_move_api_url(),
                "split": obj.get_post_split_api_url(),
            },
        }
    def get_url(self, obj):
        """Map of user-facing URLs related to the thread."""
        return {
            "index": obj.get_absolute_url(),
            "new_post": obj.get_new_post_url(),
            "last_post": obj.get_last_post_url(),
            "best_answer": obj.get_best_answer_url(),
            "unapproved_post": obj.get_unapproved_post_url(),
            "starter": self.get_starter_url(obj),
            "last_poster": self.get_last_poster_url(obj),
        }
    def get_starter_url(self, obj):
        """Profile URL of the starter, or None (implicit) for guest starters."""
        if obj.starter_id:
            return reverse(
                "misago:user", kwargs={"slug": obj.starter_slug, "pk": obj.starter_id}
            )
    def get_last_poster_url(self, obj):
        """Profile URL of the last poster, or None (implicit) for guest posters."""
        if obj.last_poster_id:
            return reverse(
                "misago:user",
                kwargs={"slug": obj.last_poster_slug, "pk": obj.last_poster_id},
            )
class PrivateThreadSerializer(ThreadSerializer):
    """Thread serializer extended with the private thread's participants list."""

    participants = serializers.SerializerMethodField()

    class Meta:
        model = Thread
        fields = [*ThreadSerializer.Meta.fields, "participants"]
class ThreadsListSerializer(ThreadSerializer):
    """Lighter thread serializer for thread lists.

    Category and last post collapse to primary keys, and the heavyweight
    "path" and "poll" fields are excluded below.
    """

    category = serializers.PrimaryKeyRelatedField(read_only=True)
    last_post = serializers.PrimaryKeyRelatedField(read_only=True)

    class Meta:
        model = Thread
        fields = [*ThreadSerializer.Meta.fields, "has_poll"]


ThreadsListSerializer = ThreadsListSerializer.exclude_fields("path", "poll")
/NeodroidAgent-0.4.8-py36-none-any.whl/neodroidagent/common/memory/exclude/wtf/data_structures/Action_Balanced_Replay_Buffer.py | import random
from collections import deque, namedtuple
import numpy as np
import torch
from .Replay_Buffer import Replay_Buffer
class Action_Balanced_Replay_Buffer(Replay_Buffer):
    """Replay buffer that provides sample of experiences that have an equal number of each action being
    conducted"""

    def __init__(self, buffer_size, batch_size, seed, num_actions):
        """Create one bounded deque per action so each action keeps its own experiences.

        :param buffer_size: total capacity, split evenly across the actions
        :param batch_size: default number of experiences returned when sampling
        :param seed: seed for the ``random`` module
        :param num_actions: number of discrete actions in the environment
        """
        self.num_actions = num_actions
        self.buffer_size_per_memory = int(buffer_size / self.num_actions)
        print("NUM ACTIONS ", self.num_actions)
        # One deque per action; old experiences for an action are evicted first.
        self.memories = {action: deque(maxlen=self.buffer_size_per_memory) for action in range(self.num_actions)}
        self.batch_size = batch_size
        self.experience = namedtuple("Experience",
                                     field_names=["state", "action", "reward", "next_state", "done"])
        self.seed = random.seed(seed)
        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    def add_experience(self, states, actions, rewards, next_states, dones):
        """Adds experience or list of experiences into the replay buffer"""
        # isinstance (instead of the original ``type(...) == list``) also accepts
        # list subclasses.
        if isinstance(dones, list):
            assert not isinstance(dones[0], list), "A done shouldn't be a list"
            experiences = [self.experience(state, action, reward, next_state, done)
                           for state, action, reward, next_state, done in
                           zip(states, actions, rewards, next_states, dones)]
            # Route each experience to the memory of the action it used.
            for experience in experiences:
                self.memories[experience.action].append(experience)
        else:
            experience = self.experience(states, actions, rewards, next_states, dones)
            self.memories[actions].append(experience)

    def pick_experiences(self, num_experiences=None):
        """Picks the experiences that the sample function will return as a random sample of experiences.

        Works by picking an equal number of experiences that used each action
        (as far as possible).
        """
        batch_size = num_experiences if num_experiences else self.batch_size
        batch_per_action = self.calculate_batch_sizes_per_action(batch_size)
        samples_split_by_action = self.sample_each_action_equally(batch_per_action)
        combined_sample = []
        for action_samples in samples_split_by_action.values():
            combined_sample.extend(action_samples)
        return combined_sample

    def calculate_batch_sizes_per_action(self, batch_size):
        """Calculates the batch size we need to randomly draw from each action.

        Ensures equal coverage per action (as far as possible) while still
        filling up the whole batch.
        """
        # // is exact for arbitrarily large ints, unlike int(a / b).
        min_batch_per_action = batch_size // self.num_actions
        batch_per_action = {k: min_batch_per_action for k in range(self.num_actions)}
        # Hand the leftover slots to randomly chosen actions so the batch is full.
        remainder = batch_size - min_batch_per_action * self.num_actions
        for action in random.sample(range(self.num_actions), remainder):
            batch_per_action[action] += 1
        return batch_per_action

    def sample_each_action_equally(self, batch_per_action):
        """Samples a number of experiences (determined by batch_per_action) from the memory buffer for each
        action"""
        samples = {}
        for action in range(self.num_actions):
            memory = self.memories[action]
            batch_size_for_action = batch_per_action[action]
            action_memory_size = len(memory)
            assert action_memory_size > 0, "Need at least 1 experience for each action"
            if action_memory_size >= batch_size_for_action:
                samples[action] = random.sample(memory, batch_size_for_action)
            else:
                print("Memory size {} vs. required batch size {}".format(action_memory_size, batch_size_for_action))
                # Not enough unique experiences: sample repeatedly (with re-use
                # across rounds) until the required count is reached.
                samples_for_action = []
                while len(samples_for_action) < batch_size_for_action:
                    remainder = batch_size_for_action - len(samples_for_action)
                    sampled_experiences = random.sample(memory, min(remainder, action_memory_size))
                    samples_for_action.extend(sampled_experiences)
                samples[action] = samples_for_action
        return samples

    def __len__(self):
        """Total number of experiences stored across all actions."""
        # Builtin sum returns a plain int (np.sum returned np.int64).
        return sum(len(memory) for memory in self.memories.values())

    def sample_experiences_with_certain_actions(self, allowed_actions, num_all_actions, required_batch_size):
        """Samples a number of experiences where the action conducted was in the list of required actions"""
        assert isinstance(allowed_actions, list)
        assert len(allowed_actions) > 0
        num_new_actions = len(allowed_actions)
        # Oversample so that, after filtering by action, roughly
        # required_batch_size experiences remain.
        experiences_to_sample = int(required_batch_size * float(num_all_actions) / float(num_new_actions))
        experiences = self.sample(num_experiences=experiences_to_sample)
        states, actions, rewards, next_states, dones = experiences
        matching_indexes = np.argwhere((np.in1d(actions.numpy(), allowed_actions)))
        assert matching_indexes.shape[1] == 1
        matching_indexes = matching_indexes[:, 0]
        states = states[matching_indexes]
        actions = actions[matching_indexes]
        rewards = rewards[matching_indexes]
        next_states = next_states[matching_indexes]
        dones = dones[matching_indexes]
        # Sanity check: the filtered batch should be within 5% of the request.
        assert abs(states.shape[0] - required_batch_size) <= 0.05 * required_batch_size, "{} vs. {}".format(
            states.shape[0], required_batch_size)
        return (states, actions, rewards, next_states, dones)
/Notable-0.4.2.tar.gz/Notable-0.4.2/notable/static/lib/ace/src-min/theme-eclipse.js | define("ace/theme/eclipse",["require","exports","module","ace/lib/dom"],function(e,t,n){t.isDark=!1,t.cssText='.ace-eclipse .ace_gutter {background: #ebebeb;border-right: 1px solid rgb(159, 159, 159);color: rgb(136, 136, 136);}.ace-eclipse .ace_print-margin {width: 1px;background: #ebebeb;}.ace-eclipse {background-color: #FFFFFF;}.ace-eclipse .ace_fold {background-color: rgb(60, 76, 114);}.ace-eclipse .ace_cursor {border-left: 2px solid black;}.ace-eclipse .ace_storage,.ace-eclipse .ace_keyword,.ace-eclipse .ace_variable {color: rgb(127, 0, 85);}.ace-eclipse .ace_constant.ace_buildin {color: rgb(88, 72, 246);}.ace-eclipse .ace_constant.ace_library {color: rgb(6, 150, 14);}.ace-eclipse .ace_function {color: rgb(60, 76, 114);}.ace-eclipse .ace_string {color: rgb(42, 0, 255);}.ace-eclipse .ace_comment {color: rgb(113, 150, 130);}.ace-eclipse .ace_comment.ace_doc {color: rgb(63, 95, 191);}.ace-eclipse .ace_comment.ace_doc.ace_tag {color: rgb(127, 159, 191);}.ace-eclipse .ace_constant.ace_numeric {color: darkblue;}.ace-eclipse .ace_tag {color: rgb(25, 118, 116);}.ace-eclipse .ace_type {color: rgb(127, 0, 127);}.ace-eclipse .ace_xml-pe {color: rgb(104, 104, 91);}.ace-eclipse .ace_marker-layer .ace_selection {background: rgb(181, 213, 255);}.ace-eclipse .ace_marker-layer .ace_bracket {margin: -1px 0 0 -1px;border: 1px solid rgb(192, 192, 192);}.ace-eclipse .ace_meta.ace_tag {color:rgb(25, 118, 116);}.ace-eclipse .ace_invisible {color: #ddd;}.ace-eclipse .ace_entity.ace_other.ace_attribute-name {color:rgb(127, 0, 127);}.ace-eclipse .ace_marker-layer .ace_step {background: rgb(255, 255, 0);}.ace-eclipse .ace_marker-layer .ace_active-line {background: rgb(232, 242, 254);}.ace-eclipse .ace_marker-layer .ace_selected-word {border: 1px solid rgb(181, 213, 255);}.ace-eclipse .ace_indent-guide {background: 
url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bLly//BwAmVgd1/w11/gAAAABJRU5ErkJggg==") right repeat-y;}',t.cssClass="ace-eclipse";var r=e("../lib/dom");r.importCssString(t.cssText,t.cssClass)}) | PypiClean |
/Kallithea-0.7.0.tar.gz/Kallithea-0.7.0/docs/usage/vcs_notes.rst | .. _vcs_notes:
===================================
Version control systems usage notes
===================================
.. _importing:
Importing existing repositories
-------------------------------
There are two main methods to import repositories in Kallithea: via the web
interface or via the filesystem. If you have a large number of repositories to
import, importing them via the filesystem is more convenient.
Importing via web interface
^^^^^^^^^^^^^^^^^^^^^^^^^^^
For a small number of repositories, it may be easier to create the target
repositories through the Kallithea web interface, via *Admin > Repositories* or
via the *Add Repository* button on the entry page of the web interface.
Repositories can be nested in repository groups by first creating the group (via
*Admin > Repository Groups* or via the *Add Repository Group* button on the
entry page of the web interface) and then selecting the appropriate group when
adding the repository.
After creation of the (empty) repository, push the existing commits to the
*Clone URL* displayed on the repository summary page. For Git repositories,
first add the *Clone URL* as remote, then push the commits to that remote. The
specific commands to execute are shown under the *Existing repository?* section
of the new repository's summary page.
A benefit of this method particular for Git repositories, is that the
Kallithea-specific Git hooks are installed automatically. For Mercurial, no
hooks are required anyway.
Importing via the filesystem
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The alternative method of importing repositories consists of creating the
repositories in the desired hierarchy on the filesystem and letting Kallithea
scan that location.
All repositories are stored in a central location on the filesystem. This
location is specified during installation (via ``db-create``) and can be reviewed
at *Admin > Settings > VCS > Location of repositories*. Repository groups
(defined in *Admin > Repository Groups*) are represented by a directory in that
repository location. Repositories of the repository group are nested under that
directory.
To import a set of repositories and organize them in a certain repository group
structure, first place clones in the desired hierarchy at the configured
repository location.
These clones should be created without a working directory. For Mercurial, this
is done with ``hg clone -U``; for Git, with ``git clone --bare``.
When the repositories are added correctly on the filesystem:
* go to *Admin > Settings > Remap and Rescan* in the Kallithea web interface
* select the *Install Git hooks* checkbox when importing Git repositories
* click *Rescan Repositories*
This step will scan the filesystem and create the appropriate repository groups
and repositories in Kallithea.
*Note*: Once repository groups have been created this way, manage their access
permissions through the Kallithea web interface.
Mercurial-specific notes
------------------------
Working with subrepositories
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This section explains how to use Mercurial subrepositories_ in Kallithea.
Example usage::
## init a simple repo
hg init mainrepo
cd mainrepo
echo "file" > file
hg add file
hg ci --message "initial file"
# clone subrepo we want to add from Kallithea
hg clone http://kallithea.local/subrepo
## specify URL to existing repo in Kallithea as subrepository path
echo "subrepo = http://kallithea.local/subrepo" > .hgsub
hg add .hgsub
hg ci --message "added remote subrepo"
In the file list of a clone of ``mainrepo`` you will see a connected
subrepository at the revision it was cloned with. Clicking on the
subrepository link sends you to the proper repository in Kallithea.
Cloning ``mainrepo`` will also clone the attached subrepository.
Next we can edit the subrepository data, and push back to Kallithea. This will
update both repositories.
.. _subrepositories: http://mercurial.aragost.com/kick-start/en/subrepositories/
| PypiClean |
/Mage2Gen-2.3.3.tar.gz/Mage2Gen-2.3.3/mage2gen/snippets/eaventityattribute.py | import os, locale
from .. import Module, Phpclass, Phpmethod, Xmlnode, StaticFile, Snippet, SnippetParam
from ..utils import upperfirst
class EavEntityAttributeSnippet(Snippet):
    snippet_label = 'EAV Attribute (custom)'
    # (frontend code, human-readable label) pairs offered in the snippet UI.
    FRONTEND_INPUT_TYPE = [
        ("text","Text Field"),
        ("textarea","Text Area"),
        ("date","Date"),
        ("boolean","Yes/No"),
        ("multiselect","Multiple Select"),
        ("select","Dropdown"),
        ("price","Price"),
        ("static","Static")
    ]
    # Backend column types selectable for "static" attributes.
    STATIC_FIELD_TYPES = [
        ("varchar","Varchar"),
        ("text","Text"),
        ("int","Int"),
        ("decimal","Decimal")
    ]
    # Maps a frontend input type to the EAV backend value type used for storage.
    FRONTEND_INPUT_VALUE_TYPE = {
        "text":"varchar",
        "textarea":"text",
        "date":"date",
        "boolean":"int",
        "multiselect":"varchar",
        "select":"int",
        "price":"decimal",
        #"media_image":"",
        #"weee":"",
        #"swatch_visual":"",
        #"swatch_text":""
    }
    description = """
    Install Magento 2 custom eav entity attributes programmatically.
    """
    def __init__(self, *args, **kwargs):
        """Initialize the snippet and the per-attribute counter."""
        super().__init__(*args, **kwargs)
        # Counter used to compute the admin form field sortOrder; it is
        # incremented once per added attribute.
        self.count = 1
    def add(self, entity_model_class, attribute_label, frontend_input='text', required=False, options=None, source_model=False, extend_adminhtml_form=False, extra_params=None):
        """Generate the code that installs one EAV attribute.

        Emits a data patch class that adds/removes the attribute, optionally a
        PHP source model for select/multiselect options, the module.xml Eav
        sequence, and (optionally) an admin UI component form field.
        """
        entity_type = "\{}::ENTITY".format(entity_model_class)
        entity_table = '{}_{}_entity'.format(self._module.package.lower(), entity_model_class.split('\\')[-1].lower())
        extra_params = extra_params if extra_params else {}
        self.count += 1
        value_type = self.FRONTEND_INPUT_VALUE_TYPE.get(frontend_input,'int')
        # EAV stores dates in a datetime column.
        value_type = value_type if value_type != 'date' else 'datetime'
        user_defined = 'true'
        options = options.split(',') if options else []
        options_php_array = '"'+'","'.join(x.strip() for x in options) + '"'
        options_php_array_string = "array('values' => array("+options_php_array+"))"
        # Attribute code defaults to a snake_cased, 30-char-max form of the label.
        attribute_code = extra_params.get('attribute_code', None)
        if not attribute_code:
            attribute_code = attribute_label.lower().replace(' ','_')[:30]
        split_attribute_code = attribute_code.split('_')
        attribute_code_capitalized = ''.join(upperfirst(item) for item in split_attribute_code)
        # With a source model, the options live in PHP code rather than in the
        # attribute's eav setup data.
        if source_model and frontend_input in ['multiselect', 'select']:
            source_model = "\{}\{}\Model\Attribute\Source\{}::class".format(self._module.package, self._module.name, attribute_code_capitalized)
            options_array = []
            for val in options:
                options_array.append("['value' => '" + val.lower() + "', 'label' => __('" + val + "')]")
            options_php_array = '[\n' + ',\n'.join(x.strip() for x in options_array) + '\n]'
            self.add_source_model(attribute_code_capitalized, options_php_array)
            options_php_array_string = "''"
        else:
            source_model = "''"
        templatePath = os.path.join(os.path.dirname(__file__), '../templates/attributes/eavattribute.tmpl')
        with open(templatePath, 'rb') as tmpl:
            template = tmpl.read().decode('utf-8')
        # NOTE(review): self-assignment below is a no-op left over from an
        # earlier refactor.
        options_php_array_string = options_php_array_string
        methodBody = template.format(
            entity_type=entity_type,
            attribute_code=attribute_code,
            attribute_label=attribute_label,
            value_type=value_type,
            frontend_input=frontend_input,
            user_defined=user_defined,
            required = str(required).lower(),
            options = options_php_array_string,
            unique = 'true' if extra_params.get('unique', False) else 'false',
            default = 'null',
            backend = 'Magento\Eav\Model\Entity\Attribute\Backend\ArrayBackend' if frontend_input == 'multiselect' else '',
            source_model = source_model,
            sort_order = '30',
            frontend = ''
        )
        patchType = 'add'
        # Data patch class: applies the attribute on install and removes it on revert.
        install_patch = Phpclass('Setup\\Patch\\Data\\{}{}{}Attribute'.format(patchType, attribute_code_capitalized, entity_model_class.split('\\')[-1]),
            implements=['DataPatchInterface', 'PatchRevertableInterface'],
            dependencies=[
                'Magento\\Framework\\Setup\\Patch\\DataPatchInterface',
                'Magento\\Framework\\Setup\\Patch\\PatchRevertableInterface',
                'Magento\\Framework\\Setup\\ModuleDataSetupInterface',
                'Magento\\Eav\\Setup\\EavSetupFactory',
                'Magento\\Eav\\Setup\\EavSetup',
            ],
            attributes=[
                "/**\n\t * @var ModuleDataSetupInterface\n\t */\n\tprivate $moduleDataSetup;",
                "/**\n\t * @var EavSetupFactory\n\t */\n\tprivate $eavSetupFactory;"
            ]
        )
        install_patch.add_method(Phpmethod(
            '__construct',
            params=[
                'ModuleDataSetupInterface $moduleDataSetup',
                'EavSetupFactory $eavSetupFactory'
            ],
            body="$this->moduleDataSetup = $moduleDataSetup;\n$this->eavSetupFactory = $eavSetupFactory;",
            docstring=[
                'Constructor',
                '',
                '@param ModuleDataSetupInterface $moduleDataSetup',
                '@param EavSetupFactory $eavSetupFactory'
            ]
        ))
        install_patch.add_method(Phpmethod(
            'apply',
            body_start='$this->moduleDataSetup->getConnection()->startSetup();',
            body_return='$this->moduleDataSetup->getConnection()->endSetup();',
            body="""
        /** @var EavSetup $eavSetup */
        $eavSetup = $this->eavSetupFactory->create(['setup' => $this->moduleDataSetup]);
        """ + methodBody,
            docstring=[
                '{@inheritdoc}',
            ]
        ))
        install_patch.add_method(Phpmethod(
            'revert',
            body_start='$this->moduleDataSetup->getConnection()->startSetup();',
            body_return='$this->moduleDataSetup->getConnection()->endSetup();',
            body="""
        /** @var EavSetup $eavSetup */
        $eavSetup = $this->eavSetupFactory->create(['setup' => $this->moduleDataSetup]);
        $eavSetup->removeAttribute({entity_type}, '{attribute_code}');""".format(entity_type=entity_type, attribute_code=attribute_code)
        ))
        install_patch.add_method(Phpmethod(
            'getAliases',
            body="return [];",
            docstring=[
                '{@inheritdoc}'
            ]
        ))
        install_patch.add_method(Phpmethod(
            'getDependencies',
            access='public static',
            body="return [\n\n];",
            docstring=[
                '{@inheritdoc}'
            ]
        ))
        self.add_class(install_patch)
        # Declare a load-order dependency on Magento_Eav in module.xml.
        etc_module = Xmlnode('config', attributes={
            'xsi:noNamespaceSchemaLocation': "urn:magento:framework:Module/etc/module.xsd"}, nodes=[
            Xmlnode('module', attributes={'name': self.module_name}, nodes=[
                Xmlnode('sequence', attributes={}, nodes=[
                    Xmlnode('module', attributes={'name': 'Magento_Eav'})
                ])
            ])
        ])
        self.add_xml('etc/module.xml', etc_module)
        if extend_adminhtml_form:
            # UI Component Form
            ui_form = Xmlnode('form', nodes=[
                Xmlnode('fieldset', attributes={'name': 'general'}, nodes=[
                    Xmlnode('field', attributes={'name': attribute_code, 'formElement': frontend_input,
                        'sortOrder': str(10 * self.count)}, nodes=[
                        Xmlnode('argument', attributes={'name': 'data', 'xsi:type': 'array'}, nodes=[
                            Xmlnode('item', attributes={'name': 'config', 'xsi:type': 'array'}, nodes=[
                                Xmlnode('item', attributes={'name': 'source', 'xsi:type': 'string'},
                                    node_text=attribute_code),
                            ]),
                        ]),
                        Xmlnode('settings', nodes=[
                            Xmlnode('dataType', node_text='text'),
                            Xmlnode('label', attributes={'translate': 'true'}, node_text=attribute_label),
                            Xmlnode('dataScope', node_text=attribute_code),
                            Xmlnode('validation', nodes=[
                                Xmlnode('rule', attributes={'name': 'required-entry', 'xsi:type': 'boolean'},
                                    node_text='true' if required else 'false'),
                            ]),
                        ]),
                    ]),
                ]),
            ])
            self.add_xml('view/adminhtml/ui_component/{}_form.xml'.format(entity_table), ui_form)
    def add_source_model(self, attribute_code_capitalized, options_php_array_string):
        """Generate a PHP source model class returning the attribute's options."""
        source_model = Phpclass('Model\\Attribute\Source\\{}'.format(upperfirst(attribute_code_capitalized)),
            extends='\\Magento\\Eav\\Model\\Entity\\Attribute\\Source\\AbstractSource')
        source_model.add_method(Phpmethod(
            'getAllOptions',
            body="$this->_options = " + options_php_array_string + ";\n"
            "return $this->_options;",
            docstring=[
                'getAllOptions',
                '',
                '@return array'
            ]
        ))
        self.add_class(source_model)
    @classmethod
    def params(cls):
        """Describe the primary UI parameters for this snippet."""
        return [
            SnippetParam(
                name='entity_model_class',
                required=True,
                description='Example: Magento\Customer\Model\Customer',
                regex_validator=r'^[\w\\]+$',
                error_message='Only alphanumeric, underscore and backslash characters are allowed'),
            SnippetParam(
                name='attribute_label',
                required=True,
                description='Example: color',
                regex_validator= r'^[a-zA-Z\d\-_\s]+$',
                error_message='Only alphanumeric'),
            SnippetParam(
                name='frontend_input',
                choises=cls.FRONTEND_INPUT_TYPE,
                required=True,
                default='text'),
            SnippetParam(
                name='options',
                depend={'frontend_input': r'select|multiselect'},
                required=False,
                description='Dropdown or Multiselect options comma seperated',
                error_message='Only alphanumeric'),
            SnippetParam(
                name='source_model',
                depend={'frontend_input': r'select|multiselect'},
                required=False,
                default=False,
                yes_no=True),
            SnippetParam(
                name='required',
                required=True,
                default=True,
                yes_no=True),
            SnippetParam(name='extend_adminhtml_form', yes_no=True, description='Extend the admin ui based on the Entity Model Class'),
        ]
    @classmethod
    def extra_params(cls):
        """Describe the optional, advanced UI parameters for this snippet."""
        return [
            SnippetParam(
                name='attribute_code',
                description='Default to lowercase of label',
                regex_validator= r'^[a-zA-Z]{1}\w{0,29}$',
                error_message='Only alphanumeric and underscore characters are allowed, and need to start with a alphabetic character. And can\'t be longer then 30 characters'),
            SnippetParam(
                name='unique',
                required=True,
                default=False,
                yes_no=True),
        ]
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/full/lang/az.js | /*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/license
*/
CKEDITOR.lang['az']={"editor":"Mətn Redaktoru","editorPanel":"Mətn Redaktorun Paneli","common":{"editorHelp":"Yardım üçün ALT 0 düymələrini basın","browseServer":"Fayların siyahı","url":"URL","protocol":"Protokol","upload":"Serverə yüklə","uploadSubmit":"Göndər","image":"Şəkil","flash":"Flash","form":"Forma","checkbox":"Çekboks","radio":"Radio düyməsi","textField":"Mətn xanası","textarea":"Mətn","hiddenField":"Gizli xana","button":"Düymə","select":"Opsiyaların seçilməsi","imageButton":"Şəkil tipli düymə","notSet":"<seçilməmiş>","id":"Id","name":"Ad","langDir":"Yaziların istiqaməti","langDirLtr":"Soldan sağa (LTR)","langDirRtl":"Sağdan sola (RTL)","langCode":"Dilin kodu","longDescr":"URL-ın ətraflı izahı","cssClass":"CSS klassları","advisoryTitle":"Başlıq","cssStyle":"CSS","ok":"Tədbiq et","cancel":"İmtina et","close":"Bağla","preview":"Baxış","resize":"Eni dəyiş","generalTab":"Əsas","advancedTab":"Əlavə","validateNumberFailed":"Rəqəm deyil.","confirmNewPage":"Yadda saxlanılmamış dəyişikliklər itiriləcək. Davam etmək istədiyinizə əminsinizmi?","confirmCancel":"Dəyişikliklər edilib. 
Pəncərəni bağlamaq istəyirsizə əminsinizmi?","options":"Seçimlər","target":"Hədəf çərçivə","targetNew":"Yeni pəncərə (_blank)","targetTop":"Əsas pəncərə (_top)","targetSelf":"Carı pəncərə (_self)","targetParent":"Ana pəncərə (_parent)","langDirLTR":"Soldan sağa (LTR)","langDirRTL":"Sağdan sola (RTL)","styles":"Üslub","cssClasses":"Üslub klası","width":"En","height":"Uzunluq","align":"Yerləşmə","left":"Sol","right":"Sağ","center":"Mərkəz","justify":"Eninə görə","alignLeft":"Soldan düzləndir","alignRight":"Sağdan düzləndir","alignCenter":"Mərkəzə düzləndir","alignTop":"Yuxarı","alignMiddle":"Orta","alignBottom":"Aşağı","alignNone":"Yoxdur","invalidValue":"Yanlışdır.","invalidHeight":"Hündürlük rəqəm olmalıdır.","invalidWidth":"En rəqəm olmalıdır.","invalidLength":"\"%1\" xanasına, ölçü vahidinin (%2) göstərilməsindən asılı olmayaraq, müsbət ədəd qeyd olunmalıdır.","invalidCssLength":"\"%1\" xanasında göstərilən məzmun tam və müsbət olmalıdır, CSS-də olan ölçü vahidlərin (px, %, in, cm, mm, em, ex, pt, or pc) istifadısinə icazə verilir.","invalidHtmlLength":"\"%1\" xanasında göstərilən məzmun tam və müsbət olmalıdır HTML-də olan ölçü vahidlərin (px və ya %) istifadısinə icazə verilir.","invalidInlineStyle":"Teq içində olan üslub \"ad : məzmun\" şəklidə, nöqtə-verqül işarəsi ilə bitməlidir","cssLengthTooltip":"Piksel sayı və ya digər CSS ölçü vahidləri (px, %, in, cm, mm, em, ex, pt, or pc) daxil edin.","unavailable":"%1<span class=\"cke_accessibility\">, mövcud deyil</span>","keyboard":{"8":"Backspace","13":"Enter","16":"Shift","17":"Ctrl","18":"Alt","32":"Boşluq","35":"Son","36":"Evə","46":"Sil","112":"F1","113":"F2","114":"F3","115":"F4","116":"F5","117":"F6","118":"F7","119":"F8","120":"F9","121":"F10","122":"F11","123":"F12","124":"F13","125":"F14","126":"F15","127":"F16","128":"F17","129":"F18","130":"F19","131":"F20","132":"F21","133":"F22","134":"F23","135":"F24","224":"Əmr"},"keyboardShortcut":"Qısayol 
düymələri","optionDefault":"Standart"},"about":{"copy":"Copyright © $1. Bütün hüquqlar qorunur.","dlgTitle":"CKEditor haqqında","moreInfo":"Lisenziya informasiyası üçün zəhmət olmasa saytımızı ziyarət edin:"},"basicstyles":{"bold":"Qalın","italic":"Kursiv","strike":"Üstüxətli","subscript":"Aşağı indeks","superscript":"Yuxarı indeks","underline":"Altdan xətt"},"bidi":{"ltr":"Mətnin istiqaməti - soldan sağa","rtl":"Mətnin istiqaməti - sağdan sola"},"blockquote":{"toolbar":"Sitat bloku"},"notification":{"closed":"Xəbərdarlıq pəncərəsi bağlanıb"},"toolbar":{"toolbarCollapse":"Paneli gizlət","toolbarExpand":"Paneli göstər","toolbarGroups":{"document":"Mətn","clipboard":"Mübadilə buferi/İmtina et","editing":"Redaktə edilməsi","forms":"Formalar","basicstyles":"Əsas üslublar","paragraph":"Abzas","links":"Link","insert":"Əlavə et","styles":"Üslublar","colors":"Rənqlər","tools":"Alətləri"},"toolbars":"Redaktorun panelləri"},"clipboard":{"copy":"Köçür","copyError":"Avtomatik köçürülməsi mümkün deyil. Ctrl+C basın.","cut":"Kəs","cutError":"Avtomatik kəsmə mümkün deyil. Ctrl+X basın.","paste":"Əlavə et","pasteNotification":"Press %1 to paste. 
Your browser doesn‘t support pasting with the toolbar button or context menu option.","pasteArea":"Paste Area","pasteMsg":"Paste your content inside the area below and press OK."},"colorbutton":{"auto":"Avtomatik","bgColorTitle":"Doldurma rəngi","colors":{"000":"Qara","800000":"Şabalıdı","8B4513":"Açıq şabalı","2F4F4F":"Açıq boz","008080":"Firuzəyi göy","000080":"Tünd göy","4B0082":"İndigo","696969":"Tünd boz","B22222":"Kərpiç","A52A2A":"Şabalıdı","DAA520":"Qızıl ağcaqayın","006400":"Tünd yaşıl","40E0D0":"Firuzəyi","0000CD":"Göy","800080":"Bənövşəyi","808080":"Boz","F00":"Qırmızı","FF8C00":"Tünd narıncı","FFD700":"Qızılı","008000":"Yaşıl","0FF":"Mavi","00F":"Göy","EE82EE":"Açıq bənövşəyi","A9A9A9":"Asfalt rəngi","FFA07A":"Qızılbalıq","FFA500":"Narıncı","FFFF00":"Sarı","00FF00":"Laym","AFEEEE":"Acıq firuzəyi","ADD8E6":"Acıq göy","DDA0DD":"Gavalı","D3D3D3":"Acıq boz","FFF0F5":"Yasəmən","FAEBD7":"Kağız","FFFFE0":"Acıq sarı","F0FFF0":"Yemişi","F0FFFF":"Gömgöy","F0F8FF":"Solğun göy","E6E6FA":"Lavanda","FFF":"Ağ","1ABC9C":"Güclü mavi","2ECC71":"Zümrüd","3498DB":"Parlaq göy","9B59B6":"Ametist","4E5F70":"Bozlu göy","F1C40F":"Sapsarı","16A085":"Tünd mavi","27AE60":"Tünd zümrüd","2980B9":"Güclü göy","8E44AD":"Tünd bənövşəyi","2C3E50":"Rəngsiz göy","F39C12":"Narıncı","E67E22":"Yerkökülü","E74C3C":"Solğun qırmızı","ECF0F1":"Parlaq gümüşü","95A5A6":"Acıq bozlu mavi","DDD":"Acıq boz","D35400":"Balqabaqlı","C0392B":"Güclü qırmızı","BDC3C7":"Gümüşü","7F8C8D":"Bozlu mavi","999":"Tünd boz"},"more":"Digər rənglər...","panelTitle":"Rənglər","textColorTitle":"Mətnin rəngi"},"colordialog":{"clear":"Təmizlə","highlight":"Ayırmaq","options":"Rəng seçimləri","selected":"Seçilmiş rəng","title":"Rəngi seç"},"templates":{"button":"Şablon","emptyListMsg":"(Heç bir şablon təyin edilməyib)","insertOption":"Həqiqi içindəkiləri əvəz et","options":"Şablonun seçimləri","selectPromptMsg":"Redaktor ilə açmaq üçün şablonu seçin","title":"İçindəkinin şablonu"},"contextmenu":{"options":"Əlavə 
əməliyyatlar"},"copyformatting":{"label":"Formatı köçür","notification":{"copied":"Format köçürülüb","applied":"Format tədbiq edilib","canceled":"Format imtina edilib","failed":"Format etmək mümkün deyil. Siz üslubları köçürmədən onları tədbiq edə bilməzsiniz."}},"div":{"IdInputLabel":"Id","advisoryTitleInputLabel":"Məsləhətli başlıq","cssClassInputLabel":"Üslub klassları","edit":"DİV eıementini redaktə et","inlineStyleInputLabel":"Sözlərin üslubları","langDirLTRLabel":"Soldan sağa (LTR)","langDirLabel":"Yaziların istiqaməti","langDirRTLLabel":"Sağdan sola (RTL)","languageCodeInputLabel":"Dilin kodu","remove":"DİV elementini sil","styleSelectLabel":"Üslub","title":"DİV ilə əhatələməni yarat","toolbar":"DİV ilə əhatələməni yarat"},"elementspath":{"eleLabel":"Elementin izləri","eleTitle":"%1 element"},"filetools":{"loadError":"Faylını oxumaq mümkün deyil","networkError":"Xəta baş verdi.","httpError404":"Serverə göndərilməsinin zamanı xəta baş verdi (404 - fayl tapılmayıb)","httpError403":"Serverə göndərilməsinin zamanı xəta baş verdi (403 - gadağandır)","httpError":"Serverə göndərilməsinin zamanı xəta baş verdi (xətanın ststusu: %1)","noUrlError":"Yükləmə linki təyin edilməyib","responseError":"Serverin cavabı yanlışdır"},"find":{"find":"Tap","findOptions":"Axtarışın seçimləri","findWhat":"Nəyi axtarmaq","matchCase":"Reqistr nəzərə alınmaqla","matchCyclic":"Dövrəvi axtar","matchWord":"Tam sözünə uyğun","notFoundMsg":"Daxil etdiyiniz sorğu ilə heç bir nəticə tapılmayıb","replace":"Əvəz et","replaceAll":"Hamısını əvəz et","replaceSuccessMsg":"%1 daxiletmə(lər) əvəz edilib","replaceWith":"Əvəz etdirici mətn:","title":"Tap və əvəz et"},"fakeobjects":{"anchor":"Lövbər","flash":"Flash animasiya","hiddenfield":"Gizli xana","iframe":"IFrame","unknown":"Tanımamış obyekt"},"flash":{"access":"Skript tərəfindən müraciətlərin icazəsi","accessAlways":"Həmişə","accessNever":"Heç vaxt","accessSameDomain":"Eyni domen 
adından","alignAbsBottom":"Aşağıda","alignAbsMiddle":"Ortada","alignBaseline":"Əsas xətt","alignTextTop":"Mətnin yuxarıda","bgcolor":"Doldurma rəngi","chkFull":"Tam pəncərəyə aça bilər","chkLoop":"Dövrələmə","chkMenu":"Flash menyunu göstər","chkPlay":"Birbaşa oynat","flashvars":"Flash üçün dəyişənlər","hSpace":"Üfüqi boşluq","properties":"Flash xüsusiyyətləri","propertiesTab":"Xüsusiyyətlər","quality":"Keyfiyyət","qualityAutoHigh":"Avtomatik yüksək","qualityAutoLow":"Avtomatik aşağı","qualityBest":"Mükəmməl","qualityHigh":"Yüksək","qualityLow":"Aşağı","qualityMedium":"Orta","scale":"Dəyişkən","scaleAll":"Hamısını göstər","scaleFit":"Həqiqi ölçülər","scaleNoBorder":"Sərhədsiz","title":"Flash xüsusiyyətləri","vSpace":"Şaquli boşluq","validateHSpace":"Üfüqi boşluq rəqəm olmalıdır.","validateSrc":"Link boş ola bilməz","validateVSpace":"Şaquli boşluq rəqəm olmalıdır.","windowMode":"Pəncərənin rejimi","windowModeOpaque":"Görünən","windowModeTransparent":"Şəffaf","windowModeWindow":"Pəncərə"},"font":{"fontSize":{"label":"Şrift ölçüsü","voiceLabel":"Şrift ölçüsü","panelTitle":"Şrift ölçüsü"},"label":"Şrift","panelTitle":"Şrift","voiceLabel":"Şrift"},"forms":{"button":{"title":"Düymənin xüsusiyyətləri","text":"Mətn (kəmiyyət)","type":"Növ","typeBtn":"Düymə","typeSbm":"Təsdiq et","typeRst":"Dəyişiklikləri imtina et"},"checkboxAndRadio":{"checkboxTitle":"Qutucuqun xüsusiyyətləri","radioTitle":"Radio düyməsinin xüsusiyyətləri","value":"Kəmiyyət","selected":"Seçilmiş","required":"Tələb olunur"},"form":{"title":"Formanın xüsusiyyətləri","menu":"Formanın xüsusiyyətləri","action":"Emal edən ünvan","method":"Göndərilmə üsulu","encoding":"Kodlaşdırma"},"hidden":{"title":"Gizli xanasının xüsusiyyətləri","name":"Ad","value":"Kəmiyyət"},"select":{"title":"SELECT elementinin xüsusiyyətləri","selectInfo":"SELECT elementinin haqqında məlumat","opAvail":"Mövcud olan seçimləri","value":"Kəmiyyət","size":"Ölçülər","lines":"xəttlər","chkMulti":"Çox kəmiyyətli xana","required":"Tələb 
olunur","opText":"Mətn","opValue":"Kəmiyyət","btnAdd":"Əıavə et","btnModify":"Redaktə et","btnUp":"Yuxarı","btnDown":"Aşağı","btnSetValue":"Susmaya görə kəmiyyəti kimi seç","btnDelete":"Sil"},"textarea":{"title":"Mətn xanasının xüsusiyyətləri","cols":"Sütunlar","rows":"Sətirlər"},"textfield":{"title":"Mətn xanasının xüsusiyyətləri","name":"Ad","value":"Kəmiyyət","charWidth":"İşarənin eni","maxChars":"İşarələrin hüdudu","required":"Tələb olunur","type":"Növ","typeText":"Mətn","typePass":"Şifrə","typeEmail":"E-poçt","typeSearch":"Axtarış","typeTel":"Telefon nömrəsi","typeUrl":"Link"}},"format":{"label":"Format","panelTitle":"Abzasın formatı","tag_address":"Ünvan","tag_div":"Normal (DIV)","tag_h1":"Başlıq 1","tag_h2":"Başlıq 2","tag_h3":"Başlıq 3","tag_h4":"Başlıq 4","tag_h5":"Başlıq 5","tag_h6":"Başlıq 6","tag_p":"Normal","tag_pre":"Formatı saxla"},"horizontalrule":{"toolbar":"Sərhəd xətti yarat"},"iframe":{"border":"Çərçivə sərhədlərini göstər","noUrl":"Çərçivənin ünvanı daxil edin","scrolling":"Şürüşdürmələri əlavə et","title":"İFRAME elementinin alətləri","toolbar":"İFRAME"},"image":{"alt":"Alternativ mətn","border":"Sərhəd","btnUpload":"Serverə yüklə","button2Img":"Şəkil tipli düyməni şəklə çevirmək istədiyinizə əminsinizmi?","hSpace":"Üfüqi boşluq","img2Button":"Şəkli şəkil tipli düyməyə çevirmək istədiyinizə əminsinizmi?","infoTab":"Şəkil haqqında məlumat","linkTab":"Link","lockRatio":"Ölçülərin uyğunluğu saxla","menu":"Şəklin seçimləri","resetSize":"Ölçüləri qaytar","title":"Şəklin seçimləri","titleButton":"Şəkil tipli düyməsinin seçimləri","upload":"Serverə yüklə","urlMissing":"Şəklin ünvanı yanlışdır.","vSpace":"Şaquli boşluq","validateBorder":"Sərhədin eni rəqəm olmalıdır.","validateHSpace":"Üfüqi boşluq rəqəm olmalıdır.","validateVSpace":"Şaquli boşluq rəqəm olmalıdır."},"indent":{"indent":"Sol boşluqu artır","outdent":"Sol boşluqu azalt"},"smiley":{"options":"Smayli-nin seçimləri","title":"Smayli-ni daxil 
et","toolbar":"Smayli"},"language":{"button":"Dilini təyin et","remove":"Dilini sil"},"link":{"acccessKey":"Qısayol düyməsi","advanced":"Geniş seçimləri","advisoryContentType":"Məsləhətli məzmunun növü","advisoryTitle":"Məsləhətli başlıq","anchor":{"toolbar":"Xeş","menu":"Xeşi redaktə et","title":"Xeşin seçimləri","name":"Xeşin adı","errorName":"Xeşin adı yanlışdır","remove":"Xeşin adı sil"},"anchorId":"ID görə","anchorName":"Xeşin adına görə","charset":"Hədəfin kodlaşdırması","cssClasses":"Üslub klası","download":"Məcburi yükləmə","displayText":"Göstərilən mətn","emailAddress":"E-poçt ünvanı","emailBody":"Mesajın məzmunu","emailSubject":"Mesajın başlığı","id":"ID","info":"Linkin xüsusiyyətləri","langCode":"Dilin kodu","langDir":"Yaziların istiqaməti","langDirLTR":"Soldan sağa (LTR)","langDirRTL":"Sağdan sola (RTL)","menu":"Linki redaktə et","name":"Ad","noAnchors":"(heç bir xeş tapılmayıb)","noEmail":"E-poçt ünvanı daxil edin","noUrl":"Linkin URL-ı daxil edin","noTel":"Please type the phone number","other":"<digər>","phoneNumber":"Phone number","popupDependent":"Asılı (Netscape)","popupFeatures":"Pəncərənin xüsusiyyətləri","popupFullScreen":"Tam ekran rejimi (IE)","popupLeft":"Solda","popupLocationBar":"Ünvan paneli","popupMenuBar":"Menyu paneli","popupResizable":"Olçülər dəyişilir","popupScrollBars":"Sürüşdürmələr göstər","popupStatusBar":"Bildirişlərin paneli","popupToolbar":"Alətlərin paneli","popupTop":"Yuxarıda","rel":"Münasibət","selectAnchor":"Xeşi seçin","styles":"Üslub","tabIndex":"Tabın nömrəsi","target":"Hədəf çərçivə","targetFrame":"<freym>","targetFrameName":"Freymin adı","targetPopup":"<yeni pəncərə>","targetPopupName":"Pəncərənin adı","title":"Link","toAnchor":"Xeş","toEmail":"E-poçt","toUrl":"URL","toPhone":"Phone","toolbar":"Link","type":"Linkin növü","unlink":"Linki sil","upload":"Serverə yüklə"},"list":{"bulletedlist":"Markerlənmiş siyahını başlat/sil","numberedlist":"Nömrələnmiş siyahını başlat/sil"},"liststyle":{"bulletedTitle":"Markerlənmiş 
siyahının xüsusiyyətləri","circle":"Dəirəcik","decimal":"Rəqəm (1, 2, 3 və s.)","disc":"Disk","lowerAlpha":"Kiçik hərflər (a, b, c, d, e və s.)","lowerRoman":"Rum rəqəmləri (i, ii, iii, iv, v və s.)","none":"Yoxdur","notset":"<seçilməmiş>","numberedTitle":"Nömrəli siyahının xüsusiyyətləri","square":"Dördbucaq","start":"Başlanğıc","type":"Növ","upperAlpha":"Böyük hərflər (a, b, c, d, e və s.)","upperRoman":"Böyük Rum rəqəmləri (I, II, III, IV, V və s.)","validateStartNumber":"Siyahının başlanğıc nömrəsi tam və müsbət rəqəm olmalıdır."},"magicline":{"title":"Abzası burada əlavə et"},"maximize":{"maximize":"Aşkarla","minimize":"Gizlət"},"newpage":{"toolbar":"Yeni səhifə"},"pagebreak":{"alt":"Vərəq ayırıcı nişanı","toolbar":"Vərəqin çapı üçün ayırıcı nişanı qoy"},"pastetext":{"button":"Yalnız mətni saxla","pasteNotification":"Press %1 to paste. Your browser doesn‘t support pasting with the toolbar button or context menu option.","title":"Paste as Plain Text"},"pastefromword":{"confirmCleanup":"Əlavə edilən mətn Word-dan köçürülənə oxşayır. 
Təmizləmək istəyirsinizmi?","error":"Daxili səhvə görə əlavə edilən məlumatların təmizlənməsi mümkün deyil","title":"Word-dan əlavəetmə","toolbar":"Word-dan əlavəetmə"},"preview":{"preview":"Öncədən baxılması"},"print":{"toolbar":"Çap et"},"removeformat":{"toolbar":"Formatı sil"},"save":{"toolbar":"Yadda saxla"},"selectall":{"toolbar":"Hamısını seç"},"showblocks":{"toolbar":"Blokları göstər"},"sourcearea":{"toolbar":"HTML mənbəyini göstər"},"specialchar":{"options":"Xüsusi simvolların seçimləri","title":"Xüsusi simvolu seç","toolbar":"Xüsusi simvolu daxil et"},"scayt":{"btn_about":"About SCAYT","btn_dictionaries":"Dictionaries","btn_disable":"Disable SCAYT","btn_enable":"Enable SCAYT","btn_langs":"Languages","btn_options":"Options","text_title":"Spell Check As You Type"},"stylescombo":{"label":"Üslub","panelTitle":"Format üslubları","panelTitle1":"Blokların üslubları","panelTitle2":"Sözlərin üslubları","panelTitle3":"Obyektlərin üslubları"},"table":{"border":"Sərhədlərin eni","caption":"Cədvəlin başlığı","cell":{"menu":"Xana","insertBefore":"Burdan əvvələ xanası çək","insertAfter":"Burdan sonra xanası çək","deleteCell":"Xanaları sil","merge":"Xanaları birləşdir","mergeRight":"Sağdan birləşdir","mergeDown":"Soldan birləşdir","splitHorizontal":"Üfüqi böl","splitVertical":"Şaquli böl","title":"Xanaların seçimləri","cellType":"Xana növü","rowSpan":"Sətirləri birləşdir","colSpan":"Sütunları birləşdir","wordWrap":"Sətirlərin sınması","hAlign":"Üfüqi düzləndirmə","vAlign":"Şaquli düzləndirmə","alignBaseline":"Mətn xətti","bgColor":"Doldurma rəngi","borderColor":"Sərhədin rəngi","data":"Məlumatlar","header":"Başlıq","yes":"Bəli","no":"Xeyr","invalidWidth":"Xanasın eni rəqəm olmalıdır.","invalidHeight":"Xanasın hündürlüyü rəqəm olmalıdır.","invalidRowSpan":"Birləşdirdiyiniz sütun xanaların sayı tam və müsbət rəqəm olmalıdır.","invalidColSpan":"Birləşdirdiyiniz sətir xanaların sayı tam və müsbət rəqəm olmalıdır.","chooseColor":"Seç"},"cellPad":"Xanalardakı kənar 
boşluqlar","cellSpace":"Xanalararası interval","column":{"menu":"Sütun","insertBefore":"Sola sütun əlavə et","insertAfter":"Sağa sütun əlavə et","deleteColumn":"Sütunları sil"},"columns":"Sütunlar","deleteTable":"Cədvəli sil","headers":"Başlıqlar","headersBoth":"Hər ikisi","headersColumn":"Birinci sütun","headersNone":"yox","headersRow":"Birinci sətir","heightUnit":"height unit","invalidBorder":"Sərhədlərin eni müsbət rəqəm olmalıdır.","invalidCellPadding":"Xanalardakı kənar boşluqlar müsbət rəqəm olmalıdır.","invalidCellSpacing":"Xanalararası interval müsbət rəqəm olmalıdır.","invalidCols":"Sütunlarin sayı tam və müsbət olmalıdır.","invalidHeight":"Cədvəlin hündürlüyü rəqəm olmalıdır.","invalidRows":"Sətirlətin sayı tam və müsbət olmalıdır.","invalidWidth":"Cədvəlin eni rəqəm olmalıdır.","menu":"Cədvəl alətləri","row":{"menu":"Sətir","insertBefore":"Yuxarıya sətir əlavə et","insertAfter":"Aşağıya sətir əlavə et","deleteRow":"Sətirləri sil"},"rows":"Sətirlər","summary":"Xülasə","title":"Cədvəl alətləri","toolbar":"Cədvəl","widthPc":"faiz","widthPx":"piksel","widthUnit":"en vahidi"},"undo":{"redo":"Təkrar et","undo":"İmtina et"},"widget":{"move":"Tıklayın və aparın","label":"%1 vidjet"},"uploadwidget":{"abort":"Serverə yükləmə istifadəçi tərəfindən dayandırılıb","doneOne":"Fayl müvəffəqiyyətlə yüklənib","doneMany":"%1 fayllar müvəffəqiyyətlə yüklənib","uploadOne":"Faylın yüklənməsi ({percentage}%)","uploadMany":"Faylların yüklənməsi, {max}-dan {current} hazır ({percentage}%)..."},"wsc":{"btnIgnore":"Ignore","btnIgnoreAll":"Ignore All","btnReplace":"Replace","btnReplaceAll":"Replace All","btnUndo":"Undo","changeTo":"Change to","errorLoading":"Error loading application service host: %s.","ieSpellDownload":"Spell checker not installed. 
Do you want to download it now?","manyChanges":"Spell check complete: %1 words changed","noChanges":"Spell check complete: No words changed","noMispell":"Spell check complete: No misspellings found","noSuggestions":"- No suggestions -","notAvailable":"Sorry, but service is unavailable now.","notInDic":"Not in dictionary","oneChange":"Spell check complete: One word changed","progress":"Spell check in progress...","title":"Spell Checker","toolbar":"Check Spelling"}}; | PypiClean |
/BIT_framework-0.0.2-py3-none-any.whl/BIT_DL/pytorch/core/layers.py | import copy
import functools
import sys
from typing import Any, Callable, Dict, List, Optional, Type, Union
import torch
from torch import nn
from BIT_DL.pytorch.core import cell_wrappers as wrappers
from BIT_DL.pytorch.core.regularizers import L1L2, Regularizer
from BIT_DL.pytorch.hyperparams import HParams
from BIT_DL.pytorch.utils import utils
from BIT_DL.pytorch.utils.dtypes import is_str
# Public API of this module: hyperparameter helpers, layer/cell factory
# functions, and a few small utility layer classes.
__all__ = [
    'default_rnn_cell_hparams',
    'get_rnn_cell',
    'identity',
    'default_regularizer_hparams',
    'get_initializer',
    'get_regularizer',
    'get_activation_fn',
    'get_layer',
    'MaxReducePool1d',
    'AvgReducePool1d',
    'get_pooling_layer_hparams',
    'MergeLayer',
    'Flatten',
    'Identity',
]
def default_rnn_cell_hparams():
    r"""Returns a `dict` of RNN cell hyperparameters and their default values.

    .. code-block:: python

        {
            "type": "LSTMCell",
            "kwargs": {
                "num_units": 256
            },
            "num_layers": 1,
            "dropout": {
                "input_keep_prob": 1.0,
                "output_keep_prob": 1.0,
                "state_keep_prob": 1.0,
                "variational_recurrent": False,
            },
            "residual": False,
            "highway": False,
        }

    Here:

    `"type"`: str or cell class or cell instance
        The RNN cell type. This can be

        - The string name or full module path of a cell class. If class name is
          provided, the class must be in module :mod:`torch.nn.modules.rnn`,
          :mod:`texar.torch.core.cell_wrappers`, or :mod:`texar.torch.custom`.
        - A cell class.
        - An instance of a cell class. This is not valid if `"num_layers"` > 1.

        For example

        .. code-block:: python

            "type": "LSTMCell"                 # class name
            "type": "torch.nn.GRUCell"         # module path
            "type": "my_module.MyCell"         # module path
            "type": torch.nn.GRUCell           # class
            "type": LSTMCell(hidden_size=100)  # cell instance
            "type": MyCell(...)                # cell instance

    `"kwargs"`: dict
        Keyword arguments for the constructor of the cell class.
        A cell is created by :python:`cell_class(**kwargs)`, where
        `cell_class` is specified in "type" above.
        Ignored if "type" is a cell instance.

        .. note::
            It is unnecessary to specify `"input_size"` within `"kwargs"`.
            This value is automatically filled based on layer index.

        .. note::
            Although PyTorch uses `"hidden_size"` to denote the hidden layer
            size, we follow TensorFlow conventions and use `"num_units"`.

    `"num_layers"`: int
        Number of cell layers. Each layer is a cell created as above, with
        the same hyperparameters specified in `"kwargs"`.

    `"dropout"`: dict
        Dropout applied to the cell in **each** layer. See
        :class:`~texar.torch.core.cell_wrappers.DropoutWrapper` for details of
        the hyperparameters. If all `"\*_keep_prob"` = 1, no dropout is
        applied.

        Specifically, if `"variational_recurrent"` = `True`,
        the same dropout mask is applied across all time steps per batch.

    `"residual"`: bool
        If `True`, apply residual connection on the inputs and
        outputs of cell in **each** layer except the first layer. Ignored
        if `"num_layers"` = 1.

    `"highway"`: bool
        If True, apply highway connection on the inputs and
        outputs of cell in each layer except the first layer. Ignored if
        `"num_layers"` = 1.
    """
    return {
        'type': 'LSTMCell',
        'kwargs': {
            'num_units': 256,
        },
        'num_layers': 1,
        'dropout': {
            'input_keep_prob': 1.0,
            'output_keep_prob': 1.0,
            'state_keep_prob': 1.0,
            'variational_recurrent': False,
        },
        'residual': False,
        'highway': False,
        # 'type' may be a string, a class, or a cell instance, so HParams
        # type-checking must be disabled for it.
        '@no_typecheck': ['type']
    }
def default_regularizer_hparams():
    r"""Returns the default hyperparameters of a variable regularizer.

    .. code-block:: python

        {
            "type": "L1L2",
            "kwargs": {
                "l1": 0.,
                "l2": 0.
            }
        }

    The default corresponds to
    :class:`~texar.torch.core.regularizers.L1L2`; with ``(l1=0, l2=0)``
    regularization is effectively disabled.
    """
    # Both coefficients default to zero, i.e. a no-op regularizer.
    zero_coefficients = {"l1": 0., "l2": 0.}
    return {"type": "L1L2", "kwargs": zero_coefficients}
def get_rnn_cell(input_size, hparams=None):
    r"""Creates an RNN cell.

    See :func:`~texar.torch.core.default_rnn_cell_hparams` for all
    hyperparameters and default values.

    Args:
        input_size (int): Size of the input to the cell in the first layer.
        hparams (dict or HParams, optional): Cell hyperparameters. Missing
            hyperparameters are set to default values.

    Returns:
        A cell instance.

    Raises:
        ValueError: If ``hparams["num_layers"]``>1 and ``hparams["type"]`` is a
            class instance.
    """
    if hparams is None or isinstance(hparams, dict):
        hparams = HParams(hparams, default_rnn_cell_hparams())
    d_hp = hparams['dropout']
    variational_recurrent = d_hp['variational_recurrent']
    input_keep_prob = d_hp['input_keep_prob']
    output_keep_prob = d_hp['output_keep_prob']
    state_keep_prob = d_hp['state_keep_prob']
    cells = []
    num_layers = hparams['num_layers']
    cell_kwargs = hparams['kwargs'].todict()
    # rename 'num_units' to 'hidden_size' following PyTorch conventions
    cell_kwargs['hidden_size'] = cell_kwargs['num_units']
    del cell_kwargs['num_units']
    for layer_i in range(num_layers):
        # Create the basic cell
        cell_type = hparams["type"]
        if layer_i == 0:
            cell_kwargs['input_size'] = input_size
        else:
            # Layers after the first consume the previous layer's output.
            cell_kwargs['input_size'] = cell_kwargs['hidden_size']
        if not isinstance(cell_type, str) and not isinstance(cell_type, type):
            # `cell_type` is a cell *instance*: it cannot be shared across
            # layers, so this only works for a single layer.
            if num_layers > 1:
                raise ValueError(
                    "If 'num_layers'>1, then 'type' must be a cell class or "
                    "its name/module path, rather than a cell instance.")
        cell_modules = ['texar.torch.core.cell_wrappers', # prefer our wrappers
                        'torch.nn.modules.rnn', 'texar.torch.custom']
        cell = utils.check_or_get_instance(cell_type, cell_kwargs, cell_modules)
        if isinstance(cell, nn.RNNCellBase):
            # Wrap built-in PyTorch cells so every cell exposes the same
            # wrapper interface (see cell_wrappers).
            cell = wrappers.wrap_builtin_cell(cell)
        # Optionally add dropout
        if (input_keep_prob < 1.0 or
                output_keep_prob < 1.0 or
                state_keep_prob < 1.0):
            # TODO: Would this result in non-final layer outputs being
            #  dropped twice?
            cell = wrappers.DropoutWrapper(
                cell=cell,
                input_keep_prob=input_keep_prob,
                output_keep_prob=output_keep_prob,
                state_keep_prob=state_keep_prob,
                variational_recurrent=variational_recurrent)
        # Optionally add residual and highway connections
        # (both are skipped on the first layer by design; see hparams doc).
        if layer_i > 0:
            if hparams['residual']:
                cell = wrappers.ResidualWrapper(cell)
            if hparams['highway']:
                cell = wrappers.HighwayWrapper(cell)
        cells.append(cell)
    if hparams['num_layers'] > 1:
        # Stack the per-layer cells into a single multi-layer cell.
        cell = wrappers.MultiRNNCell(cells)
    else:
        cell = cells[0]
    return cell
def identity(inputs: torch.Tensor) -> torch.Tensor:
    r"""Returns a tensor with the same content as the input tensor.

    Useful as a no-op placeholder where an activation function is expected.

    Arguments:
        inputs: The input tensor.

    Returns:
        A tensor of the same shape, type, and content.
    """
    return inputs
def get_regularizer(hparams=None):
    r"""Returns a variable regularizer instance.

    See :func:`~texar.torch.core.default_regularizer_hparams` for all
    hyperparameters and default values. The "type" field can be a subclass
    of :class:`~texar.torch.core.regularizers.Regularizer`, its string name
    or module path, or a class instance.

    Args:
        hparams (dict or HParams, optional): Hyperparameters. Missing
            hyperparameters are set to default values.

    Returns:
        A :class:`~texar.torch.core.regularizers.Regularizer` instance, or
        `None` if :attr:`hparams` is `None` or describes a no-op regularizer.

    Raises:
        ValueError: The resulting regularizer is not an instance of
            :class:`~texar.torch.core.regularizers.Regularizer`.
    """
    if hparams is None:
        return None
    if isinstance(hparams, dict):
        hparams = HParams(hparams, default_regularizer_hparams())
    regularizer = utils.check_or_get_instance(
        hparams.type, hparams.kwargs.todict(),
        ["texar.torch.core.regularizers", "texar.torch.custom"])
    if not isinstance(regularizer, Regularizer):
        raise ValueError("The regularizer must be an instance of "
                         "texar.torch.core.regularizers.Regularizer.")
    # An L1L2 regularizer with both coefficients zero is a no-op; treat it
    # the same as "no regularizer".
    is_noop = (isinstance(regularizer, L1L2)
               and regularizer.l1 == 0. and regularizer.l2 == 0.)
    return None if is_noop else regularizer
def get_initializer(hparams=None) \
        -> Optional[Callable[[torch.Tensor], torch.Tensor]]:
    r"""Returns an initializer instance.

    Args:
        hparams (dict or HParams, optional): Hyperparameters with the
            structure

            .. code-block:: python

                {
                    "type": "initializer_class_or_function",
                    "kwargs": {
                        # ...
                    }
                }

            The `"type"` field can be a function name or module path; named
            functions are looked up in
            :torch_docs:`torch.nn.init <nn.html#torch-nn-init>` and
            :mod:`texar.torch.custom`. It can also be the initialization
            function itself, called as
            :python:`initialization_fn(**kwargs)`; `"kwargs"` may be omitted
            when no keyword argument is required.

    Returns:
        An initializer instance. `None` if :attr:`hparams` is `None`.
    """
    if hparams is None:
        return None
    init_kwargs = hparams.get('kwargs', {})
    if isinstance(init_kwargs, HParams):
        init_kwargs = init_kwargs.todict()
    search_modules = ['torch.nn.init', 'torch', 'texar.torch.custom']
    init_fn = utils.get_function(hparams['type'], search_modules)
    # Bind the keyword arguments so the result is called as `fn(tensor)`.
    return functools.partial(init_fn, **init_kwargs)
def get_activation_fn(fn_name: Optional[Union[str,
                                              Callable[[torch.Tensor],
                                                       torch.Tensor]]] = None,
                      kwargs: Union[HParams, Dict, None] = None) \
        -> Optional[Callable[[torch.Tensor], torch.Tensor]]:
    r"""Returns an activation function `fn` with the signature
    `output = fn(input)`.

    If the function specified by :attr:`fn_name` has more than one argument
    without a default value, all these arguments except the input feature
    argument must be specified in :attr:`kwargs`. Arguments with default
    values can also be specified in :attr:`kwargs` to override the defaults;
    in this case a partially-applied function with the above signature is
    returned.

    Args:
        fn_name (str or callable): An activation function, or its name or
            module path. The function can be:

            - A built-in function defined in
              :torch_docs:`torch.nn.functional<nn.html#torch-nn-functional>`
            - A user-defined activation function in module
              :mod:`texar.torch.custom`.
            - An external activation function, given as a full module path,
              e.g., ``"my_module.my_activation_fn"``.

        kwargs (optional): A `dict` or instance of :class:`~texar.torch.HParams`
            containing the keyword arguments of the activation function.

    Returns:
        An activation function. `None` if :attr:`fn_name` is `None`.
    """
    if fn_name is None:
        return None
    search_modules = ['torch', 'torch.nn.functional',
                      'texar.torch.custom', 'texar.torch.core.layers']
    fn = utils.get_function(fn_name, search_modules)
    if kwargs is None:
        return fn
    if isinstance(kwargs, HParams):
        kwargs = kwargs.todict()
    # Bind the extra keyword arguments so callers can use `output = fn(input)`.
    return functools.partial(fn, **kwargs)
def get_layer(hparams: Union[HParams, Dict[str, Any]]) -> nn.Module:
    r"""Makes a layer instance.

    The layer must be an instance of :torch_nn:`Module`.

    Args:
        hparams (dict or HParams): Hyperparameters of the layer, with
            structure:

            .. code-block:: python

                {
                    "type": "LayerClass",
                    "kwargs": {
                        # Keyword arguments of the layer class
                        # ...
                    }
                }

            Here:

            `"type"`: str or layer class or layer instance
                The layer type. This can be

                - The string name or full module path of a layer class. If
                  the class name is provided, the class must be in module
                  :torch_nn:`Module`, :mod:`texar.torch.core`, or
                  :mod:`texar.torch.custom`.
                - A layer class.
                - An instance of a layer class.

                For example

                .. code-block:: python

                    "type": "Conv1D"                               # class name
                    "type": "texar.torch.core.MaxReducePooling1D"  # module path
                    "type": "my_module.MyLayer"                    # module path
                    "type": torch.nn.Module.Linear                 # class
                    "type": Conv1D(filters=10, kernel_size=2)      # instance
                    "type": MyLayer(...)                           # instance

            `"kwargs"`: dict
                A dictionary of keyword arguments for the constructor of the
                layer class. Ignored if :attr:`"type"` is a layer instance.

                - Arguments named "activation" can be a callable, or a `str` of
                  the name or module path to the activation function.
                - Arguments named "\*_regularizer" and "\*_initializer" can be a
                  class instance, or a `dict` of hyperparameters of respective
                  regularizers and initializers.
                - Arguments named "\*_constraint" can be a callable, or a `str`
                  of the name or full path to the constraint function.

    Returns:
        A layer instance. If ``hparams["type"]`` is a layer instance, returns it
        directly.

    Raises:
        ValueError: If :attr:`hparams` is `None`.
        ValueError: If the resulting layer is not an instance of
            :torch_nn:`Module`.
    """
    if hparams is None:
        raise ValueError("`hparams` must not be `None`.")
    layer_type = hparams["type"]
    if not is_str(layer_type) and not isinstance(layer_type, type):
        # `layer_type` is already a constructed layer instance; use as-is.
        layer = layer_type
    else:
        layer_modules = ["torch.nn", "texar.torch.core", "texar.torch.custom"]
        layer_class: Type[nn.Module] = utils.check_or_get_class(
            layer_type, layer_modules)
        if isinstance(hparams, dict):
            # PyTorch layers need their input dimension at construction
            # time; fail early with a clear message if it is missing.
            if (layer_class.__name__ == "Linear" and
                    "in_features" not in hparams["kwargs"]):
                raise ValueError("\"in_features\" should be specified for "
                                 "\"torch.nn.{}\"".format(layer_class.__name__))
            elif (layer_class.__name__ in ["Conv1d", "Conv2d", "Conv3d"] and
                    "in_channels" not in hparams["kwargs"]):
                raise ValueError("\"in_channels\" should be specified for "
                                 "\"torch.nn.{}\"".format(layer_class.__name__))
            default_kwargs: Dict[str, Any] = {}
            default_hparams = {"type": layer_type, "kwargs": default_kwargs}
            hparams = HParams(hparams, default_hparams)
        # this case needs to be handled separately because nn.Sequential
        # does not accept kwargs
        if layer_type == "Sequential":
            names: List[str] = []
            layer = nn.Sequential()
            sub_hparams = hparams.kwargs.layers
            for hparam in sub_hparams:
                # Recursively build each sub-layer from its own hparams.
                sub_layer = get_layer(hparam)
                # Uniquify module names so repeated classes do not collide.
                name = utils.uniquify_str(sub_layer.__class__.__name__, names)
                names.append(name)
                layer.add_module(name=name, module=sub_layer)
        else:
            layer = utils.get_instance(layer_type, hparams.kwargs.todict(),
                                       layer_modules)
    if not isinstance(layer, nn.Module):
        raise ValueError("layer must be an instance of `torch.nn.Module`.")
    return layer
class MaxReducePool1d(nn.Module):
    r"""A subclass of :torch_nn:`Module`.
    Max-pooling for 1D inputs where the pooling window spans the entire
    input length, i.e. the last (pooled) dimension is fully reduced.
    Equivalent to :torch_nn:`MaxPool1d` with `pool_size=input_length`.
    """

    def forward(self,  # type: ignore
                input: torch.Tensor) -> torch.Tensor:
        # Collapse the whole time dimension (dim 2) to its maximum;
        # the argmax indices returned by `max` are discarded.
        return input.max(dim=2).values
class AvgReducePool1d(nn.Module):
    r"""A subclass of :torch_nn:`Module`.
    Average-pooling for 1D inputs where the pooling window spans the entire
    input length, i.e. the last (pooled) dimension is fully reduced.
    Equivalent to :torch_nn:`AvgPool1d` with `pool_size=input_length`.
    """

    def forward(self,  # type: ignore
                input: torch.Tensor) -> torch.Tensor:
        # Collapse the whole time dimension (dim 2) to its mean.
        return input.mean(dim=2)
# Maps standard 1D pooling layers (by class-name string or by class) to their
# reduce-pooling counterparts. Used by `get_pooling_layer_hparams` when
# `kernel_size` is unspecified, i.e. the entire length should be pooled.
_POOLING_TO_REDUCE = {
    "MaxPool1d": "MaxReducePool1d",
    "AvgPool1d": "AvgReducePool1d",
    torch.nn.MaxPool1d: MaxReducePool1d,
    torch.nn.AvgPool1d: AvgReducePool1d
}
def get_pooling_layer_hparams(hparams: Union[HParams, Dict[str, Any]]) \
        -> Dict[str, Any]:
    r"""Creates pooling layer hyperparameters `dict` for :func:`get_layer`.

    If the :attr:`hparams` sets `'kernel_size'` to `None`, the layer will be
    changed to the respective reduce-pooling layer. For example,
    :torch_docs:`torch.conv.MaxPool1d <nn.html#torch.nn.Conv1d>` is replaced
    with :class:`~texar.torch.core.MaxReducePool1d`.

    Args:
        hparams (dict or HParams): Layer hyperparameters with a `'type'`
            entry and an optional `'kwargs'` dict.

    Returns:
        A new hyperparameter `dict`; the input `hparams` is left unmodified.
    """
    if isinstance(hparams, HParams):
        hparams = hparams.todict()
    # Deep-copy so that popping entries from 'kwargs' below cannot mutate the
    # caller's dict: `copy.copy` is shallow and would share the nested
    # 'kwargs' dict with the input.
    new_hparams = copy.deepcopy(hparams)
    kwargs = new_hparams.get('kwargs', None)
    if kwargs and kwargs.get('kernel_size', None) is None:
        pool_type = hparams['type']
        new_hparams['type'] = _POOLING_TO_REDUCE.get(pool_type, pool_type)
        # Reduce-pooling layers take no window arguments.
        kwargs.pop('kernel_size', None)
        kwargs.pop('stride', None)
        kwargs.pop('padding', None)
    return new_hparams
class MergeLayer(nn.Module):
    r"""A subclass of :torch_nn:`Module`.
    A layer that consists of multiple layers in parallel. Input is fed to
    each of the parallel layers, and the outputs are merged with a
    specified mode.

    Args:
        layers (list, optional): A list of :torch_docs:`torch.nn.Module
            <nn.html#module>` instances, or a list of hyperparameter
            dictionaries each of which specifies `"type"` and `"kwargs"` of each
            layer (see the `hparams` argument of :func:`get_layer`).
            If `None`, this layer degenerates to a merging operator that merges
            inputs directly.
        mode (str): Mode of the merge op. This can be:

            - :attr:`'concat'`: Concatenates layer outputs along one dim.
              Tensors must have the same shape except for the dimension
              specified in `dim`, which can have different sizes.
            - :attr:`'elemwise_sum'`: Outputs element-wise sum.
            - :attr:`'elemwise_mul'`: Outputs element-wise product.
            - :attr:`'sum'`: Computes the sum of layer outputs along the
              dimension given by `dim`. For example, given `dim=1`,
              two tensors of shape `[a, b]` and `[a, c]` respectively
              will result in a merged tensor of shape `[a]`.
            - :attr:`'mean'`: Computes the mean of layer outputs along the
              dimension given in `dim`.
            - :attr:`'prod'`: Computes the product of layer outputs along the
              dimension given in `dim`.
            - :attr:`'max'`: Computes the maximum of layer outputs along the
              dimension given in `dim`.
            - :attr:`'min'`: Computes the minimum of layer outputs along the
              dimension given in `dim`.
            - :attr:`'and'`: Computes the `logical and` of layer outputs along
              the dimension given in `dim`.
            - :attr:`'or'`: Computes the `logical or` of layer outputs along
              the dimension given in `dim`.
            - :attr:`'logsumexp'`: Computes
              log(sum(exp(elements across the dimension of layer outputs)))

        dim (int): The dim to use in merging. Ignored in modes
            :attr:`'elemwise_sum'` and :attr:`'elemwise_mul'`.
    """

    # Reduction ops for the non-elementwise merge modes; each maps the
    # concatenated tensor and a dimension to the reduced result.
    _functions: Dict[str, Callable[[torch.Tensor, int], torch.Tensor]] = {
        "sum": torch.sum,
        "mean": torch.mean,
        "prod": torch.prod,
        # torch.max/min return (values, indices); keep only the values.
        "max": lambda tensors, dim: torch.max(tensors, dim)[0],
        "min": lambda tensors, dim: torch.min(tensors, dim)[0],
        "and": torch.all,
        "or": torch.any,
        "logsumexp": torch.logsumexp
    }

    def __init__(self, layers: Optional[List[nn.Module]] = None,
                 mode: str = 'concat', dim: Optional[int] = None):
        super().__init__()
        self._mode = mode
        self._dim = dim
        self._layers: Optional[nn.ModuleList] = None
        if layers is not None:
            if len(layers) == 0:
                raise ValueError(
                    "'layers' must be either None or a non-empty list.")
            self._layers = nn.ModuleList()
            for layer in layers:
                if isinstance(layer, nn.Module):
                    self._layers.append(layer)
                else:
                    # `layer` is a hyperparameter dict; instantiate it.
                    self._layers.append(get_layer(hparams=layer))

    def forward(self, input: torch.Tensor) -> torch.Tensor: # type: ignore
        r"""Feed input to every containing layer and merge the outputs.

        Args:
            input: The input tensor.

        Returns:
            The merged tensor.
        """
        layer_outputs: List[torch.Tensor]
        if self._layers is None:
            # No parallel layers: merge the input(s) directly. A single
            # tensor is wrapped in a one-element list.
            layer_outputs = input
            if not isinstance(layer_outputs, (list, tuple)):
                layer_outputs = [layer_outputs]
        else:
            layer_outputs = []
            for layer in self._layers:
                layer_output = layer(input)
                layer_outputs.append(layer_output)
        # the merge dimension cannot be determined until we get the output from
        # individual layers.
        # In case of reduce pooling operations, feature dim is removed and
        # channel dim is merged.
        # In non-reduce pooling operations, feature dim is merged.
        dim = self._dim if self._dim is not None else -1
        if self._mode == 'concat':
            outputs = torch.cat(tensors=layer_outputs, dim=dim)
        elif self._mode == 'elemwise_sum':
            outputs = layer_outputs[0]
            for i in range(1, len(layer_outputs)):
                outputs = torch.add(outputs, layer_outputs[i])
        elif self._mode == 'elemwise_mul':
            outputs = layer_outputs[0]
            for i in range(1, len(layer_outputs)):
                outputs = torch.mul(outputs, layer_outputs[i])
        elif self._mode in self._functions:
            # Reduce modes first concatenate along `dim`, then reduce that
            # same dimension with the registered function.
            _concat = torch.cat(tensors=layer_outputs, dim=dim)
            outputs = self._functions[self._mode](_concat, dim)
        else:
            raise ValueError("Unknown merge mode: '%s'" % self._mode)
        return outputs

    @property
    def layers(self) -> Optional[nn.ModuleList]:
        r"""The list of parallel layers.
        """
        return self._layers
class Flatten(nn.Module):
    r"""Flatten layer to flatten a tensor after convolution.

    Collapses all dimensions except the batch dimension (dim 0), so an
    input of shape `[batch, d1, d2, ...]` becomes `[batch, d1 * d2 * ...]`.
    """

    def forward(self,  # type: ignore
                input: torch.Tensor) -> torch.Tensor:
        # `view` raises on non-contiguous inputs (e.g. after a transpose);
        # `reshape` returns a view when possible and copies otherwise.
        return input.reshape(input.size(0), -1)
class Identity(nn.Module):
    r"""Identity activation layer.

    Returns its input unchanged; useful as a placeholder wherever an
    activation module is expected.
    """

    def forward(self,  # type: ignore
                input: torch.Tensor) -> torch.Tensor:
        return input
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.